From 88054ee6272e2a64cf22d2b6f6b3f0334db0e81a Mon Sep 17 00:00:00 2001 From: ross <92001561+z0r0z@users.noreply.github.com> Date: Tue, 23 Jan 2024 10:03:52 +0000 Subject: [PATCH] =?UTF-8?q?=E2=9C=A8=20Beta=20Design?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- .gas-snapshot | 6 +- .github/workflows/ci.yml | 36 + .github/workflows/lints.yml | 23 - .github/workflows/tests.yml | 21 - .gitignore | 5 +- .gitmodules | 9 +- .gitpod.yml | 6 - LICENSE | 682 +- README.md | 24 +- foundry.toml | 20 +- lib/ds-test | 1 - lib/forge-std | 2 +- lib/solady | 1 + lib/solbase | 1 - node_modules/.bin/prettier | 1 - node_modules/.bin/semver | 1 - node_modules/.yarn-integrity | 31 - node_modules/@solidity-parser/parser/LICENSE | 21 - .../@solidity-parser/parser/README.md | 142 - .../parser/dist/Solidity-JSLPOCIO.tokens | 249 - .../parser/dist/Solidity-WDNIKDDG.tokens | 245 - .../parser/dist/antlr/Solidity.tokens | 249 - .../parser/dist/antlr/SolidityLexer.tokens | 249 - .../@solidity-parser/parser/dist/index.cjs.js | 36940 ---------- .../parser/dist/index.cjs.js.map | 7 - .../parser/dist/index.iife.js | 40776 ----------- .../parser/dist/index.iife.js.map | 7 - .../parser/dist/src/ASTBuilder.d.ts | 106 - .../parser/dist/src/ErrorListener.d.ts | 9 - .../parser/dist/src/antlr/SolidityLexer.d.ts | 160 - .../dist/src/antlr/SolidityListener.d.ts | 1074 - .../parser/dist/src/antlr/SolidityParser.d.ts | 1371 - .../dist/src/antlr/SolidityVisitor.d.ts | 689 - .../parser/dist/src/ast-types.d.ts | 443 - .../parser/dist/src/index.d.ts | 2 - .../parser/dist/src/parser.d.ts | 21 - .../parser/dist/src/tokens-string.d.ts | 2 - .../parser/dist/src/tokens.d.ts | 2 - .../parser/dist/src/types.d.ts | 28 - .../parser/dist/test/ast.d.ts | 1 - .../parser/dist/test/index.d.ts | 1 - .../parser/dist/test/utils.d.ts | 6 - .../@solidity-parser/parser/package.json | 81 - .../parser/src/.ASTBuilder.ts.swp | Bin 81920 -> 0 bytes 
.../@solidity-parser/parser/src/ASTBuilder.ts | 2050 - .../parser/src/ErrorListener.ts | 31 - .../parser/src/antlr/Solidity.interp | 374 - .../parser/src/antlr/Solidity.tokens | 249 - .../parser/src/antlr/SolidityLexer.interp | 424 - .../parser/src/antlr/SolidityLexer.tokens | 249 - .../parser/src/antlr/SolidityLexer.ts | 1183 - .../parser/src/antlr/SolidityListener.ts | 1177 - .../parser/src/antlr/SolidityParser.ts | 10669 --- .../parser/src/antlr/SolidityVisitor.ts | 792 - .../@solidity-parser/parser/src/ast-types.ts | 754 - .../parser/src/declarations.d.ts | 1 - .../@solidity-parser/parser/src/index.ts | 3 - .../@solidity-parser/parser/src/parser.ts | 135 - .../parser/src/tokens-string.js | 10 - .../@solidity-parser/parser/src/tokens.ts | 91 - .../@solidity-parser/parser/src/types.ts | 32 - node_modules/ansi-regex/index.d.ts | 37 - node_modules/ansi-regex/index.js | 10 - node_modules/ansi-regex/license | 9 - node_modules/ansi-regex/package.json | 55 - node_modules/ansi-regex/readme.md | 78 - node_modules/antlr4ts/ANTLRErrorListener.d.ts | 43 - node_modules/antlr4ts/ANTLRErrorListener.js | 7 - .../antlr4ts/ANTLRErrorListener.js.map | 1 - node_modules/antlr4ts/ANTLRErrorStrategy.d.ts | 109 - node_modules/antlr4ts/ANTLRErrorStrategy.js | 7 - .../antlr4ts/ANTLRErrorStrategy.js.map | 1 - node_modules/antlr4ts/ANTLRInputStream.d.ts | 51 - node_modules/antlr4ts/ANTLRInputStream.js | 161 - node_modules/antlr4ts/ANTLRInputStream.js.map | 1 - node_modules/antlr4ts/BailErrorStrategy.d.ts | 48 - node_modules/antlr4ts/BailErrorStrategy.js | 82 - .../antlr4ts/BailErrorStrategy.js.map | 1 - .../antlr4ts/BufferedTokenStream.d.ts | 143 - node_modules/antlr4ts/BufferedTokenStream.js | 489 - .../antlr4ts/BufferedTokenStream.js.map | 1 - node_modules/antlr4ts/CharStream.d.ts | 26 - node_modules/antlr4ts/CharStream.js | 7 - node_modules/antlr4ts/CharStream.js.map | 1 - node_modules/antlr4ts/CharStreams.d.ts | 54 - node_modules/antlr4ts/CharStreams.js | 133 - 
node_modules/antlr4ts/CharStreams.js.map | 1 - node_modules/antlr4ts/CodePointBuffer.d.ts | 40 - node_modules/antlr4ts/CodePointBuffer.js | 234 - node_modules/antlr4ts/CodePointBuffer.js.map | 1 - .../antlr4ts/CodePointCharStream.d.ts | 45 - node_modules/antlr4ts/CodePointCharStream.js | 149 - .../antlr4ts/CodePointCharStream.js.map | 1 - node_modules/antlr4ts/CommonToken.d.ts | 116 - node_modules/antlr4ts/CommonToken.js | 229 - node_modules/antlr4ts/CommonToken.js.map | 1 - node_modules/antlr4ts/CommonTokenFactory.d.ts | 53 - node_modules/antlr4ts/CommonTokenFactory.js | 63 - .../antlr4ts/CommonTokenFactory.js.map | 1 - node_modules/antlr4ts/CommonTokenStream.d.ts | 52 - node_modules/antlr4ts/CommonTokenStream.js | 126 - .../antlr4ts/CommonTokenStream.js.map | 1 - .../antlr4ts/ConsoleErrorListener.d.ts | 29 - node_modules/antlr4ts/ConsoleErrorListener.js | 33 - .../antlr4ts/ConsoleErrorListener.js.map | 1 - node_modules/antlr4ts/Decorators.d.ts | 8 - node_modules/antlr4ts/Decorators.js | 26 - node_modules/antlr4ts/Decorators.js.map | 1 - .../antlr4ts/DefaultErrorStrategy.d.ts | 347 - node_modules/antlr4ts/DefaultErrorStrategy.js | 813 - .../antlr4ts/DefaultErrorStrategy.js.map | 1 - node_modules/antlr4ts/Dependents.d.ts | 69 - node_modules/antlr4ts/Dependents.js | 75 - node_modules/antlr4ts/Dependents.js.map | 1 - .../antlr4ts/DiagnosticErrorListener.d.ts | 60 - .../antlr4ts/DiagnosticErrorListener.js | 147 - .../antlr4ts/DiagnosticErrorListener.js.map | 1 - .../antlr4ts/FailedPredicateException.d.ts | 21 - .../antlr4ts/FailedPredicateException.js | 64 - .../antlr4ts/FailedPredicateException.js.map | 1 - .../antlr4ts/InputMismatchException.d.ts | 14 - .../antlr4ts/InputMismatchException.js | 39 - .../antlr4ts/InputMismatchException.js.map | 1 - node_modules/antlr4ts/IntStream.d.ts | 197 - node_modules/antlr4ts/IntStream.js | 22 - node_modules/antlr4ts/IntStream.js.map | 1 - .../antlr4ts/InterpreterRuleContext.d.ts | 33 - .../antlr4ts/InterpreterRuleContext.js | 46 
- .../antlr4ts/InterpreterRuleContext.js.map | 1 - node_modules/antlr4ts/LICENSE | 27 - node_modules/antlr4ts/Lexer.d.ts | 141 - node_modules/antlr4ts/Lexer.js | 335 - node_modules/antlr4ts/Lexer.js.map | 1 - node_modules/antlr4ts/LexerInterpreter.d.ts | 23 - node_modules/antlr4ts/LexerInterpreter.js | 79 - node_modules/antlr4ts/LexerInterpreter.js.map | 1 - .../antlr4ts/LexerNoViableAltException.d.ts | 19 - .../antlr4ts/LexerNoViableAltException.js | 56 - .../antlr4ts/LexerNoViableAltException.js.map | 1 - node_modules/antlr4ts/ListTokenSource.d.ts | 86 - node_modules/antlr4ts/ListTokenSource.js | 209 - node_modules/antlr4ts/ListTokenSource.js.map | 1 - .../antlr4ts/NoViableAltException.d.ts | 30 - node_modules/antlr4ts/NoViableAltException.js | 54 - .../antlr4ts/NoViableAltException.js.map | 1 - node_modules/antlr4ts/Parser.d.ts | 372 - node_modules/antlr4ts/Parser.js | 843 - node_modules/antlr4ts/Parser.js.map | 1 - .../antlr4ts/ParserErrorListener.d.ts | 112 - node_modules/antlr4ts/ParserErrorListener.js | 7 - .../antlr4ts/ParserErrorListener.js.map | 1 - node_modules/antlr4ts/ParserInterpreter.d.ts | 151 - node_modules/antlr4ts/ParserInterpreter.js | 407 - .../antlr4ts/ParserInterpreter.js.map | 1 - node_modules/antlr4ts/ParserRuleContext.d.ts | 169 - node_modules/antlr4ts/ParserRuleContext.js | 300 - .../antlr4ts/ParserRuleContext.js.map | 1 - node_modules/antlr4ts/ProxyErrorListener.d.ts | 20 - node_modules/antlr4ts/ProxyErrorListener.js | 49 - .../antlr4ts/ProxyErrorListener.js.map | 1 - .../antlr4ts/ProxyParserErrorListener.d.ts | 21 - .../antlr4ts/ProxyParserErrorListener.js | 58 - .../antlr4ts/ProxyParserErrorListener.js.map | 1 - node_modules/antlr4ts/README.md | 143 - .../antlr4ts/RecognitionException.d.ts | 87 - node_modules/antlr4ts/RecognitionException.js | 104 - .../antlr4ts/RecognitionException.js.map | 1 - node_modules/antlr4ts/Recognizer.d.ts | 101 - node_modules/antlr4ts/Recognizer.js | 218 - node_modules/antlr4ts/Recognizer.js.map | 1 - 
node_modules/antlr4ts/RuleContext.d.ts | 124 - node_modules/antlr4ts/RuleContext.js | 216 - node_modules/antlr4ts/RuleContext.js.map | 1 - .../antlr4ts/RuleContextWithAltNum.d.ts | 22 - .../antlr4ts/RuleContextWithAltNum.js | 50 - .../antlr4ts/RuleContextWithAltNum.js.map | 1 - node_modules/antlr4ts/RuleDependency.d.ts | 32 - node_modules/antlr4ts/RuleDependency.js | 22 - node_modules/antlr4ts/RuleDependency.js.map | 1 - node_modules/antlr4ts/RuleVersion.d.ts | 11 - node_modules/antlr4ts/RuleVersion.js | 18 - node_modules/antlr4ts/RuleVersion.js.map | 1 - node_modules/antlr4ts/Token.d.ts | 83 - node_modules/antlr4ts/Token.js | 39 - node_modules/antlr4ts/Token.js.map | 1 - node_modules/antlr4ts/TokenFactory.d.ts | 23 - node_modules/antlr4ts/TokenFactory.js | 7 - node_modules/antlr4ts/TokenFactory.js.map | 1 - node_modules/antlr4ts/TokenSource.d.ts | 67 - node_modules/antlr4ts/TokenSource.js | 7 - node_modules/antlr4ts/TokenSource.js.map | 1 - node_modules/antlr4ts/TokenStream.d.ts | 145 - node_modules/antlr4ts/TokenStream.js | 7 - node_modules/antlr4ts/TokenStream.js.map | 1 - .../antlr4ts/TokenStreamRewriter.d.ts | 216 - node_modules/antlr4ts/TokenStreamRewriter.js | 507 - .../antlr4ts/TokenStreamRewriter.js.map | 1 - node_modules/antlr4ts/Vocabulary.d.ts | 117 - node_modules/antlr4ts/Vocabulary.js | 7 - node_modules/antlr4ts/Vocabulary.js.map | 1 - node_modules/antlr4ts/VocabularyImpl.d.ts | 47 - node_modules/antlr4ts/VocabularyImpl.js | 119 - node_modules/antlr4ts/VocabularyImpl.js.map | 1 - node_modules/antlr4ts/WritableToken.d.ts | 13 - node_modules/antlr4ts/WritableToken.js | 7 - node_modules/antlr4ts/WritableToken.js.map | 1 - node_modules/antlr4ts/atn/ATN.d.ts | 123 - node_modules/antlr4ts/atn/ATN.js | 221 - node_modules/antlr4ts/atn/ATN.js.map | 1 - node_modules/antlr4ts/atn/ATNConfig.d.ts | 140 - node_modules/antlr4ts/atn/ATNConfig.js | 524 - node_modules/antlr4ts/atn/ATNConfig.js.map | 1 - node_modules/antlr4ts/atn/ATNConfigSet.d.ts | 113 - 
node_modules/antlr4ts/atn/ATNConfigSet.js | 446 - node_modules/antlr4ts/atn/ATNConfigSet.js.map | 1 - .../atn/ATNDeserializationOptions.d.ts | 26 - .../antlr4ts/atn/ATNDeserializationOptions.js | 78 - .../atn/ATNDeserializationOptions.js.map | 1 - .../antlr4ts/atn/ATNDeserializer.d.ts | 86 - node_modules/antlr4ts/atn/ATNDeserializer.js | 1086 - .../antlr4ts/atn/ATNDeserializer.js.map | 1 - node_modules/antlr4ts/atn/ATNSimulator.d.ts | 28 - node_modules/antlr4ts/atn/ATNSimulator.js | 63 - node_modules/antlr4ts/atn/ATNSimulator.js.map | 1 - node_modules/antlr4ts/atn/ATNState.d.ts | 111 - node_modules/antlr4ts/atn/ATNState.js | 186 - node_modules/antlr4ts/atn/ATNState.js.map | 1 - node_modules/antlr4ts/atn/ATNStateType.d.ts | 19 - node_modules/antlr4ts/atn/ATNStateType.js | 25 - node_modules/antlr4ts/atn/ATNStateType.js.map | 1 - node_modules/antlr4ts/atn/ATNType.d.ts | 19 - node_modules/antlr4ts/atn/ATNType.js | 25 - node_modules/antlr4ts/atn/ATNType.js.map | 1 - .../atn/AbstractPredicateTransition.d.ts | 13 - .../atn/AbstractPredicateTransition.js | 19 - .../atn/AbstractPredicateTransition.js.map | 1 - .../antlr4ts/atn/ActionTransition.d.ts | 17 - node_modules/antlr4ts/atn/ActionTransition.js | 55 - .../antlr4ts/atn/ActionTransition.js.map | 1 - node_modules/antlr4ts/atn/AmbiguityInfo.d.ts | 60 - node_modules/antlr4ts/atn/AmbiguityInfo.js | 86 - .../antlr4ts/atn/AmbiguityInfo.js.map | 1 - node_modules/antlr4ts/atn/AtomTransition.d.ts | 18 - node_modules/antlr4ts/atn/AtomTransition.js | 57 - .../antlr4ts/atn/AtomTransition.js.map | 1 - .../antlr4ts/atn/BasicBlockStartState.d.ts | 13 - .../antlr4ts/atn/BasicBlockStartState.js | 31 - .../antlr4ts/atn/BasicBlockStartState.js.map | 1 - node_modules/antlr4ts/atn/BasicState.d.ts | 13 - node_modules/antlr4ts/atn/BasicState.js | 31 - node_modules/antlr4ts/atn/BasicState.js.map | 1 - node_modules/antlr4ts/atn/BlockEndState.d.ts | 12 - node_modules/antlr4ts/atn/BlockEndState.js | 28 - .../antlr4ts/atn/BlockEndState.js.map | 1 
- .../antlr4ts/atn/BlockStartState.d.ts | 10 - node_modules/antlr4ts/atn/BlockStartState.js | 13 - .../antlr4ts/atn/BlockStartState.js.map | 1 - .../antlr4ts/atn/CodePointTransitions.d.ts | 27 - .../antlr4ts/atn/CodePointTransitions.js | 50 - .../antlr4ts/atn/CodePointTransitions.js.map | 1 - node_modules/antlr4ts/atn/ConflictInfo.d.ts | 35 - node_modules/antlr4ts/atn/ConflictInfo.js | 69 - node_modules/antlr4ts/atn/ConflictInfo.js.map | 1 - .../antlr4ts/atn/ContextSensitivityInfo.d.ts | 39 - .../antlr4ts/atn/ContextSensitivityInfo.js | 59 - .../atn/ContextSensitivityInfo.js.map | 1 - .../antlr4ts/atn/DecisionEventInfo.d.ts | 54 - .../antlr4ts/atn/DecisionEventInfo.js | 51 - .../antlr4ts/atn/DecisionEventInfo.js.map | 1 - node_modules/antlr4ts/atn/DecisionInfo.d.ts | 201 - node_modules/antlr4ts/atn/DecisionInfo.js | 212 - node_modules/antlr4ts/atn/DecisionInfo.js.map | 1 - node_modules/antlr4ts/atn/DecisionState.d.ts | 10 - node_modules/antlr4ts/atn/DecisionState.js | 19 - .../antlr4ts/atn/DecisionState.js.map | 1 - .../antlr4ts/atn/EpsilonTransition.d.ts | 24 - .../antlr4ts/atn/EpsilonTransition.js | 65 - .../antlr4ts/atn/EpsilonTransition.js.map | 1 - node_modules/antlr4ts/atn/ErrorInfo.d.ts | 32 - node_modules/antlr4ts/atn/ErrorInfo.js | 52 - node_modules/antlr4ts/atn/ErrorInfo.js.map | 1 - node_modules/antlr4ts/atn/InvalidState.d.ts | 13 - node_modules/antlr4ts/atn/InvalidState.js | 30 - node_modules/antlr4ts/atn/InvalidState.js.map | 1 - node_modules/antlr4ts/atn/LL1Analyzer.d.ts | 98 - node_modules/antlr4ts/atn/LL1Analyzer.js | 221 - node_modules/antlr4ts/atn/LL1Analyzer.js.map | 1 - .../antlr4ts/atn/LexerATNSimulator.d.ts | 153 - .../antlr4ts/atn/LexerATNSimulator.js | 716 - .../antlr4ts/atn/LexerATNSimulator.js.map | 1 - node_modules/antlr4ts/atn/LexerAction.d.ts | 47 - node_modules/antlr4ts/atn/LexerAction.js | 7 - node_modules/antlr4ts/atn/LexerAction.js.map | 1 - .../antlr4ts/atn/LexerActionExecutor.d.ts | 104 - .../antlr4ts/atn/LexerActionExecutor.js | 
200 - .../antlr4ts/atn/LexerActionExecutor.js.map | 1 - .../antlr4ts/atn/LexerActionType.d.ts | 44 - node_modules/antlr4ts/atn/LexerActionType.js | 50 - .../antlr4ts/atn/LexerActionType.js.map | 1 - .../antlr4ts/atn/LexerChannelAction.d.ts | 48 - .../antlr4ts/atn/LexerChannelAction.js | 104 - .../antlr4ts/atn/LexerChannelAction.js.map | 1 - .../antlr4ts/atn/LexerCustomAction.d.ts | 73 - .../antlr4ts/atn/LexerCustomAction.js | 128 - .../antlr4ts/atn/LexerCustomAction.js.map | 1 - .../atn/LexerIndexedCustomAction.d.ts | 74 - .../antlr4ts/atn/LexerIndexedCustomAction.js | 134 - .../atn/LexerIndexedCustomAction.js.map | 1 - .../antlr4ts/atn/LexerModeAction.d.ts | 48 - node_modules/antlr4ts/atn/LexerModeAction.js | 104 - .../antlr4ts/atn/LexerModeAction.js.map | 1 - .../antlr4ts/atn/LexerMoreAction.d.ts | 47 - node_modules/antlr4ts/atn/LexerMoreAction.js | 95 - .../antlr4ts/atn/LexerMoreAction.js.map | 1 - .../antlr4ts/atn/LexerPopModeAction.d.ts | 47 - .../antlr4ts/atn/LexerPopModeAction.js | 95 - .../antlr4ts/atn/LexerPopModeAction.js.map | 1 - .../antlr4ts/atn/LexerPushModeAction.d.ts | 48 - .../antlr4ts/atn/LexerPushModeAction.js | 104 - .../antlr4ts/atn/LexerPushModeAction.js.map | 1 - .../antlr4ts/atn/LexerSkipAction.d.ts | 47 - node_modules/antlr4ts/atn/LexerSkipAction.js | 95 - .../antlr4ts/atn/LexerSkipAction.js.map | 1 - .../antlr4ts/atn/LexerTypeAction.d.ts | 47 - node_modules/antlr4ts/atn/LexerTypeAction.js | 103 - .../antlr4ts/atn/LexerTypeAction.js.map | 1 - .../antlr4ts/atn/LookaheadEventInfo.d.ts | 37 - .../antlr4ts/atn/LookaheadEventInfo.js | 51 - .../antlr4ts/atn/LookaheadEventInfo.js.map | 1 - node_modules/antlr4ts/atn/LoopEndState.d.ts | 11 - node_modules/antlr4ts/atn/LoopEndState.js | 28 - node_modules/antlr4ts/atn/LoopEndState.js.map | 1 - .../antlr4ts/atn/NotSetTransition.d.ts | 14 - node_modules/antlr4ts/atn/NotSetTransition.js | 48 - .../antlr4ts/atn/NotSetTransition.js.map | 1 - .../antlr4ts/atn/OrderedATNConfigSet.d.ts | 23 - 
.../antlr4ts/atn/OrderedATNConfigSet.js | 54 - .../antlr4ts/atn/OrderedATNConfigSet.js.map | 1 - node_modules/antlr4ts/atn/ParseInfo.d.ts | 79 - node_modules/antlr4ts/atn/ParseInfo.js | 161 - node_modules/antlr4ts/atn/ParseInfo.js.map | 1 - .../antlr4ts/atn/ParserATNSimulator.d.ts | 567 - .../antlr4ts/atn/ParserATNSimulator.js | 2272 - .../antlr4ts/atn/ParserATNSimulator.js.map | 1 - .../antlr4ts/atn/PlusBlockStartState.d.ts | 16 - .../antlr4ts/atn/PlusBlockStartState.js | 32 - .../antlr4ts/atn/PlusBlockStartState.js.map | 1 - .../antlr4ts/atn/PlusLoopbackState.d.ts | 12 - .../antlr4ts/atn/PlusLoopbackState.js | 30 - .../antlr4ts/atn/PlusLoopbackState.js.map | 1 - .../atn/PrecedencePredicateTransition.d.ts | 21 - .../atn/PrecedencePredicateTransition.js | 62 - .../atn/PrecedencePredicateTransition.js.map | 1 - .../antlr4ts/atn/PredicateEvalInfo.d.ts | 54 - .../antlr4ts/atn/PredicateEvalInfo.js | 62 - .../antlr4ts/atn/PredicateEvalInfo.js.map | 1 - .../antlr4ts/atn/PredicateTransition.d.ts | 25 - .../antlr4ts/atn/PredicateTransition.js | 65 - .../antlr4ts/atn/PredicateTransition.js.map | 1 - .../antlr4ts/atn/PredictionContext.d.ts | 89 - .../antlr4ts/atn/PredictionContext.js | 690 - .../antlr4ts/atn/PredictionContext.js.map | 1 - .../antlr4ts/atn/PredictionContextCache.d.ts | 40 - .../antlr4ts/atn/PredictionContextCache.js | 140 - .../atn/PredictionContextCache.js.map | 1 - node_modules/antlr4ts/atn/PredictionMode.d.ts | 89 - node_modules/antlr4ts/atn/PredictionMode.js | 160 - .../antlr4ts/atn/PredictionMode.js.map | 1 - .../antlr4ts/atn/ProfilingATNSimulator.d.ts | 55 - .../antlr4ts/atn/ProfilingATNSimulator.js | 266 - .../antlr4ts/atn/ProfilingATNSimulator.js.map | 1 - .../antlr4ts/atn/RangeTransition.d.ts | 17 - node_modules/antlr4ts/atn/RangeTransition.js | 57 - .../antlr4ts/atn/RangeTransition.js.map | 1 - node_modules/antlr4ts/atn/RuleStartState.d.ts | 13 - node_modules/antlr4ts/atn/RuleStartState.js | 32 - .../antlr4ts/atn/RuleStartState.js.map | 1 - 
node_modules/antlr4ts/atn/RuleStopState.d.ts | 15 - node_modules/antlr4ts/atn/RuleStopState.js | 38 - .../antlr4ts/atn/RuleStopState.js.map | 1 - node_modules/antlr4ts/atn/RuleTransition.d.ts | 22 - node_modules/antlr4ts/atn/RuleTransition.js | 55 - .../antlr4ts/atn/RuleTransition.js.map | 1 - .../antlr4ts/atn/SemanticContext.d.ts | 143 - node_modules/antlr4ts/atn/SemanticContext.js | 479 - .../antlr4ts/atn/SemanticContext.js.map | 1 - node_modules/antlr4ts/atn/SetTransition.d.ts | 17 - node_modules/antlr4ts/atn/SetTransition.js | 65 - .../antlr4ts/atn/SetTransition.js.map | 1 - node_modules/antlr4ts/atn/SimulatorState.d.ts | 17 - node_modules/antlr4ts/atn/SimulatorState.js | 35 - .../antlr4ts/atn/SimulatorState.js.map | 1 - .../antlr4ts/atn/StarBlockStartState.d.ts | 10 - .../antlr4ts/atn/StarBlockStartState.js | 27 - .../antlr4ts/atn/StarBlockStartState.js.map | 1 - .../antlr4ts/atn/StarLoopEntryState.d.ts | 37 - .../antlr4ts/atn/StarLoopEntryState.js | 56 - .../antlr4ts/atn/StarLoopEntryState.js.map | 1 - .../antlr4ts/atn/StarLoopbackState.d.ts | 11 - .../antlr4ts/atn/StarLoopbackState.js | 30 - .../antlr4ts/atn/StarLoopbackState.js.map | 1 - .../antlr4ts/atn/TokensStartState.d.ts | 10 - node_modules/antlr4ts/atn/TokensStartState.js | 28 - .../antlr4ts/atn/TokensStartState.js.map | 1 - node_modules/antlr4ts/atn/Transition.d.ts | 38 - node_modules/antlr4ts/atn/Transition.js | 73 - node_modules/antlr4ts/atn/Transition.js.map | 1 - node_modules/antlr4ts/atn/TransitionType.d.ts | 16 - node_modules/antlr4ts/atn/TransitionType.js | 23 - .../antlr4ts/atn/TransitionType.js.map | 1 - .../antlr4ts/atn/WildcardTransition.d.ts | 13 - .../antlr4ts/atn/WildcardTransition.js | 47 - .../antlr4ts/atn/WildcardTransition.js.map | 1 - node_modules/antlr4ts/atn/index.d.ts | 73 - node_modules/antlr4ts/atn/index.js | 87 - node_modules/antlr4ts/atn/index.js.map | 1 - .../antlr4ts/dfa/AcceptStateInfo.d.ts | 33 - node_modules/antlr4ts/dfa/AcceptStateInfo.js | 41 - 
.../antlr4ts/dfa/AcceptStateInfo.js.map | 1 - node_modules/antlr4ts/dfa/DFA.d.ts | 94 - node_modules/antlr4ts/dfa/DFA.js | 174 - node_modules/antlr4ts/dfa/DFA.js.map | 1 - node_modules/antlr4ts/dfa/DFASerializer.d.ts | 23 - node_modules/antlr4ts/dfa/DFASerializer.js | 141 - .../antlr4ts/dfa/DFASerializer.js.map | 1 - node_modules/antlr4ts/dfa/DFAState.d.ts | 95 - node_modules/antlr4ts/dfa/DFAState.js | 230 - node_modules/antlr4ts/dfa/DFAState.js.map | 1 - .../antlr4ts/dfa/LexerDFASerializer.d.ts | 10 - .../antlr4ts/dfa/LexerDFASerializer.js | 36 - .../antlr4ts/dfa/LexerDFASerializer.js.map | 1 - node_modules/antlr4ts/dfa/index.d.ts | 9 - node_modules/antlr4ts/dfa/index.js | 22 - node_modules/antlr4ts/dfa/index.js.map | 1 - node_modules/antlr4ts/index.d.ts | 49 - node_modules/antlr4ts/index.js | 65 - node_modules/antlr4ts/index.js.map | 1 - node_modules/antlr4ts/misc/Args.d.ts | 13 - node_modules/antlr4ts/misc/Args.js | 23 - node_modules/antlr4ts/misc/Args.js.map | 1 - .../antlr4ts/misc/Array2DHashMap.d.ts | 20 - node_modules/antlr4ts/misc/Array2DHashMap.js | 82 - .../antlr4ts/misc/Array2DHashMap.js.map | 1 - .../antlr4ts/misc/Array2DHashSet.d.ts | 62 - node_modules/antlr4ts/misc/Array2DHashSet.js | 366 - .../antlr4ts/misc/Array2DHashSet.js.map | 1 - .../misc/ArrayEqualityComparator.d.ts | 32 - .../antlr4ts/misc/ArrayEqualityComparator.js | 71 - .../misc/ArrayEqualityComparator.js.map | 1 - node_modules/antlr4ts/misc/Arrays.d.ts | 18 - node_modules/antlr4ts/misc/Arrays.js | 69 - node_modules/antlr4ts/misc/Arrays.js.map | 1 - node_modules/antlr4ts/misc/BitSet.d.ts | 268 - node_modules/antlr4ts/misc/BitSet.js | 671 - node_modules/antlr4ts/misc/BitSet.js.map | 1 - node_modules/antlr4ts/misc/Character.d.ts | 7 - node_modules/antlr4ts/misc/Character.js | 20 - node_modules/antlr4ts/misc/Character.js.map | 1 - .../misc/DefaultEqualityComparator.d.ts | 31 - .../misc/DefaultEqualityComparator.js | 70 - .../misc/DefaultEqualityComparator.js.map | 1 - 
.../antlr4ts/misc/EqualityComparator.d.ts | 29 - .../antlr4ts/misc/EqualityComparator.js | 7 - .../antlr4ts/misc/EqualityComparator.js.map | 1 - node_modules/antlr4ts/misc/IntSet.d.ts | 129 - node_modules/antlr4ts/misc/IntSet.js | 7 - node_modules/antlr4ts/misc/IntSet.js.map | 1 - node_modules/antlr4ts/misc/IntegerList.d.ts | 69 - node_modules/antlr4ts/misc/IntegerList.js | 293 - node_modules/antlr4ts/misc/IntegerList.js.map | 1 - node_modules/antlr4ts/misc/IntegerStack.d.ts | 15 - node_modules/antlr4ts/misc/IntegerStack.js | 29 - .../antlr4ts/misc/IntegerStack.js.map | 1 - .../antlr4ts/misc/InterpreterDataReader.d.ts | 39 - .../antlr4ts/misc/InterpreterDataReader.js | 176 - .../misc/InterpreterDataReader.js.map | 1 - node_modules/antlr4ts/misc/Interval.d.ts | 57 - node_modules/antlr4ts/misc/Interval.js | 142 - node_modules/antlr4ts/misc/Interval.js.map | 1 - node_modules/antlr4ts/misc/IntervalSet.d.ts | 96 - node_modules/antlr4ts/misc/IntervalSet.js | 645 - node_modules/antlr4ts/misc/IntervalSet.js.map | 1 - node_modules/antlr4ts/misc/MultiMap.d.ts | 9 - node_modules/antlr4ts/misc/MultiMap.js | 32 - node_modules/antlr4ts/misc/MultiMap.js.map | 1 - node_modules/antlr4ts/misc/MurmurHash.d.ts | 45 - node_modules/antlr4ts/misc/MurmurHash.js | 114 - node_modules/antlr4ts/misc/MurmurHash.js.map | 1 - .../misc/ObjectEqualityComparator.d.ts | 32 - .../antlr4ts/misc/ObjectEqualityComparator.js | 58 - .../misc/ObjectEqualityComparator.js.map | 1 - .../misc/ParseCancellationException.d.ts | 18 - .../misc/ParseCancellationException.js | 28 - .../misc/ParseCancellationException.js.map | 1 - node_modules/antlr4ts/misc/Stubs.d.ts | 31 - node_modules/antlr4ts/misc/Stubs.js | 7 - node_modules/antlr4ts/misc/Stubs.js.map | 1 - node_modules/antlr4ts/misc/UUID.d.ts | 13 - node_modules/antlr4ts/misc/UUID.js | 53 - node_modules/antlr4ts/misc/UUID.js.map | 1 - node_modules/antlr4ts/misc/Utils.d.ts | 15 - node_modules/antlr4ts/misc/Utils.js | 174 - node_modules/antlr4ts/misc/Utils.js.map 
| 1 - node_modules/antlr4ts/misc/index.d.ts | 25 - node_modules/antlr4ts/misc/index.js | 44 - node_modules/antlr4ts/misc/index.js.map | 1 - node_modules/antlr4ts/package.json | 23 - .../tree/AbstractParseTreeVisitor.d.ts | 103 - .../antlr4ts/tree/AbstractParseTreeVisitor.js | 144 - .../tree/AbstractParseTreeVisitor.js.map | 1 - node_modules/antlr4ts/tree/ErrorNode.d.ts | 17 - node_modules/antlr4ts/tree/ErrorNode.js | 35 - node_modules/antlr4ts/tree/ErrorNode.js.map | 1 - node_modules/antlr4ts/tree/ParseTree.d.ts | 36 - node_modules/antlr4ts/tree/ParseTree.js | 7 - node_modules/antlr4ts/tree/ParseTree.js.map | 1 - .../antlr4ts/tree/ParseTreeListener.d.ts | 26 - .../antlr4ts/tree/ParseTreeListener.js | 7 - .../antlr4ts/tree/ParseTreeListener.js.map | 1 - .../antlr4ts/tree/ParseTreeProperty.d.ts | 28 - .../antlr4ts/tree/ParseTreeProperty.js | 41 - .../antlr4ts/tree/ParseTreeProperty.js.map | 1 - .../antlr4ts/tree/ParseTreeVisitor.d.ts | 48 - .../antlr4ts/tree/ParseTreeVisitor.js | 7 - .../antlr4ts/tree/ParseTreeVisitor.js.map | 1 - .../antlr4ts/tree/ParseTreeWalker.d.ts | 35 - node_modules/antlr4ts/tree/ParseTreeWalker.js | 104 - .../antlr4ts/tree/ParseTreeWalker.js.map | 1 - node_modules/antlr4ts/tree/RuleNode.d.ts | 21 - node_modules/antlr4ts/tree/RuleNode.js | 11 - node_modules/antlr4ts/tree/RuleNode.js.map | 1 - node_modules/antlr4ts/tree/SyntaxTree.d.ts | 29 - node_modules/antlr4ts/tree/SyntaxTree.js | 7 - node_modules/antlr4ts/tree/SyntaxTree.js.map | 1 - node_modules/antlr4ts/tree/TerminalNode.d.ts | 27 - node_modules/antlr4ts/tree/TerminalNode.js | 91 - .../antlr4ts/tree/TerminalNode.js.map | 1 - node_modules/antlr4ts/tree/Tree.d.ts | 36 - node_modules/antlr4ts/tree/Tree.js | 7 - node_modules/antlr4ts/tree/Tree.js.map | 1 - node_modules/antlr4ts/tree/Trees.d.ts | 75 - node_modules/antlr4ts/tree/Trees.js | 243 - node_modules/antlr4ts/tree/Trees.js.map | 1 - node_modules/antlr4ts/tree/index.d.ts | 16 - node_modules/antlr4ts/tree/index.js | 29 - 
node_modules/antlr4ts/tree/index.js.map | 1 - node_modules/antlr4ts/tree/pattern/Chunk.d.ts | 17 - node_modules/antlr4ts/tree/pattern/Chunk.js | 23 - .../antlr4ts/tree/pattern/Chunk.js.map | 1 - .../antlr4ts/tree/pattern/ParseTreeMatch.d.ts | 124 - .../antlr4ts/tree/pattern/ParseTreeMatch.js | 179 - .../tree/pattern/ParseTreeMatch.js.map | 1 - .../tree/pattern/ParseTreePattern.d.ts | 98 - .../antlr4ts/tree/pattern/ParseTreePattern.js | 157 - .../tree/pattern/ParseTreePattern.js.map | 1 - .../tree/pattern/ParseTreePatternMatcher.d.ts | 166 - .../tree/pattern/ParseTreePatternMatcher.js | 477 - .../pattern/ParseTreePatternMatcher.js.map | 1 - .../antlr4ts/tree/pattern/RuleTagToken.d.ts | 122 - .../antlr4ts/tree/pattern/RuleTagToken.js | 197 - .../antlr4ts/tree/pattern/RuleTagToken.js.map | 1 - .../antlr4ts/tree/pattern/TagChunk.d.ts | 59 - .../antlr4ts/tree/pattern/TagChunk.js | 86 - .../antlr4ts/tree/pattern/TagChunk.js.map | 1 - .../antlr4ts/tree/pattern/TextChunk.d.ts | 35 - .../antlr4ts/tree/pattern/TextChunk.js | 69 - .../antlr4ts/tree/pattern/TextChunk.js.map | 1 - .../antlr4ts/tree/pattern/TokenTagToken.d.ts | 56 - .../antlr4ts/tree/pattern/TokenTagToken.js | 94 - .../tree/pattern/TokenTagToken.js.map | 1 - node_modules/antlr4ts/tree/pattern/index.d.ts | 9 - node_modules/antlr4ts/tree/pattern/index.js | 28 - .../antlr4ts/tree/pattern/index.js.map | 1 - node_modules/antlr4ts/tree/xpath/XPath.d.ts | 66 - node_modules/antlr4ts/tree/xpath/XPath.js | 196 - node_modules/antlr4ts/tree/xpath/XPath.js.map | 1 - .../antlr4ts/tree/xpath/XPathElement.d.ts | 19 - .../antlr4ts/tree/xpath/XPathElement.js | 34 - .../antlr4ts/tree/xpath/XPathElement.js.map | 1 - .../antlr4ts/tree/xpath/XPathLexer.d.ts | 36 - .../antlr4ts/tree/xpath/XPathLexer.js | 474 - .../antlr4ts/tree/xpath/XPathLexer.js.map | 1 - .../tree/xpath/XPathLexerErrorListener.d.ts | 10 - .../tree/xpath/XPathLexerErrorListener.js | 24 - .../tree/xpath/XPathLexerErrorListener.js.map | 1 - 
.../tree/xpath/XPathRuleAnywhereElement.d.ts | 14 - .../tree/xpath/XPathRuleAnywhereElement.js | 33 - .../xpath/XPathRuleAnywhereElement.js.map | 1 - .../antlr4ts/tree/xpath/XPathRuleElement.d.ts | 11 - .../antlr4ts/tree/xpath/XPathRuleElement.js | 42 - .../tree/xpath/XPathRuleElement.js.map | 1 - .../tree/xpath/XPathTokenAnywhereElement.d.ts | 11 - .../tree/xpath/XPathTokenAnywhereElement.js | 31 - .../xpath/XPathTokenAnywhereElement.js.map | 1 - .../tree/xpath/XPathTokenElement.d.ts | 11 - .../antlr4ts/tree/xpath/XPathTokenElement.js | 42 - .../tree/xpath/XPathTokenElement.js.map | 1 - .../xpath/XPathWildcardAnywhereElement.d.ts | 10 - .../xpath/XPathWildcardAnywhereElement.js | 35 - .../xpath/XPathWildcardAnywhereElement.js.map | 1 - .../tree/xpath/XPathWildcardElement.d.ts | 10 - .../tree/xpath/XPathWildcardElement.js | 39 - .../tree/xpath/XPathWildcardElement.js.map | 1 - node_modules/antlr4ts/tree/xpath/index.d.ts | 14 - node_modules/antlr4ts/tree/xpath/index.js | 27 - node_modules/antlr4ts/tree/xpath/index.js.map | 1 - node_modules/emoji-regex/LICENSE-MIT.txt | 20 - node_modules/emoji-regex/README.md | 105 - node_modules/emoji-regex/index.d.ts | 5 - node_modules/emoji-regex/index.js | 4 - node_modules/emoji-regex/package.json | 43 - node_modules/escape-string-regexp/index.d.ts | 18 - node_modules/escape-string-regexp/index.js | 13 - node_modules/escape-string-regexp/license | 9 - .../escape-string-regexp/package.json | 38 - node_modules/escape-string-regexp/readme.md | 34 - .../is-fullwidth-code-point/index.d.ts | 17 - node_modules/is-fullwidth-code-point/index.js | 50 - node_modules/is-fullwidth-code-point/license | 9 - .../is-fullwidth-code-point/package.json | 42 - .../is-fullwidth-code-point/readme.md | 39 - node_modules/lru-cache/LICENSE | 15 - node_modules/lru-cache/README.md | 166 - node_modules/lru-cache/index.js | 334 - node_modules/lru-cache/package.json | 34 - .../prettier-plugin-solidity/.eslintignore | 5 - .../prettier-plugin-solidity/.eslintrc 
| 9 - .../.github/FUNDING.yml | 1 - .../.github/dependabot.yml | 23 - .../.github/workflows/CI.yml | 83 - node_modules/prettier-plugin-solidity/.nvmrc | 1 - .../prettier-plugin-solidity/.prettierignore | 4 - .../prettier-plugin-solidity/.prettierrc | 4 - .../CODE_OF_CONDUCT.md | 76 - .../HOW_TO_PUBLISH.md | 39 - node_modules/prettier-plugin-solidity/LICENSE | 21 - .../prettier-plugin-solidity/README.md | 352 - .../prettier-plugin-solidity/STYLEGUIDE.md | 382 - .../assets/telegram-badge.svg | 29 - .../prettier-plugin-solidity/jest.config.js | 40 - .../node_modules/.bin/prettier | 1 - .../node_modules/.bin/semver | 1 - .../prettier-plugin-solidity/package.json | 96 - .../scripts/generateIndexes.js | 16 - .../scripts/makeData.js | 23 - .../binary-operator-printers/arithmetic.js | 53 - .../binary-operator-printers/assignment.js | 30 - .../src/binary-operator-printers/bit.js | 6 - .../binary-operator-printers/comparison.js | 38 - .../exponentiation.js | 26 - .../src/binary-operator-printers/index.js | 13 - .../src/binary-operator-printers/logical.js | 41 - .../src/binary-operator-printers/shift.js | 6 - .../prettier-plugin-solidity/src/clean.js | 8 - .../src/comments/handler.js | 93 - .../comments/handlers/ContractDefinition.js | 60 - .../src/comments/ignore.js | 33 - .../src/comments/index.js | 4 - .../src/comments/printer.js | 64 - .../prettier-plugin-solidity/src/index.js | 61 - .../prettier-plugin-solidity/src/loc.js | 16 - .../src/nodes/ArrayTypeName.js | 10 - .../src/nodes/AssemblyAssignment.js | 16 - .../src/nodes/AssemblyBlock.js | 25 - .../src/nodes/AssemblyCall.js | 16 - .../src/nodes/AssemblyCase.js | 9 - .../src/nodes/AssemblyFor.js | 18 - .../src/nodes/AssemblyFunctionDefinition.js | 35 - .../src/nodes/AssemblyIf.js | 10 - .../src/nodes/AssemblyLocalDefinition.js | 25 - .../src/nodes/AssemblyMemberAccess.js | 9 - .../src/nodes/AssemblySwitch.js | 16 - .../src/nodes/BinaryOperation.js | 14 - .../src/nodes/Block.js | 27 - .../src/nodes/BooleanLiteral.js | 5 - 
.../src/nodes/Break.js | 5 - .../src/nodes/BreakStatement.js | 5 - .../src/nodes/CatchClause.js | 21 - .../src/nodes/Conditional.js | 22 - .../src/nodes/ContinueStatement.js | 5 - .../src/nodes/ContractDefinition.js | 47 - .../src/nodes/CustomErrorDefinition.js | 18 - .../src/nodes/DecimalNumber.js | 5 - .../src/nodes/DoWhileStatement.js | 22 - .../src/nodes/ElementaryTypeName.js | 10 - .../src/nodes/EmitStatement.js | 5 - .../src/nodes/EnumDefinition.js | 22 - .../src/nodes/EnumValue.js | 5 - .../src/nodes/EventDefinition.js | 20 - .../src/nodes/ExpressionStatement.js | 32 - .../src/nodes/FileLevelConstant.js | 12 - .../src/nodes/ForStatement.js | 46 - .../src/nodes/FunctionCall.js | 62 - .../src/nodes/FunctionDefinition.js | 128 - .../src/nodes/FunctionTypeName.js | 44 - .../src/nodes/HexLiteral.js | 16 - .../src/nodes/HexNumber.js | 5 - .../src/nodes/Identifier.js | 5 - .../src/nodes/IfStatement.js | 62 - .../src/nodes/ImportDirective.js | 54 - .../src/nodes/IndexAccess.js | 38 - .../src/nodes/IndexRangeAccess.js | 12 - .../src/nodes/InheritanceSpecifier.js | 15 - .../src/nodes/InlineAssemblyStatement.js | 5 - .../src/nodes/LabelDefinition.js | 11 - .../src/nodes/Mapping.js | 11 - .../src/nodes/MemberAccess.js | 127 - .../src/nodes/ModifierDefinition.js | 56 - .../src/nodes/ModifierInvocation.js | 22 - .../src/nodes/NameValueExpression.js | 10 - .../src/nodes/NameValueList.js | 21 - .../src/nodes/NewExpression.js | 5 - .../src/nodes/NumberLiteral.js | 8 - .../src/nodes/PragmaDirective.js | 5 - .../src/nodes/ReturnStatement.js | 24 - .../src/nodes/RevertStatement.js | 5 - .../src/nodes/SourceUnit.js | 16 - .../src/nodes/StateVariableDeclaration.js | 27 - .../src/nodes/StringLiteral.js | 21 - .../src/nodes/StructDefinition.js | 23 - .../src/nodes/ThrowStatement.js | 5 - .../src/nodes/TryStatement.js | 45 - .../src/nodes/TupleExpression.js | 25 - .../src/nodes/TypeDefinition.js | 5 - .../src/nodes/TypeNameExpression.js | 5 - .../src/nodes/UnaryOperation.js | 12 - 
.../src/nodes/UncheckedStatement.js | 11 - .../src/nodes/UserDefinedTypeName.js | 5 - .../src/nodes/UsingForDeclaration.js | 11 - .../src/nodes/VariableDeclaration.js | 56 - .../src/nodes/VariableDeclarationStatement.js | 44 - .../src/nodes/WhileStatement.js | 21 - .../src/nodes/index.js | 77 - .../src/nodes/print-comments.js | 27 - .../src/nodes/print-preserving-empty-lines.js | 53 - .../src/nodes/print-separated-item.js | 17 - .../src/nodes/print-separated-list.js | 22 - .../prettier-plugin-solidity/src/options.js | 71 - .../prettier-plugin-solidity/src/parser.js | 196 - .../src/prettier-comments/common/util.js | 684 - .../src/prettier-comments/index.js | 3 - .../prettier-comments/language-js/comments.js | 844 - .../prettier-plugin-solidity/src/printer.js | 43 - .../tests/config/.prettierrc | 5 - .../tests/config/format-test.js | 458 - .../tests/config/require-prettier.js | 5 - .../tests/config/require-standalone.js | 25 - .../tests/config/setup.js | 7 - .../tests/config/utils/check-parsers.js | 207 - .../tests/config/utils/compile-contract.js | 36 - .../config/utils/consistent-end-of-line.js | 13 - .../tests/config/utils/create-snapshot.js | 98 - .../utils/stringify-options-for-title.js | 15 - .../config/utils/visualize-end-of-line.js | 18 - .../tests/config/utils/visualize-range.js | 40 - .../format/AddressPayable/AddressPayable.sol | 10 - .../__snapshots__/jsfmt.spec.js.snap | 37 - .../tests/format/AddressPayable/jsfmt.spec.js | 1 - .../AllSolidityFeatures.sol | 533 - .../__snapshots__/jsfmt.spec.js.snap | 1146 - .../format/AllSolidityFeatures/jsfmt.spec.js | 1 - .../tests/format/Arrays/Arrays.sol | 17 - .../Arrays/__snapshots__/jsfmt.spec.js.snap | 63 - .../tests/format/Arrays/jsfmt.spec.js | 1 - .../tests/format/Assembly/Assembly.sol | 180 - .../Assembly/__snapshots__/jsfmt.spec.js.snap | 406 - .../tests/format/Assembly/jsfmt.spec.js | 1 - .../format/BasicIterator/BasicIterator.sol | 44 - .../__snapshots__/jsfmt.spec.js.snap | 102 - 
.../tests/format/BasicIterator/jsfmt.spec.js | 1 - .../BinaryOperators/BinaryOperators.sol | 186 - .../format/BinaryOperators/Parentheses.sol | 132 - .../__snapshots__/jsfmt.spec.js.snap | 952 - .../format/BinaryOperators/jsfmt.spec.js | 1 - .../BreakingChangesV0.7.4.sol | 9 - .../__snapshots__/jsfmt.spec.js.snap | 198 - .../BreakingChangesV0.7.4/jsfmt.spec.js | 14 - .../BreakingChangesV0.8.0.sol | 43 - .../__snapshots__/jsfmt.spec.js.snap | 815 - .../BreakingChangesV0.8.0/jsfmt.spec.js | 26 - .../tests/format/Comments/Comments.sol | 104 - .../Comments/__snapshots__/jsfmt.spec.js.snap | 244 - .../tests/format/Comments/jsfmt.spec.js | 1 - .../tests/format/Conditional/Conditional.sol | 26 - .../__snapshots__/jsfmt.spec.js.snap | 80 - .../tests/format/Conditional/jsfmt.spec.js | 1 - .../format/Constructors/Constructors.sol | 8 - .../__snapshots__/jsfmt.spec.js.snap | 54 - .../tests/format/Constructors/jsfmt.spec.js | 1 - .../ContractDefinitions.sol | 2 - .../__snapshots__/jsfmt.spec.js.snap | 22 - .../format/ContractDefinitions/jsfmt.spec.js | 1 - .../format/CustomErrors/CustomErrors.sol | 20 - .../__snapshots__/jsfmt.spec.js.snap | 50 - .../tests/format/CustomErrors/jsfmt.spec.js | 1 - .../EnumDefinitions/EnumDefinitions.sol | 4 - .../__snapshots__/jsfmt.spec.js.snap | 72 - .../format/EnumDefinitions/jsfmt.spec.js | 2 - .../tests/format/Etc/Etc.sol | 48 - .../Etc/__snapshots__/jsfmt.spec.js.snap | 134 - .../tests/format/Etc/jsfmt.spec.js | 1 - .../ExplicitVariableTypes.sol | 36 - .../__snapshots__/jsfmt.spec.js.snap | 258 - .../ExplicitVariableTypes/jsfmt.spec.js | 9 - .../format/ForStatements/ForStatements.sol | 43 - .../__snapshots__/jsfmt.spec.js.snap | 108 - .../tests/format/ForStatements/jsfmt.spec.js | 1 - .../format/FunctionCalls/FunctionCalls.sol | 40 - .../__snapshots__/jsfmt.spec.js.snap | 310 - .../tests/format/FunctionCalls/jsfmt.spec.js | 2 - .../FunctionDefinitions.sol | 169 - .../__snapshots__/jsfmt.spec.js.snap | 875 - 
.../format/FunctionDefinitions/jsfmt.spec.js | 1 - .../tests/format/HexLiteral/HexLiteral.sol | 7 - .../__snapshots__/jsfmt.spec.js.snap | 57 - .../tests/format/HexLiteral/jsfmt.spec.js | 8 - .../format/IfStatements/IfStatements.sol | 112 - .../__snapshots__/jsfmt.spec.js.snap | 534 - .../tests/format/IfStatements/jsfmt.spec.js | 1 - .../tests/format/Immutable/Immutable.sol | 20 - .../__snapshots__/jsfmt.spec.js.snap | 53 - .../tests/format/Immutable/jsfmt.spec.js | 1 - .../ImportDirective/ImportDirectives.sol | 5 - .../__snapshots__/jsfmt.spec.js.snap | 46 - .../format/ImportDirective/jsfmt.spec.js | 2 - .../tests/format/Inbox/Inbox.sol | 33 - .../Inbox/__snapshots__/jsfmt.spec.js.snap | 79 - .../tests/format/Inbox/jsfmt.spec.js | 1 - .../tests/format/IndexOf/IndexOf.sol | 67 - .../IndexOf/__snapshots__/jsfmt.spec.js.snap | 144 - .../tests/format/IndexOf/jsfmt.spec.js | 1 - .../IndexRangeAccess/IndexRangeAccess.sol | 9 - .../__snapshots__/jsfmt.spec.js.snap | 31 - .../format/IndexRangeAccess/jsfmt.spec.js | 1 - .../InheritanceSpecifier.sol | 2 - .../__snapshots__/jsfmt.spec.js.snap | 24 - .../format/InheritanceSpecifier/jsfmt.spec.js | 1 - .../tests/format/Issues/Issue205.sol | 12 - .../tests/format/Issues/Issue289.sol | 6 - .../tests/format/Issues/Issue355.sol | 12 - .../tests/format/Issues/Issue385.sol | 13 - .../tests/format/Issues/Issue564.sol | 9 - .../Issues/__snapshots__/jsfmt.spec.js.snap | 174 - .../tests/format/Issues/jsfmt.spec.js | 1 - .../format/MemberAccess/MemberAccess.sol | 102 - .../__snapshots__/jsfmt.spec.js.snap | 243 - .../tests/format/MemberAccess/jsfmt.spec.js | 1 - .../ModifierDefinitions.sol | 6 - .../__snapshots__/jsfmt.spec.js.snap | 29 - .../format/ModifierDefinitions/jsfmt.spec.js | 1 - .../ModifierInvocations.sol | 17 - .../__snapshots__/jsfmt.spec.js.snap | 51 - .../format/ModifierInvocations/jsfmt.spec.js | 1 - .../MultipartStrings/MultipartStrings.sol | 10 - .../__snapshots__/jsfmt.spec.js.snap | 39 - 
.../format/MultipartStrings/jsfmt.spec.js | 1 - .../NameValueExpression.sol | 6 - .../__snapshots__/jsfmt.spec.js.snap | 68 - .../format/NameValueExpression/jsfmt.spec.js | 2 - .../tests/format/Ownable/Ownable.sol | 75 - .../Ownable/__snapshots__/jsfmt.spec.js.snap | 163 - .../tests/format/Ownable/jsfmt.spec.js | 1 - .../format/Parentheses/AddNoParentheses.sol | 92 - .../Parentheses/BitAndNoParentheses.sol | 92 - .../format/Parentheses/BitOrNoParentheses.sol | 92 - .../Parentheses/BitXorNoParentheses.sol | 92 - .../format/Parentheses/DivNoParentheses.sol | 92 - .../format/Parentheses/ExpNoParentheses.sol | 92 - .../format/Parentheses/LogicNoParentheses.sol | 20 - .../format/Parentheses/ModNoParentheses.sol | 92 - .../format/Parentheses/MulNoParentheses.sol | 92 - .../Parentheses/ShiftLNoParentheses.sol | 92 - .../Parentheses/ShiftRNoParentheses.sol | 92 - .../format/Parentheses/SubNoParentheses.sol | 92 - .../__snapshots__/jsfmt.spec.js.snap | 2237 - .../tests/format/Parentheses/jsfmt.spec.js | 1 - .../tests/format/Pragma/Pragma.sol | 8 - .../Pragma/__snapshots__/jsfmt.spec.js.snap | 29 - .../tests/format/Pragma/jsfmt.spec.js | 1 - .../format/PrettierIgnore/PrettierIgnore.sol | 39 - .../__snapshots__/jsfmt.spec.js.snap | 87 - .../tests/format/PrettierIgnore/jsfmt.spec.js | 1 - .../tests/format/Proxy/Proxy.sol | 52 - .../Proxy/__snapshots__/jsfmt.spec.js.snap | 127 - .../tests/format/Proxy/jsfmt.spec.js | 1 - .../__snapshots__/jsfmt.spec.js.snap | 19 - .../RespectDefaultOptions/jsfmt.spec.js | 5 - .../respect-default-options.js | 3 - .../SampleCrowdsale/SampleCrowdsale.sol | 48 - .../__snapshots__/jsfmt.spec.js.snap | 113 - .../format/SampleCrowdsale/jsfmt.spec.js | 1 - .../format/SimpleAuction/SimpleAuction.sol | 123 - .../__snapshots__/jsfmt.spec.js.snap | 250 - .../tests/format/SimpleAuction/jsfmt.spec.js | 1 - .../format/SimpleStorage/SimpleStorage.sol | 14 - .../__snapshots__/jsfmt.spec.js.snap | 41 - .../tests/format/SimpleStorage/jsfmt.spec.js | 1 - 
.../SplittableCommodity.sol | 57 - .../__snapshots__/jsfmt.spec.js.snap | 135 - .../format/SplittableCommodity/jsfmt.spec.js | 1 - .../StateVariableDeclarations.sol | 3 - .../__snapshots__/jsfmt.spec.js.snap | 20 - .../StateVariableDeclarations/jsfmt.spec.js | 1 - .../format/StringLiteral/StringLiteral.sol | 11 - .../__snapshots__/jsfmt.spec.js.snap | 36 - .../tests/format/StringLiteral/jsfmt.spec.js | 1 - .../tests/format/StyleGuide/BlankLines.sol | 19 - .../format/StyleGuide/ControlStructures.sol | 57 - .../format/StyleGuide/FunctionDeclaration.sol | 135 - .../tests/format/StyleGuide/Mappings.sol | 9 - .../format/StyleGuide/MaximumLineLength.sol | 67 - .../StyleGuide/OtherRecommendations.sol | 29 - .../StyleGuide/VariableDeclarations.sol | 6 - .../StyleGuide/WhitespaceInExpressions.sol | 13 - .../__snapshots__/jsfmt.spec.js.snap | 772 - .../tests/format/StyleGuide/jsfmt.spec.js | 1 - .../tests/format/TryCatch/TryCatch.sol | 103 - .../TryCatch/__snapshots__/jsfmt.spec.js.snap | 241 - .../tests/format/TryCatch/jsfmt.spec.js | 1 - .../tests/format/Tupples/Tupples.sol | 25 - .../Tupples/__snapshots__/jsfmt.spec.js.snap | 74 - .../tests/format/Tupples/jsfmt.spec.js | 1 - .../format/TypeDefinition/TypeDefinition.sol | 12 - .../__snapshots__/jsfmt.spec.js.snap | 74 - .../tests/format/TypeDefinition/jsfmt.spec.js | 1 - .../WhileStatements/WhileStatements.sol | 37 - .../__snapshots__/jsfmt.spec.js.snap | 110 - .../format/WhileStatements/jsfmt.spec.js | 1 - .../format/WrongCompiler/WrongCompiler.sol | 3 - .../__snapshots__/jsfmt.spec.js.snap | 55 - .../tests/format/WrongCompiler/jsfmt.spec.js | 10 - .../tests/format/quotes/Quotes.sol | 24 - .../quotes/__snapshots__/jsfmt.spec.js.snap | 135 - .../tests/format/quotes/jsfmt.spec.js | 2 - .../strings/__snapshots__/jsfmt.spec.js.snap | 1579 - .../tests/format/strings/jsfmt.spec.js | 1 - .../tests/format/strings/strings.sol | 717 - .../__snapshots__/index.test.js.snap | 13 - .../binary-operator-printers/index.test.js | 5 - 
.../tests/unit/comments/printer.test.js | 9 - .../tests/unit/prettier-version.test.js | 26 - .../tests/unit/printer.test.js | 9 - node_modules/prettier/LICENSE | 7 - node_modules/prettier/README.md | 111 - node_modules/prettier/bin-prettier.js | 20229 ----- node_modules/prettier/doc.js | 5315 -- node_modules/prettier/esm/parser-angular.mjs | 45 - node_modules/prettier/esm/parser-babel.mjs | 1 - node_modules/prettier/esm/parser-espree.mjs | 1 - node_modules/prettier/esm/parser-flow.mjs | 1 - node_modules/prettier/esm/parser-glimmer.mjs | 1 - node_modules/prettier/esm/parser-graphql.mjs | 1 - node_modules/prettier/esm/parser-html.mjs | 111 - node_modules/prettier/esm/parser-markdown.mjs | 13 - node_modules/prettier/esm/parser-meriyah.mjs | 1 - node_modules/prettier/esm/parser-postcss.mjs | 1 - .../prettier/esm/parser-typescript.mjs | 13 - node_modules/prettier/esm/parser-yaml.mjs | 1 - node_modules/prettier/esm/standalone.mjs | 40 - node_modules/prettier/index.js | 61027 ---------------- node_modules/prettier/package.json | 23 - node_modules/prettier/parser-angular.js | 45 - node_modules/prettier/parser-babel.js | 1 - node_modules/prettier/parser-espree.js | 1 - node_modules/prettier/parser-flow.js | 1 - node_modules/prettier/parser-glimmer.js | 1 - node_modules/prettier/parser-graphql.js | 1 - node_modules/prettier/parser-html.js | 111 - node_modules/prettier/parser-markdown.js | 13 - node_modules/prettier/parser-meriyah.js | 1 - node_modules/prettier/parser-postcss.js | 1 - node_modules/prettier/parser-typescript.js | 13 - node_modules/prettier/parser-yaml.js | 1 - node_modules/prettier/standalone.js | 40 - node_modules/prettier/third-party.js | 11712 --- node_modules/semver/CHANGELOG.md | 111 - node_modules/semver/LICENSE | 15 - node_modules/semver/README.md | 566 - node_modules/semver/bin/semver.js | 173 - node_modules/semver/classes/comparator.js | 135 - node_modules/semver/classes/index.js | 5 - node_modules/semver/classes/range.js | 510 - 
node_modules/semver/classes/semver.js | 287 - node_modules/semver/functions/clean.js | 6 - node_modules/semver/functions/cmp.js | 48 - node_modules/semver/functions/coerce.js | 51 - .../semver/functions/compare-build.js | 7 - .../semver/functions/compare-loose.js | 3 - node_modules/semver/functions/compare.js | 5 - node_modules/semver/functions/diff.js | 23 - node_modules/semver/functions/eq.js | 3 - node_modules/semver/functions/gt.js | 3 - node_modules/semver/functions/gte.js | 3 - node_modules/semver/functions/inc.js | 15 - node_modules/semver/functions/lt.js | 3 - node_modules/semver/functions/lte.js | 3 - node_modules/semver/functions/major.js | 3 - node_modules/semver/functions/minor.js | 3 - node_modules/semver/functions/neq.js | 3 - node_modules/semver/functions/parse.js | 33 - node_modules/semver/functions/patch.js | 3 - node_modules/semver/functions/prerelease.js | 6 - node_modules/semver/functions/rcompare.js | 3 - node_modules/semver/functions/rsort.js | 3 - node_modules/semver/functions/satisfies.js | 10 - node_modules/semver/functions/sort.js | 3 - node_modules/semver/functions/valid.js | 6 - node_modules/semver/index.js | 48 - node_modules/semver/internal/constants.js | 17 - node_modules/semver/internal/debug.js | 9 - node_modules/semver/internal/identifiers.js | 23 - node_modules/semver/internal/parse-options.js | 11 - node_modules/semver/internal/re.js | 182 - node_modules/semver/package.json | 41 - node_modules/semver/preload.js | 2 - node_modules/semver/range.bnf | 16 - node_modules/semver/ranges/gtr.js | 4 - node_modules/semver/ranges/intersects.js | 7 - node_modules/semver/ranges/ltr.js | 4 - node_modules/semver/ranges/max-satisfying.js | 25 - node_modules/semver/ranges/min-satisfying.js | 24 - node_modules/semver/ranges/min-version.js | 60 - node_modules/semver/ranges/outside.js | 80 - node_modules/semver/ranges/simplify.js | 44 - node_modules/semver/ranges/subset.js | 222 - node_modules/semver/ranges/to-comparators.js | 8 - 
node_modules/semver/ranges/valid.js | 11 - .../solidity-comments-extractor/.eslintrc | 16 - .../.github/workflows/CI.yml | 85 - .../solidity-comments-extractor/.nvmrc | 1 - .../.prettierignore | 1 - .../solidity-comments-extractor/.prettierrc | 4 - .../solidity-comments-extractor/README.md | 27 - .../solidity-comments-extractor/index.js | 118 - .../solidity-comments-extractor/package.json | 20 - .../solidity-comments-extractor/test/test.js | 141 - node_modules/string-width/index.d.ts | 29 - node_modules/string-width/index.js | 47 - node_modules/string-width/license | 9 - .../node_modules/emoji-regex/LICENSE-MIT.txt | 20 - .../node_modules/emoji-regex/README.md | 73 - .../node_modules/emoji-regex/es2015/index.js | 6 - .../node_modules/emoji-regex/es2015/text.js | 6 - .../node_modules/emoji-regex/index.d.ts | 23 - .../node_modules/emoji-regex/index.js | 6 - .../node_modules/emoji-regex/package.json | 50 - .../node_modules/emoji-regex/text.js | 6 - node_modules/string-width/package.json | 56 - node_modules/string-width/readme.md | 50 - node_modules/strip-ansi/index.d.ts | 17 - node_modules/strip-ansi/index.js | 4 - node_modules/strip-ansi/license | 9 - node_modules/strip-ansi/package.json | 54 - node_modules/strip-ansi/readme.md | 46 - node_modules/yallist/LICENSE | 15 - node_modules/yallist/README.md | 204 - node_modules/yallist/iterator.js | 8 - node_modules/yallist/package.json | 29 - node_modules/yallist/yallist.js | 426 - package.json | 25 - src/Helios.sol | 577 +- src/interfaces/IHelios.sol | 34 - src/libraries/Math2.sol | 86 + src/libraries/SafeTransferLib.sol | 374 + src/swappers/XYKswapper.sol | 125 - src/test/Helios.t.sol | 269 - src/utils/ERC6909.sol | 118 + src/utils/ReentrancyGuard.sol | 55 + test/Helios.t.sol | 17 + yarn.lock | 97 - 1070 files changed, 1549 insertions(+), 267197 deletions(-) create mode 100644 .github/workflows/ci.yml delete mode 100644 .github/workflows/lints.yml delete mode 100644 .github/workflows/tests.yml delete mode 100644 
.gitpod.yml delete mode 160000 lib/ds-test create mode 160000 lib/solady delete mode 160000 lib/solbase delete mode 120000 node_modules/.bin/prettier delete mode 120000 node_modules/.bin/semver delete mode 100644 node_modules/.yarn-integrity delete mode 100644 node_modules/@solidity-parser/parser/LICENSE delete mode 100644 node_modules/@solidity-parser/parser/README.md delete mode 100644 node_modules/@solidity-parser/parser/dist/Solidity-JSLPOCIO.tokens delete mode 100644 node_modules/@solidity-parser/parser/dist/Solidity-WDNIKDDG.tokens delete mode 100644 node_modules/@solidity-parser/parser/dist/antlr/Solidity.tokens delete mode 100644 node_modules/@solidity-parser/parser/dist/antlr/SolidityLexer.tokens delete mode 100644 node_modules/@solidity-parser/parser/dist/index.cjs.js delete mode 100644 node_modules/@solidity-parser/parser/dist/index.cjs.js.map delete mode 100644 node_modules/@solidity-parser/parser/dist/index.iife.js delete mode 100644 node_modules/@solidity-parser/parser/dist/index.iife.js.map delete mode 100644 node_modules/@solidity-parser/parser/dist/src/ASTBuilder.d.ts delete mode 100644 node_modules/@solidity-parser/parser/dist/src/ErrorListener.d.ts delete mode 100644 node_modules/@solidity-parser/parser/dist/src/antlr/SolidityLexer.d.ts delete mode 100644 node_modules/@solidity-parser/parser/dist/src/antlr/SolidityListener.d.ts delete mode 100644 node_modules/@solidity-parser/parser/dist/src/antlr/SolidityParser.d.ts delete mode 100644 node_modules/@solidity-parser/parser/dist/src/antlr/SolidityVisitor.d.ts delete mode 100644 node_modules/@solidity-parser/parser/dist/src/ast-types.d.ts delete mode 100644 node_modules/@solidity-parser/parser/dist/src/index.d.ts delete mode 100644 node_modules/@solidity-parser/parser/dist/src/parser.d.ts delete mode 100644 node_modules/@solidity-parser/parser/dist/src/tokens-string.d.ts delete mode 100644 node_modules/@solidity-parser/parser/dist/src/tokens.d.ts delete mode 100644 
node_modules/@solidity-parser/parser/dist/src/types.d.ts delete mode 100644 node_modules/@solidity-parser/parser/dist/test/ast.d.ts delete mode 100644 node_modules/@solidity-parser/parser/dist/test/index.d.ts delete mode 100644 node_modules/@solidity-parser/parser/dist/test/utils.d.ts delete mode 100644 node_modules/@solidity-parser/parser/package.json delete mode 100644 node_modules/@solidity-parser/parser/src/.ASTBuilder.ts.swp delete mode 100644 node_modules/@solidity-parser/parser/src/ASTBuilder.ts delete mode 100644 node_modules/@solidity-parser/parser/src/ErrorListener.ts delete mode 100644 node_modules/@solidity-parser/parser/src/antlr/Solidity.interp delete mode 100644 node_modules/@solidity-parser/parser/src/antlr/Solidity.tokens delete mode 100644 node_modules/@solidity-parser/parser/src/antlr/SolidityLexer.interp delete mode 100644 node_modules/@solidity-parser/parser/src/antlr/SolidityLexer.tokens delete mode 100644 node_modules/@solidity-parser/parser/src/antlr/SolidityLexer.ts delete mode 100644 node_modules/@solidity-parser/parser/src/antlr/SolidityListener.ts delete mode 100644 node_modules/@solidity-parser/parser/src/antlr/SolidityParser.ts delete mode 100644 node_modules/@solidity-parser/parser/src/antlr/SolidityVisitor.ts delete mode 100644 node_modules/@solidity-parser/parser/src/ast-types.ts delete mode 100644 node_modules/@solidity-parser/parser/src/declarations.d.ts delete mode 100644 node_modules/@solidity-parser/parser/src/index.ts delete mode 100644 node_modules/@solidity-parser/parser/src/parser.ts delete mode 100644 node_modules/@solidity-parser/parser/src/tokens-string.js delete mode 100644 node_modules/@solidity-parser/parser/src/tokens.ts delete mode 100644 node_modules/@solidity-parser/parser/src/types.ts delete mode 100644 node_modules/ansi-regex/index.d.ts delete mode 100644 node_modules/ansi-regex/index.js delete mode 100644 node_modules/ansi-regex/license delete mode 100644 node_modules/ansi-regex/package.json delete mode 100644 
node_modules/ansi-regex/readme.md delete mode 100644 node_modules/antlr4ts/ANTLRErrorListener.d.ts delete mode 100644 node_modules/antlr4ts/ANTLRErrorListener.js delete mode 100644 node_modules/antlr4ts/ANTLRErrorListener.js.map delete mode 100644 node_modules/antlr4ts/ANTLRErrorStrategy.d.ts delete mode 100644 node_modules/antlr4ts/ANTLRErrorStrategy.js delete mode 100644 node_modules/antlr4ts/ANTLRErrorStrategy.js.map delete mode 100644 node_modules/antlr4ts/ANTLRInputStream.d.ts delete mode 100644 node_modules/antlr4ts/ANTLRInputStream.js delete mode 100644 node_modules/antlr4ts/ANTLRInputStream.js.map delete mode 100644 node_modules/antlr4ts/BailErrorStrategy.d.ts delete mode 100644 node_modules/antlr4ts/BailErrorStrategy.js delete mode 100644 node_modules/antlr4ts/BailErrorStrategy.js.map delete mode 100644 node_modules/antlr4ts/BufferedTokenStream.d.ts delete mode 100644 node_modules/antlr4ts/BufferedTokenStream.js delete mode 100644 node_modules/antlr4ts/BufferedTokenStream.js.map delete mode 100644 node_modules/antlr4ts/CharStream.d.ts delete mode 100644 node_modules/antlr4ts/CharStream.js delete mode 100644 node_modules/antlr4ts/CharStream.js.map delete mode 100644 node_modules/antlr4ts/CharStreams.d.ts delete mode 100644 node_modules/antlr4ts/CharStreams.js delete mode 100644 node_modules/antlr4ts/CharStreams.js.map delete mode 100644 node_modules/antlr4ts/CodePointBuffer.d.ts delete mode 100644 node_modules/antlr4ts/CodePointBuffer.js delete mode 100644 node_modules/antlr4ts/CodePointBuffer.js.map delete mode 100644 node_modules/antlr4ts/CodePointCharStream.d.ts delete mode 100644 node_modules/antlr4ts/CodePointCharStream.js delete mode 100644 node_modules/antlr4ts/CodePointCharStream.js.map delete mode 100644 node_modules/antlr4ts/CommonToken.d.ts delete mode 100644 node_modules/antlr4ts/CommonToken.js delete mode 100644 node_modules/antlr4ts/CommonToken.js.map delete mode 100644 node_modules/antlr4ts/CommonTokenFactory.d.ts delete mode 100644 
node_modules/antlr4ts/CommonTokenFactory.js delete mode 100644 node_modules/antlr4ts/CommonTokenFactory.js.map delete mode 100644 node_modules/antlr4ts/CommonTokenStream.d.ts delete mode 100644 node_modules/antlr4ts/CommonTokenStream.js delete mode 100644 node_modules/antlr4ts/CommonTokenStream.js.map delete mode 100644 node_modules/antlr4ts/ConsoleErrorListener.d.ts delete mode 100644 node_modules/antlr4ts/ConsoleErrorListener.js delete mode 100644 node_modules/antlr4ts/ConsoleErrorListener.js.map delete mode 100644 node_modules/antlr4ts/Decorators.d.ts delete mode 100644 node_modules/antlr4ts/Decorators.js delete mode 100644 node_modules/antlr4ts/Decorators.js.map delete mode 100644 node_modules/antlr4ts/DefaultErrorStrategy.d.ts delete mode 100644 node_modules/antlr4ts/DefaultErrorStrategy.js delete mode 100644 node_modules/antlr4ts/DefaultErrorStrategy.js.map delete mode 100644 node_modules/antlr4ts/Dependents.d.ts delete mode 100644 node_modules/antlr4ts/Dependents.js delete mode 100644 node_modules/antlr4ts/Dependents.js.map delete mode 100644 node_modules/antlr4ts/DiagnosticErrorListener.d.ts delete mode 100644 node_modules/antlr4ts/DiagnosticErrorListener.js delete mode 100644 node_modules/antlr4ts/DiagnosticErrorListener.js.map delete mode 100644 node_modules/antlr4ts/FailedPredicateException.d.ts delete mode 100644 node_modules/antlr4ts/FailedPredicateException.js delete mode 100644 node_modules/antlr4ts/FailedPredicateException.js.map delete mode 100644 node_modules/antlr4ts/InputMismatchException.d.ts delete mode 100644 node_modules/antlr4ts/InputMismatchException.js delete mode 100644 node_modules/antlr4ts/InputMismatchException.js.map delete mode 100644 node_modules/antlr4ts/IntStream.d.ts delete mode 100644 node_modules/antlr4ts/IntStream.js delete mode 100644 node_modules/antlr4ts/IntStream.js.map delete mode 100644 node_modules/antlr4ts/InterpreterRuleContext.d.ts delete mode 100644 node_modules/antlr4ts/InterpreterRuleContext.js delete mode 100644 
node_modules/antlr4ts/InterpreterRuleContext.js.map delete mode 100644 node_modules/antlr4ts/LICENSE delete mode 100644 node_modules/antlr4ts/Lexer.d.ts delete mode 100644 node_modules/antlr4ts/Lexer.js delete mode 100644 node_modules/antlr4ts/Lexer.js.map delete mode 100644 node_modules/antlr4ts/LexerInterpreter.d.ts delete mode 100644 node_modules/antlr4ts/LexerInterpreter.js delete mode 100644 node_modules/antlr4ts/LexerInterpreter.js.map delete mode 100644 node_modules/antlr4ts/LexerNoViableAltException.d.ts delete mode 100644 node_modules/antlr4ts/LexerNoViableAltException.js delete mode 100644 node_modules/antlr4ts/LexerNoViableAltException.js.map delete mode 100644 node_modules/antlr4ts/ListTokenSource.d.ts delete mode 100644 node_modules/antlr4ts/ListTokenSource.js delete mode 100644 node_modules/antlr4ts/ListTokenSource.js.map delete mode 100644 node_modules/antlr4ts/NoViableAltException.d.ts delete mode 100644 node_modules/antlr4ts/NoViableAltException.js delete mode 100644 node_modules/antlr4ts/NoViableAltException.js.map delete mode 100644 node_modules/antlr4ts/Parser.d.ts delete mode 100644 node_modules/antlr4ts/Parser.js delete mode 100644 node_modules/antlr4ts/Parser.js.map delete mode 100644 node_modules/antlr4ts/ParserErrorListener.d.ts delete mode 100644 node_modules/antlr4ts/ParserErrorListener.js delete mode 100644 node_modules/antlr4ts/ParserErrorListener.js.map delete mode 100644 node_modules/antlr4ts/ParserInterpreter.d.ts delete mode 100644 node_modules/antlr4ts/ParserInterpreter.js delete mode 100644 node_modules/antlr4ts/ParserInterpreter.js.map delete mode 100644 node_modules/antlr4ts/ParserRuleContext.d.ts delete mode 100644 node_modules/antlr4ts/ParserRuleContext.js delete mode 100644 node_modules/antlr4ts/ParserRuleContext.js.map delete mode 100644 node_modules/antlr4ts/ProxyErrorListener.d.ts delete mode 100644 node_modules/antlr4ts/ProxyErrorListener.js delete mode 100644 node_modules/antlr4ts/ProxyErrorListener.js.map delete mode 
100644 node_modules/antlr4ts/ProxyParserErrorListener.d.ts delete mode 100644 node_modules/antlr4ts/ProxyParserErrorListener.js delete mode 100644 node_modules/antlr4ts/ProxyParserErrorListener.js.map delete mode 100644 node_modules/antlr4ts/README.md delete mode 100644 node_modules/antlr4ts/RecognitionException.d.ts delete mode 100644 node_modules/antlr4ts/RecognitionException.js delete mode 100644 node_modules/antlr4ts/RecognitionException.js.map delete mode 100644 node_modules/antlr4ts/Recognizer.d.ts delete mode 100644 node_modules/antlr4ts/Recognizer.js delete mode 100644 node_modules/antlr4ts/Recognizer.js.map delete mode 100644 node_modules/antlr4ts/RuleContext.d.ts delete mode 100644 node_modules/antlr4ts/RuleContext.js delete mode 100644 node_modules/antlr4ts/RuleContext.js.map delete mode 100644 node_modules/antlr4ts/RuleContextWithAltNum.d.ts delete mode 100644 node_modules/antlr4ts/RuleContextWithAltNum.js delete mode 100644 node_modules/antlr4ts/RuleContextWithAltNum.js.map delete mode 100644 node_modules/antlr4ts/RuleDependency.d.ts delete mode 100644 node_modules/antlr4ts/RuleDependency.js delete mode 100644 node_modules/antlr4ts/RuleDependency.js.map delete mode 100644 node_modules/antlr4ts/RuleVersion.d.ts delete mode 100644 node_modules/antlr4ts/RuleVersion.js delete mode 100644 node_modules/antlr4ts/RuleVersion.js.map delete mode 100644 node_modules/antlr4ts/Token.d.ts delete mode 100644 node_modules/antlr4ts/Token.js delete mode 100644 node_modules/antlr4ts/Token.js.map delete mode 100644 node_modules/antlr4ts/TokenFactory.d.ts delete mode 100644 node_modules/antlr4ts/TokenFactory.js delete mode 100644 node_modules/antlr4ts/TokenFactory.js.map delete mode 100644 node_modules/antlr4ts/TokenSource.d.ts delete mode 100644 node_modules/antlr4ts/TokenSource.js delete mode 100644 node_modules/antlr4ts/TokenSource.js.map delete mode 100644 node_modules/antlr4ts/TokenStream.d.ts delete mode 100644 node_modules/antlr4ts/TokenStream.js delete mode 100644 
node_modules/antlr4ts/TokenStream.js.map delete mode 100644 node_modules/antlr4ts/TokenStreamRewriter.d.ts delete mode 100644 node_modules/antlr4ts/TokenStreamRewriter.js delete mode 100644 node_modules/antlr4ts/TokenStreamRewriter.js.map delete mode 100644 node_modules/antlr4ts/Vocabulary.d.ts delete mode 100644 node_modules/antlr4ts/Vocabulary.js delete mode 100644 node_modules/antlr4ts/Vocabulary.js.map delete mode 100644 node_modules/antlr4ts/VocabularyImpl.d.ts delete mode 100644 node_modules/antlr4ts/VocabularyImpl.js delete mode 100644 node_modules/antlr4ts/VocabularyImpl.js.map delete mode 100644 node_modules/antlr4ts/WritableToken.d.ts delete mode 100644 node_modules/antlr4ts/WritableToken.js delete mode 100644 node_modules/antlr4ts/WritableToken.js.map delete mode 100644 node_modules/antlr4ts/atn/ATN.d.ts delete mode 100644 node_modules/antlr4ts/atn/ATN.js delete mode 100644 node_modules/antlr4ts/atn/ATN.js.map delete mode 100644 node_modules/antlr4ts/atn/ATNConfig.d.ts delete mode 100644 node_modules/antlr4ts/atn/ATNConfig.js delete mode 100644 node_modules/antlr4ts/atn/ATNConfig.js.map delete mode 100644 node_modules/antlr4ts/atn/ATNConfigSet.d.ts delete mode 100644 node_modules/antlr4ts/atn/ATNConfigSet.js delete mode 100644 node_modules/antlr4ts/atn/ATNConfigSet.js.map delete mode 100644 node_modules/antlr4ts/atn/ATNDeserializationOptions.d.ts delete mode 100644 node_modules/antlr4ts/atn/ATNDeserializationOptions.js delete mode 100644 node_modules/antlr4ts/atn/ATNDeserializationOptions.js.map delete mode 100644 node_modules/antlr4ts/atn/ATNDeserializer.d.ts delete mode 100644 node_modules/antlr4ts/atn/ATNDeserializer.js delete mode 100644 node_modules/antlr4ts/atn/ATNDeserializer.js.map delete mode 100644 node_modules/antlr4ts/atn/ATNSimulator.d.ts delete mode 100644 node_modules/antlr4ts/atn/ATNSimulator.js delete mode 100644 node_modules/antlr4ts/atn/ATNSimulator.js.map delete mode 100644 node_modules/antlr4ts/atn/ATNState.d.ts delete mode 100644 
node_modules/antlr4ts/atn/ATNState.js delete mode 100644 node_modules/antlr4ts/atn/ATNState.js.map delete mode 100644 node_modules/antlr4ts/atn/ATNStateType.d.ts delete mode 100644 node_modules/antlr4ts/atn/ATNStateType.js delete mode 100644 node_modules/antlr4ts/atn/ATNStateType.js.map delete mode 100644 node_modules/antlr4ts/atn/ATNType.d.ts delete mode 100644 node_modules/antlr4ts/atn/ATNType.js delete mode 100644 node_modules/antlr4ts/atn/ATNType.js.map delete mode 100644 node_modules/antlr4ts/atn/AbstractPredicateTransition.d.ts delete mode 100644 node_modules/antlr4ts/atn/AbstractPredicateTransition.js delete mode 100644 node_modules/antlr4ts/atn/AbstractPredicateTransition.js.map delete mode 100644 node_modules/antlr4ts/atn/ActionTransition.d.ts delete mode 100644 node_modules/antlr4ts/atn/ActionTransition.js delete mode 100644 node_modules/antlr4ts/atn/ActionTransition.js.map delete mode 100644 node_modules/antlr4ts/atn/AmbiguityInfo.d.ts delete mode 100644 node_modules/antlr4ts/atn/AmbiguityInfo.js delete mode 100644 node_modules/antlr4ts/atn/AmbiguityInfo.js.map delete mode 100644 node_modules/antlr4ts/atn/AtomTransition.d.ts delete mode 100644 node_modules/antlr4ts/atn/AtomTransition.js delete mode 100644 node_modules/antlr4ts/atn/AtomTransition.js.map delete mode 100644 node_modules/antlr4ts/atn/BasicBlockStartState.d.ts delete mode 100644 node_modules/antlr4ts/atn/BasicBlockStartState.js delete mode 100644 node_modules/antlr4ts/atn/BasicBlockStartState.js.map delete mode 100644 node_modules/antlr4ts/atn/BasicState.d.ts delete mode 100644 node_modules/antlr4ts/atn/BasicState.js delete mode 100644 node_modules/antlr4ts/atn/BasicState.js.map delete mode 100644 node_modules/antlr4ts/atn/BlockEndState.d.ts delete mode 100644 node_modules/antlr4ts/atn/BlockEndState.js delete mode 100644 node_modules/antlr4ts/atn/BlockEndState.js.map delete mode 100644 node_modules/antlr4ts/atn/BlockStartState.d.ts delete mode 100644 
node_modules/antlr4ts/atn/BlockStartState.js delete mode 100644 node_modules/antlr4ts/atn/BlockStartState.js.map delete mode 100644 node_modules/antlr4ts/atn/CodePointTransitions.d.ts delete mode 100644 node_modules/antlr4ts/atn/CodePointTransitions.js delete mode 100644 node_modules/antlr4ts/atn/CodePointTransitions.js.map delete mode 100644 node_modules/antlr4ts/atn/ConflictInfo.d.ts delete mode 100644 node_modules/antlr4ts/atn/ConflictInfo.js delete mode 100644 node_modules/antlr4ts/atn/ConflictInfo.js.map delete mode 100644 node_modules/antlr4ts/atn/ContextSensitivityInfo.d.ts delete mode 100644 node_modules/antlr4ts/atn/ContextSensitivityInfo.js delete mode 100644 node_modules/antlr4ts/atn/ContextSensitivityInfo.js.map delete mode 100644 node_modules/antlr4ts/atn/DecisionEventInfo.d.ts delete mode 100644 node_modules/antlr4ts/atn/DecisionEventInfo.js delete mode 100644 node_modules/antlr4ts/atn/DecisionEventInfo.js.map delete mode 100644 node_modules/antlr4ts/atn/DecisionInfo.d.ts delete mode 100644 node_modules/antlr4ts/atn/DecisionInfo.js delete mode 100644 node_modules/antlr4ts/atn/DecisionInfo.js.map delete mode 100644 node_modules/antlr4ts/atn/DecisionState.d.ts delete mode 100644 node_modules/antlr4ts/atn/DecisionState.js delete mode 100644 node_modules/antlr4ts/atn/DecisionState.js.map delete mode 100644 node_modules/antlr4ts/atn/EpsilonTransition.d.ts delete mode 100644 node_modules/antlr4ts/atn/EpsilonTransition.js delete mode 100644 node_modules/antlr4ts/atn/EpsilonTransition.js.map delete mode 100644 node_modules/antlr4ts/atn/ErrorInfo.d.ts delete mode 100644 node_modules/antlr4ts/atn/ErrorInfo.js delete mode 100644 node_modules/antlr4ts/atn/ErrorInfo.js.map delete mode 100644 node_modules/antlr4ts/atn/InvalidState.d.ts delete mode 100644 node_modules/antlr4ts/atn/InvalidState.js delete mode 100644 node_modules/antlr4ts/atn/InvalidState.js.map delete mode 100644 node_modules/antlr4ts/atn/LL1Analyzer.d.ts delete mode 100644 
node_modules/antlr4ts/atn/LL1Analyzer.js delete mode 100644 node_modules/antlr4ts/atn/LL1Analyzer.js.map delete mode 100644 node_modules/antlr4ts/atn/LexerATNSimulator.d.ts delete mode 100644 node_modules/antlr4ts/atn/LexerATNSimulator.js delete mode 100644 node_modules/antlr4ts/atn/LexerATNSimulator.js.map delete mode 100644 node_modules/antlr4ts/atn/LexerAction.d.ts delete mode 100644 node_modules/antlr4ts/atn/LexerAction.js delete mode 100644 node_modules/antlr4ts/atn/LexerAction.js.map delete mode 100644 node_modules/antlr4ts/atn/LexerActionExecutor.d.ts delete mode 100644 node_modules/antlr4ts/atn/LexerActionExecutor.js delete mode 100644 node_modules/antlr4ts/atn/LexerActionExecutor.js.map delete mode 100644 node_modules/antlr4ts/atn/LexerActionType.d.ts delete mode 100644 node_modules/antlr4ts/atn/LexerActionType.js delete mode 100644 node_modules/antlr4ts/atn/LexerActionType.js.map delete mode 100644 node_modules/antlr4ts/atn/LexerChannelAction.d.ts delete mode 100644 node_modules/antlr4ts/atn/LexerChannelAction.js delete mode 100644 node_modules/antlr4ts/atn/LexerChannelAction.js.map delete mode 100644 node_modules/antlr4ts/atn/LexerCustomAction.d.ts delete mode 100644 node_modules/antlr4ts/atn/LexerCustomAction.js delete mode 100644 node_modules/antlr4ts/atn/LexerCustomAction.js.map delete mode 100644 node_modules/antlr4ts/atn/LexerIndexedCustomAction.d.ts delete mode 100644 node_modules/antlr4ts/atn/LexerIndexedCustomAction.js delete mode 100644 node_modules/antlr4ts/atn/LexerIndexedCustomAction.js.map delete mode 100644 node_modules/antlr4ts/atn/LexerModeAction.d.ts delete mode 100644 node_modules/antlr4ts/atn/LexerModeAction.js delete mode 100644 node_modules/antlr4ts/atn/LexerModeAction.js.map delete mode 100644 node_modules/antlr4ts/atn/LexerMoreAction.d.ts delete mode 100644 node_modules/antlr4ts/atn/LexerMoreAction.js delete mode 100644 node_modules/antlr4ts/atn/LexerMoreAction.js.map delete mode 100644 
node_modules/antlr4ts/atn/LexerPopModeAction.d.ts delete mode 100644 node_modules/antlr4ts/atn/LexerPopModeAction.js delete mode 100644 node_modules/antlr4ts/atn/LexerPopModeAction.js.map delete mode 100644 node_modules/antlr4ts/atn/LexerPushModeAction.d.ts delete mode 100644 node_modules/antlr4ts/atn/LexerPushModeAction.js delete mode 100644 node_modules/antlr4ts/atn/LexerPushModeAction.js.map delete mode 100644 node_modules/antlr4ts/atn/LexerSkipAction.d.ts delete mode 100644 node_modules/antlr4ts/atn/LexerSkipAction.js delete mode 100644 node_modules/antlr4ts/atn/LexerSkipAction.js.map delete mode 100644 node_modules/antlr4ts/atn/LexerTypeAction.d.ts delete mode 100644 node_modules/antlr4ts/atn/LexerTypeAction.js delete mode 100644 node_modules/antlr4ts/atn/LexerTypeAction.js.map delete mode 100644 node_modules/antlr4ts/atn/LookaheadEventInfo.d.ts delete mode 100644 node_modules/antlr4ts/atn/LookaheadEventInfo.js delete mode 100644 node_modules/antlr4ts/atn/LookaheadEventInfo.js.map delete mode 100644 node_modules/antlr4ts/atn/LoopEndState.d.ts delete mode 100644 node_modules/antlr4ts/atn/LoopEndState.js delete mode 100644 node_modules/antlr4ts/atn/LoopEndState.js.map delete mode 100644 node_modules/antlr4ts/atn/NotSetTransition.d.ts delete mode 100644 node_modules/antlr4ts/atn/NotSetTransition.js delete mode 100644 node_modules/antlr4ts/atn/NotSetTransition.js.map delete mode 100644 node_modules/antlr4ts/atn/OrderedATNConfigSet.d.ts delete mode 100644 node_modules/antlr4ts/atn/OrderedATNConfigSet.js delete mode 100644 node_modules/antlr4ts/atn/OrderedATNConfigSet.js.map delete mode 100644 node_modules/antlr4ts/atn/ParseInfo.d.ts delete mode 100644 node_modules/antlr4ts/atn/ParseInfo.js delete mode 100644 node_modules/antlr4ts/atn/ParseInfo.js.map delete mode 100644 node_modules/antlr4ts/atn/ParserATNSimulator.d.ts delete mode 100644 node_modules/antlr4ts/atn/ParserATNSimulator.js delete mode 100644 node_modules/antlr4ts/atn/ParserATNSimulator.js.map delete mode 
100644 node_modules/antlr4ts/atn/PlusBlockStartState.d.ts delete mode 100644 node_modules/antlr4ts/atn/PlusBlockStartState.js delete mode 100644 node_modules/antlr4ts/atn/PlusBlockStartState.js.map delete mode 100644 node_modules/antlr4ts/atn/PlusLoopbackState.d.ts delete mode 100644 node_modules/antlr4ts/atn/PlusLoopbackState.js delete mode 100644 node_modules/antlr4ts/atn/PlusLoopbackState.js.map delete mode 100644 node_modules/antlr4ts/atn/PrecedencePredicateTransition.d.ts delete mode 100644 node_modules/antlr4ts/atn/PrecedencePredicateTransition.js delete mode 100644 node_modules/antlr4ts/atn/PrecedencePredicateTransition.js.map delete mode 100644 node_modules/antlr4ts/atn/PredicateEvalInfo.d.ts delete mode 100644 node_modules/antlr4ts/atn/PredicateEvalInfo.js delete mode 100644 node_modules/antlr4ts/atn/PredicateEvalInfo.js.map delete mode 100644 node_modules/antlr4ts/atn/PredicateTransition.d.ts delete mode 100644 node_modules/antlr4ts/atn/PredicateTransition.js delete mode 100644 node_modules/antlr4ts/atn/PredicateTransition.js.map delete mode 100644 node_modules/antlr4ts/atn/PredictionContext.d.ts delete mode 100644 node_modules/antlr4ts/atn/PredictionContext.js delete mode 100644 node_modules/antlr4ts/atn/PredictionContext.js.map delete mode 100644 node_modules/antlr4ts/atn/PredictionContextCache.d.ts delete mode 100644 node_modules/antlr4ts/atn/PredictionContextCache.js delete mode 100644 node_modules/antlr4ts/atn/PredictionContextCache.js.map delete mode 100644 node_modules/antlr4ts/atn/PredictionMode.d.ts delete mode 100644 node_modules/antlr4ts/atn/PredictionMode.js delete mode 100644 node_modules/antlr4ts/atn/PredictionMode.js.map delete mode 100644 node_modules/antlr4ts/atn/ProfilingATNSimulator.d.ts delete mode 100644 node_modules/antlr4ts/atn/ProfilingATNSimulator.js delete mode 100644 node_modules/antlr4ts/atn/ProfilingATNSimulator.js.map delete mode 100644 node_modules/antlr4ts/atn/RangeTransition.d.ts delete mode 100644 
node_modules/antlr4ts/atn/RangeTransition.js delete mode 100644 node_modules/antlr4ts/atn/RangeTransition.js.map delete mode 100644 node_modules/antlr4ts/atn/RuleStartState.d.ts delete mode 100644 node_modules/antlr4ts/atn/RuleStartState.js delete mode 100644 node_modules/antlr4ts/atn/RuleStartState.js.map delete mode 100644 node_modules/antlr4ts/atn/RuleStopState.d.ts delete mode 100644 node_modules/antlr4ts/atn/RuleStopState.js delete mode 100644 node_modules/antlr4ts/atn/RuleStopState.js.map delete mode 100644 node_modules/antlr4ts/atn/RuleTransition.d.ts delete mode 100644 node_modules/antlr4ts/atn/RuleTransition.js delete mode 100644 node_modules/antlr4ts/atn/RuleTransition.js.map delete mode 100644 node_modules/antlr4ts/atn/SemanticContext.d.ts delete mode 100644 node_modules/antlr4ts/atn/SemanticContext.js delete mode 100644 node_modules/antlr4ts/atn/SemanticContext.js.map delete mode 100644 node_modules/antlr4ts/atn/SetTransition.d.ts delete mode 100644 node_modules/antlr4ts/atn/SetTransition.js delete mode 100644 node_modules/antlr4ts/atn/SetTransition.js.map delete mode 100644 node_modules/antlr4ts/atn/SimulatorState.d.ts delete mode 100644 node_modules/antlr4ts/atn/SimulatorState.js delete mode 100644 node_modules/antlr4ts/atn/SimulatorState.js.map delete mode 100644 node_modules/antlr4ts/atn/StarBlockStartState.d.ts delete mode 100644 node_modules/antlr4ts/atn/StarBlockStartState.js delete mode 100644 node_modules/antlr4ts/atn/StarBlockStartState.js.map delete mode 100644 node_modules/antlr4ts/atn/StarLoopEntryState.d.ts delete mode 100644 node_modules/antlr4ts/atn/StarLoopEntryState.js delete mode 100644 node_modules/antlr4ts/atn/StarLoopEntryState.js.map delete mode 100644 node_modules/antlr4ts/atn/StarLoopbackState.d.ts delete mode 100644 node_modules/antlr4ts/atn/StarLoopbackState.js delete mode 100644 node_modules/antlr4ts/atn/StarLoopbackState.js.map delete mode 100644 node_modules/antlr4ts/atn/TokensStartState.d.ts delete mode 100644 
node_modules/antlr4ts/atn/TokensStartState.js delete mode 100644 node_modules/antlr4ts/atn/TokensStartState.js.map delete mode 100644 node_modules/antlr4ts/atn/Transition.d.ts delete mode 100644 node_modules/antlr4ts/atn/Transition.js delete mode 100644 node_modules/antlr4ts/atn/Transition.js.map delete mode 100644 node_modules/antlr4ts/atn/TransitionType.d.ts delete mode 100644 node_modules/antlr4ts/atn/TransitionType.js delete mode 100644 node_modules/antlr4ts/atn/TransitionType.js.map delete mode 100644 node_modules/antlr4ts/atn/WildcardTransition.d.ts delete mode 100644 node_modules/antlr4ts/atn/WildcardTransition.js delete mode 100644 node_modules/antlr4ts/atn/WildcardTransition.js.map delete mode 100644 node_modules/antlr4ts/atn/index.d.ts delete mode 100644 node_modules/antlr4ts/atn/index.js delete mode 100644 node_modules/antlr4ts/atn/index.js.map delete mode 100644 node_modules/antlr4ts/dfa/AcceptStateInfo.d.ts delete mode 100644 node_modules/antlr4ts/dfa/AcceptStateInfo.js delete mode 100644 node_modules/antlr4ts/dfa/AcceptStateInfo.js.map delete mode 100644 node_modules/antlr4ts/dfa/DFA.d.ts delete mode 100644 node_modules/antlr4ts/dfa/DFA.js delete mode 100644 node_modules/antlr4ts/dfa/DFA.js.map delete mode 100644 node_modules/antlr4ts/dfa/DFASerializer.d.ts delete mode 100644 node_modules/antlr4ts/dfa/DFASerializer.js delete mode 100644 node_modules/antlr4ts/dfa/DFASerializer.js.map delete mode 100644 node_modules/antlr4ts/dfa/DFAState.d.ts delete mode 100644 node_modules/antlr4ts/dfa/DFAState.js delete mode 100644 node_modules/antlr4ts/dfa/DFAState.js.map delete mode 100644 node_modules/antlr4ts/dfa/LexerDFASerializer.d.ts delete mode 100644 node_modules/antlr4ts/dfa/LexerDFASerializer.js delete mode 100644 node_modules/antlr4ts/dfa/LexerDFASerializer.js.map delete mode 100644 node_modules/antlr4ts/dfa/index.d.ts delete mode 100644 node_modules/antlr4ts/dfa/index.js delete mode 100644 node_modules/antlr4ts/dfa/index.js.map delete mode 100644 
node_modules/antlr4ts/index.d.ts delete mode 100644 node_modules/antlr4ts/index.js delete mode 100644 node_modules/antlr4ts/index.js.map delete mode 100644 node_modules/antlr4ts/misc/Args.d.ts delete mode 100644 node_modules/antlr4ts/misc/Args.js delete mode 100644 node_modules/antlr4ts/misc/Args.js.map delete mode 100644 node_modules/antlr4ts/misc/Array2DHashMap.d.ts delete mode 100644 node_modules/antlr4ts/misc/Array2DHashMap.js delete mode 100644 node_modules/antlr4ts/misc/Array2DHashMap.js.map delete mode 100644 node_modules/antlr4ts/misc/Array2DHashSet.d.ts delete mode 100644 node_modules/antlr4ts/misc/Array2DHashSet.js delete mode 100644 node_modules/antlr4ts/misc/Array2DHashSet.js.map delete mode 100644 node_modules/antlr4ts/misc/ArrayEqualityComparator.d.ts delete mode 100644 node_modules/antlr4ts/misc/ArrayEqualityComparator.js delete mode 100644 node_modules/antlr4ts/misc/ArrayEqualityComparator.js.map delete mode 100644 node_modules/antlr4ts/misc/Arrays.d.ts delete mode 100644 node_modules/antlr4ts/misc/Arrays.js delete mode 100644 node_modules/antlr4ts/misc/Arrays.js.map delete mode 100644 node_modules/antlr4ts/misc/BitSet.d.ts delete mode 100644 node_modules/antlr4ts/misc/BitSet.js delete mode 100644 node_modules/antlr4ts/misc/BitSet.js.map delete mode 100644 node_modules/antlr4ts/misc/Character.d.ts delete mode 100644 node_modules/antlr4ts/misc/Character.js delete mode 100644 node_modules/antlr4ts/misc/Character.js.map delete mode 100644 node_modules/antlr4ts/misc/DefaultEqualityComparator.d.ts delete mode 100644 node_modules/antlr4ts/misc/DefaultEqualityComparator.js delete mode 100644 node_modules/antlr4ts/misc/DefaultEqualityComparator.js.map delete mode 100644 node_modules/antlr4ts/misc/EqualityComparator.d.ts delete mode 100644 node_modules/antlr4ts/misc/EqualityComparator.js delete mode 100644 node_modules/antlr4ts/misc/EqualityComparator.js.map delete mode 100644 node_modules/antlr4ts/misc/IntSet.d.ts delete mode 100644 
node_modules/antlr4ts/misc/IntSet.js delete mode 100644 node_modules/antlr4ts/misc/IntSet.js.map delete mode 100644 node_modules/antlr4ts/misc/IntegerList.d.ts delete mode 100644 node_modules/antlr4ts/misc/IntegerList.js delete mode 100644 node_modules/antlr4ts/misc/IntegerList.js.map delete mode 100644 node_modules/antlr4ts/misc/IntegerStack.d.ts delete mode 100644 node_modules/antlr4ts/misc/IntegerStack.js delete mode 100644 node_modules/antlr4ts/misc/IntegerStack.js.map delete mode 100644 node_modules/antlr4ts/misc/InterpreterDataReader.d.ts delete mode 100644 node_modules/antlr4ts/misc/InterpreterDataReader.js delete mode 100644 node_modules/antlr4ts/misc/InterpreterDataReader.js.map delete mode 100644 node_modules/antlr4ts/misc/Interval.d.ts delete mode 100644 node_modules/antlr4ts/misc/Interval.js delete mode 100644 node_modules/antlr4ts/misc/Interval.js.map delete mode 100644 node_modules/antlr4ts/misc/IntervalSet.d.ts delete mode 100644 node_modules/antlr4ts/misc/IntervalSet.js delete mode 100644 node_modules/antlr4ts/misc/IntervalSet.js.map delete mode 100644 node_modules/antlr4ts/misc/MultiMap.d.ts delete mode 100644 node_modules/antlr4ts/misc/MultiMap.js delete mode 100644 node_modules/antlr4ts/misc/MultiMap.js.map delete mode 100644 node_modules/antlr4ts/misc/MurmurHash.d.ts delete mode 100644 node_modules/antlr4ts/misc/MurmurHash.js delete mode 100644 node_modules/antlr4ts/misc/MurmurHash.js.map delete mode 100644 node_modules/antlr4ts/misc/ObjectEqualityComparator.d.ts delete mode 100644 node_modules/antlr4ts/misc/ObjectEqualityComparator.js delete mode 100644 node_modules/antlr4ts/misc/ObjectEqualityComparator.js.map delete mode 100644 node_modules/antlr4ts/misc/ParseCancellationException.d.ts delete mode 100644 node_modules/antlr4ts/misc/ParseCancellationException.js delete mode 100644 node_modules/antlr4ts/misc/ParseCancellationException.js.map delete mode 100644 node_modules/antlr4ts/misc/Stubs.d.ts delete mode 100644 
node_modules/antlr4ts/misc/Stubs.js delete mode 100644 node_modules/antlr4ts/misc/Stubs.js.map delete mode 100644 node_modules/antlr4ts/misc/UUID.d.ts delete mode 100644 node_modules/antlr4ts/misc/UUID.js delete mode 100644 node_modules/antlr4ts/misc/UUID.js.map delete mode 100644 node_modules/antlr4ts/misc/Utils.d.ts delete mode 100644 node_modules/antlr4ts/misc/Utils.js delete mode 100644 node_modules/antlr4ts/misc/Utils.js.map delete mode 100644 node_modules/antlr4ts/misc/index.d.ts delete mode 100644 node_modules/antlr4ts/misc/index.js delete mode 100644 node_modules/antlr4ts/misc/index.js.map delete mode 100644 node_modules/antlr4ts/package.json delete mode 100644 node_modules/antlr4ts/tree/AbstractParseTreeVisitor.d.ts delete mode 100644 node_modules/antlr4ts/tree/AbstractParseTreeVisitor.js delete mode 100644 node_modules/antlr4ts/tree/AbstractParseTreeVisitor.js.map delete mode 100644 node_modules/antlr4ts/tree/ErrorNode.d.ts delete mode 100644 node_modules/antlr4ts/tree/ErrorNode.js delete mode 100644 node_modules/antlr4ts/tree/ErrorNode.js.map delete mode 100644 node_modules/antlr4ts/tree/ParseTree.d.ts delete mode 100644 node_modules/antlr4ts/tree/ParseTree.js delete mode 100644 node_modules/antlr4ts/tree/ParseTree.js.map delete mode 100644 node_modules/antlr4ts/tree/ParseTreeListener.d.ts delete mode 100644 node_modules/antlr4ts/tree/ParseTreeListener.js delete mode 100644 node_modules/antlr4ts/tree/ParseTreeListener.js.map delete mode 100644 node_modules/antlr4ts/tree/ParseTreeProperty.d.ts delete mode 100644 node_modules/antlr4ts/tree/ParseTreeProperty.js delete mode 100644 node_modules/antlr4ts/tree/ParseTreeProperty.js.map delete mode 100644 node_modules/antlr4ts/tree/ParseTreeVisitor.d.ts delete mode 100644 node_modules/antlr4ts/tree/ParseTreeVisitor.js delete mode 100644 node_modules/antlr4ts/tree/ParseTreeVisitor.js.map delete mode 100644 node_modules/antlr4ts/tree/ParseTreeWalker.d.ts delete mode 100644 
node_modules/antlr4ts/tree/ParseTreeWalker.js delete mode 100644 node_modules/antlr4ts/tree/ParseTreeWalker.js.map delete mode 100644 node_modules/antlr4ts/tree/RuleNode.d.ts delete mode 100644 node_modules/antlr4ts/tree/RuleNode.js delete mode 100644 node_modules/antlr4ts/tree/RuleNode.js.map delete mode 100644 node_modules/antlr4ts/tree/SyntaxTree.d.ts delete mode 100644 node_modules/antlr4ts/tree/SyntaxTree.js delete mode 100644 node_modules/antlr4ts/tree/SyntaxTree.js.map delete mode 100644 node_modules/antlr4ts/tree/TerminalNode.d.ts delete mode 100644 node_modules/antlr4ts/tree/TerminalNode.js delete mode 100644 node_modules/antlr4ts/tree/TerminalNode.js.map delete mode 100644 node_modules/antlr4ts/tree/Tree.d.ts delete mode 100644 node_modules/antlr4ts/tree/Tree.js delete mode 100644 node_modules/antlr4ts/tree/Tree.js.map delete mode 100644 node_modules/antlr4ts/tree/Trees.d.ts delete mode 100644 node_modules/antlr4ts/tree/Trees.js delete mode 100644 node_modules/antlr4ts/tree/Trees.js.map delete mode 100644 node_modules/antlr4ts/tree/index.d.ts delete mode 100644 node_modules/antlr4ts/tree/index.js delete mode 100644 node_modules/antlr4ts/tree/index.js.map delete mode 100644 node_modules/antlr4ts/tree/pattern/Chunk.d.ts delete mode 100644 node_modules/antlr4ts/tree/pattern/Chunk.js delete mode 100644 node_modules/antlr4ts/tree/pattern/Chunk.js.map delete mode 100644 node_modules/antlr4ts/tree/pattern/ParseTreeMatch.d.ts delete mode 100644 node_modules/antlr4ts/tree/pattern/ParseTreeMatch.js delete mode 100644 node_modules/antlr4ts/tree/pattern/ParseTreeMatch.js.map delete mode 100644 node_modules/antlr4ts/tree/pattern/ParseTreePattern.d.ts delete mode 100644 node_modules/antlr4ts/tree/pattern/ParseTreePattern.js delete mode 100644 node_modules/antlr4ts/tree/pattern/ParseTreePattern.js.map delete mode 100644 node_modules/antlr4ts/tree/pattern/ParseTreePatternMatcher.d.ts delete mode 100644 node_modules/antlr4ts/tree/pattern/ParseTreePatternMatcher.js delete 
mode 100644 node_modules/antlr4ts/tree/pattern/ParseTreePatternMatcher.js.map delete mode 100644 node_modules/antlr4ts/tree/pattern/RuleTagToken.d.ts delete mode 100644 node_modules/antlr4ts/tree/pattern/RuleTagToken.js delete mode 100644 node_modules/antlr4ts/tree/pattern/RuleTagToken.js.map delete mode 100644 node_modules/antlr4ts/tree/pattern/TagChunk.d.ts delete mode 100644 node_modules/antlr4ts/tree/pattern/TagChunk.js delete mode 100644 node_modules/antlr4ts/tree/pattern/TagChunk.js.map delete mode 100644 node_modules/antlr4ts/tree/pattern/TextChunk.d.ts delete mode 100644 node_modules/antlr4ts/tree/pattern/TextChunk.js delete mode 100644 node_modules/antlr4ts/tree/pattern/TextChunk.js.map delete mode 100644 node_modules/antlr4ts/tree/pattern/TokenTagToken.d.ts delete mode 100644 node_modules/antlr4ts/tree/pattern/TokenTagToken.js delete mode 100644 node_modules/antlr4ts/tree/pattern/TokenTagToken.js.map delete mode 100644 node_modules/antlr4ts/tree/pattern/index.d.ts delete mode 100644 node_modules/antlr4ts/tree/pattern/index.js delete mode 100644 node_modules/antlr4ts/tree/pattern/index.js.map delete mode 100644 node_modules/antlr4ts/tree/xpath/XPath.d.ts delete mode 100644 node_modules/antlr4ts/tree/xpath/XPath.js delete mode 100644 node_modules/antlr4ts/tree/xpath/XPath.js.map delete mode 100644 node_modules/antlr4ts/tree/xpath/XPathElement.d.ts delete mode 100644 node_modules/antlr4ts/tree/xpath/XPathElement.js delete mode 100644 node_modules/antlr4ts/tree/xpath/XPathElement.js.map delete mode 100644 node_modules/antlr4ts/tree/xpath/XPathLexer.d.ts delete mode 100644 node_modules/antlr4ts/tree/xpath/XPathLexer.js delete mode 100644 node_modules/antlr4ts/tree/xpath/XPathLexer.js.map delete mode 100644 node_modules/antlr4ts/tree/xpath/XPathLexerErrorListener.d.ts delete mode 100644 node_modules/antlr4ts/tree/xpath/XPathLexerErrorListener.js delete mode 100644 node_modules/antlr4ts/tree/xpath/XPathLexerErrorListener.js.map delete mode 100644 
node_modules/antlr4ts/tree/xpath/XPathRuleAnywhereElement.d.ts delete mode 100644 node_modules/antlr4ts/tree/xpath/XPathRuleAnywhereElement.js delete mode 100644 node_modules/antlr4ts/tree/xpath/XPathRuleAnywhereElement.js.map delete mode 100644 node_modules/antlr4ts/tree/xpath/XPathRuleElement.d.ts delete mode 100644 node_modules/antlr4ts/tree/xpath/XPathRuleElement.js delete mode 100644 node_modules/antlr4ts/tree/xpath/XPathRuleElement.js.map delete mode 100644 node_modules/antlr4ts/tree/xpath/XPathTokenAnywhereElement.d.ts delete mode 100644 node_modules/antlr4ts/tree/xpath/XPathTokenAnywhereElement.js delete mode 100644 node_modules/antlr4ts/tree/xpath/XPathTokenAnywhereElement.js.map delete mode 100644 node_modules/antlr4ts/tree/xpath/XPathTokenElement.d.ts delete mode 100644 node_modules/antlr4ts/tree/xpath/XPathTokenElement.js delete mode 100644 node_modules/antlr4ts/tree/xpath/XPathTokenElement.js.map delete mode 100644 node_modules/antlr4ts/tree/xpath/XPathWildcardAnywhereElement.d.ts delete mode 100644 node_modules/antlr4ts/tree/xpath/XPathWildcardAnywhereElement.js delete mode 100644 node_modules/antlr4ts/tree/xpath/XPathWildcardAnywhereElement.js.map delete mode 100644 node_modules/antlr4ts/tree/xpath/XPathWildcardElement.d.ts delete mode 100644 node_modules/antlr4ts/tree/xpath/XPathWildcardElement.js delete mode 100644 node_modules/antlr4ts/tree/xpath/XPathWildcardElement.js.map delete mode 100644 node_modules/antlr4ts/tree/xpath/index.d.ts delete mode 100644 node_modules/antlr4ts/tree/xpath/index.js delete mode 100644 node_modules/antlr4ts/tree/xpath/index.js.map delete mode 100644 node_modules/emoji-regex/LICENSE-MIT.txt delete mode 100644 node_modules/emoji-regex/README.md delete mode 100644 node_modules/emoji-regex/index.d.ts delete mode 100644 node_modules/emoji-regex/index.js delete mode 100644 node_modules/emoji-regex/package.json delete mode 100644 node_modules/escape-string-regexp/index.d.ts delete mode 100644 
node_modules/escape-string-regexp/index.js delete mode 100644 node_modules/escape-string-regexp/license delete mode 100644 node_modules/escape-string-regexp/package.json delete mode 100644 node_modules/escape-string-regexp/readme.md delete mode 100644 node_modules/is-fullwidth-code-point/index.d.ts delete mode 100644 node_modules/is-fullwidth-code-point/index.js delete mode 100644 node_modules/is-fullwidth-code-point/license delete mode 100644 node_modules/is-fullwidth-code-point/package.json delete mode 100644 node_modules/is-fullwidth-code-point/readme.md delete mode 100644 node_modules/lru-cache/LICENSE delete mode 100644 node_modules/lru-cache/README.md delete mode 100644 node_modules/lru-cache/index.js delete mode 100644 node_modules/lru-cache/package.json delete mode 100644 node_modules/prettier-plugin-solidity/.eslintignore delete mode 100644 node_modules/prettier-plugin-solidity/.eslintrc delete mode 100644 node_modules/prettier-plugin-solidity/.github/FUNDING.yml delete mode 100644 node_modules/prettier-plugin-solidity/.github/dependabot.yml delete mode 100644 node_modules/prettier-plugin-solidity/.github/workflows/CI.yml delete mode 100644 node_modules/prettier-plugin-solidity/.nvmrc delete mode 100644 node_modules/prettier-plugin-solidity/.prettierignore delete mode 100644 node_modules/prettier-plugin-solidity/.prettierrc delete mode 100644 node_modules/prettier-plugin-solidity/CODE_OF_CONDUCT.md delete mode 100644 node_modules/prettier-plugin-solidity/HOW_TO_PUBLISH.md delete mode 100644 node_modules/prettier-plugin-solidity/LICENSE delete mode 100644 node_modules/prettier-plugin-solidity/README.md delete mode 100644 node_modules/prettier-plugin-solidity/STYLEGUIDE.md delete mode 100644 node_modules/prettier-plugin-solidity/assets/telegram-badge.svg delete mode 100644 node_modules/prettier-plugin-solidity/jest.config.js delete mode 120000 node_modules/prettier-plugin-solidity/node_modules/.bin/prettier delete mode 120000 
node_modules/prettier-plugin-solidity/node_modules/.bin/semver delete mode 100644 node_modules/prettier-plugin-solidity/package.json delete mode 100644 node_modules/prettier-plugin-solidity/scripts/generateIndexes.js delete mode 100644 node_modules/prettier-plugin-solidity/scripts/makeData.js delete mode 100644 node_modules/prettier-plugin-solidity/src/binary-operator-printers/arithmetic.js delete mode 100644 node_modules/prettier-plugin-solidity/src/binary-operator-printers/assignment.js delete mode 100644 node_modules/prettier-plugin-solidity/src/binary-operator-printers/bit.js delete mode 100644 node_modules/prettier-plugin-solidity/src/binary-operator-printers/comparison.js delete mode 100644 node_modules/prettier-plugin-solidity/src/binary-operator-printers/exponentiation.js delete mode 100644 node_modules/prettier-plugin-solidity/src/binary-operator-printers/index.js delete mode 100644 node_modules/prettier-plugin-solidity/src/binary-operator-printers/logical.js delete mode 100644 node_modules/prettier-plugin-solidity/src/binary-operator-printers/shift.js delete mode 100644 node_modules/prettier-plugin-solidity/src/clean.js delete mode 100644 node_modules/prettier-plugin-solidity/src/comments/handler.js delete mode 100644 node_modules/prettier-plugin-solidity/src/comments/handlers/ContractDefinition.js delete mode 100644 node_modules/prettier-plugin-solidity/src/comments/ignore.js delete mode 100644 node_modules/prettier-plugin-solidity/src/comments/index.js delete mode 100644 node_modules/prettier-plugin-solidity/src/comments/printer.js delete mode 100644 node_modules/prettier-plugin-solidity/src/index.js delete mode 100644 node_modules/prettier-plugin-solidity/src/loc.js delete mode 100644 node_modules/prettier-plugin-solidity/src/nodes/ArrayTypeName.js delete mode 100644 node_modules/prettier-plugin-solidity/src/nodes/AssemblyAssignment.js delete mode 100644 node_modules/prettier-plugin-solidity/src/nodes/AssemblyBlock.js delete mode 100644 
node_modules/prettier-plugin-solidity/src/nodes/AssemblyCall.js delete mode 100644 node_modules/prettier-plugin-solidity/src/nodes/AssemblyCase.js delete mode 100644 node_modules/prettier-plugin-solidity/src/nodes/AssemblyFor.js delete mode 100644 node_modules/prettier-plugin-solidity/src/nodes/AssemblyFunctionDefinition.js delete mode 100644 node_modules/prettier-plugin-solidity/src/nodes/AssemblyIf.js delete mode 100644 node_modules/prettier-plugin-solidity/src/nodes/AssemblyLocalDefinition.js delete mode 100644 node_modules/prettier-plugin-solidity/src/nodes/AssemblyMemberAccess.js delete mode 100644 node_modules/prettier-plugin-solidity/src/nodes/AssemblySwitch.js delete mode 100644 node_modules/prettier-plugin-solidity/src/nodes/BinaryOperation.js delete mode 100644 node_modules/prettier-plugin-solidity/src/nodes/Block.js delete mode 100644 node_modules/prettier-plugin-solidity/src/nodes/BooleanLiteral.js delete mode 100644 node_modules/prettier-plugin-solidity/src/nodes/Break.js delete mode 100644 node_modules/prettier-plugin-solidity/src/nodes/BreakStatement.js delete mode 100644 node_modules/prettier-plugin-solidity/src/nodes/CatchClause.js delete mode 100644 node_modules/prettier-plugin-solidity/src/nodes/Conditional.js delete mode 100644 node_modules/prettier-plugin-solidity/src/nodes/ContinueStatement.js delete mode 100644 node_modules/prettier-plugin-solidity/src/nodes/ContractDefinition.js delete mode 100644 node_modules/prettier-plugin-solidity/src/nodes/CustomErrorDefinition.js delete mode 100644 node_modules/prettier-plugin-solidity/src/nodes/DecimalNumber.js delete mode 100644 node_modules/prettier-plugin-solidity/src/nodes/DoWhileStatement.js delete mode 100644 node_modules/prettier-plugin-solidity/src/nodes/ElementaryTypeName.js delete mode 100644 node_modules/prettier-plugin-solidity/src/nodes/EmitStatement.js delete mode 100644 node_modules/prettier-plugin-solidity/src/nodes/EnumDefinition.js delete mode 100644 
node_modules/prettier-plugin-solidity/src/nodes/EnumValue.js delete mode 100644 node_modules/prettier-plugin-solidity/src/nodes/EventDefinition.js delete mode 100644 node_modules/prettier-plugin-solidity/src/nodes/ExpressionStatement.js delete mode 100644 node_modules/prettier-plugin-solidity/src/nodes/FileLevelConstant.js delete mode 100644 node_modules/prettier-plugin-solidity/src/nodes/ForStatement.js delete mode 100644 node_modules/prettier-plugin-solidity/src/nodes/FunctionCall.js delete mode 100644 node_modules/prettier-plugin-solidity/src/nodes/FunctionDefinition.js delete mode 100644 node_modules/prettier-plugin-solidity/src/nodes/FunctionTypeName.js delete mode 100644 node_modules/prettier-plugin-solidity/src/nodes/HexLiteral.js delete mode 100644 node_modules/prettier-plugin-solidity/src/nodes/HexNumber.js delete mode 100644 node_modules/prettier-plugin-solidity/src/nodes/Identifier.js delete mode 100644 node_modules/prettier-plugin-solidity/src/nodes/IfStatement.js delete mode 100644 node_modules/prettier-plugin-solidity/src/nodes/ImportDirective.js delete mode 100644 node_modules/prettier-plugin-solidity/src/nodes/IndexAccess.js delete mode 100644 node_modules/prettier-plugin-solidity/src/nodes/IndexRangeAccess.js delete mode 100644 node_modules/prettier-plugin-solidity/src/nodes/InheritanceSpecifier.js delete mode 100644 node_modules/prettier-plugin-solidity/src/nodes/InlineAssemblyStatement.js delete mode 100644 node_modules/prettier-plugin-solidity/src/nodes/LabelDefinition.js delete mode 100644 node_modules/prettier-plugin-solidity/src/nodes/Mapping.js delete mode 100644 node_modules/prettier-plugin-solidity/src/nodes/MemberAccess.js delete mode 100644 node_modules/prettier-plugin-solidity/src/nodes/ModifierDefinition.js delete mode 100644 node_modules/prettier-plugin-solidity/src/nodes/ModifierInvocation.js delete mode 100644 node_modules/prettier-plugin-solidity/src/nodes/NameValueExpression.js delete mode 100644 
node_modules/prettier-plugin-solidity/src/nodes/NameValueList.js delete mode 100644 node_modules/prettier-plugin-solidity/src/nodes/NewExpression.js delete mode 100644 node_modules/prettier-plugin-solidity/src/nodes/NumberLiteral.js delete mode 100644 node_modules/prettier-plugin-solidity/src/nodes/PragmaDirective.js delete mode 100644 node_modules/prettier-plugin-solidity/src/nodes/ReturnStatement.js delete mode 100644 node_modules/prettier-plugin-solidity/src/nodes/RevertStatement.js delete mode 100644 node_modules/prettier-plugin-solidity/src/nodes/SourceUnit.js delete mode 100644 node_modules/prettier-plugin-solidity/src/nodes/StateVariableDeclaration.js delete mode 100644 node_modules/prettier-plugin-solidity/src/nodes/StringLiteral.js delete mode 100644 node_modules/prettier-plugin-solidity/src/nodes/StructDefinition.js delete mode 100644 node_modules/prettier-plugin-solidity/src/nodes/ThrowStatement.js delete mode 100644 node_modules/prettier-plugin-solidity/src/nodes/TryStatement.js delete mode 100644 node_modules/prettier-plugin-solidity/src/nodes/TupleExpression.js delete mode 100644 node_modules/prettier-plugin-solidity/src/nodes/TypeDefinition.js delete mode 100644 node_modules/prettier-plugin-solidity/src/nodes/TypeNameExpression.js delete mode 100644 node_modules/prettier-plugin-solidity/src/nodes/UnaryOperation.js delete mode 100644 node_modules/prettier-plugin-solidity/src/nodes/UncheckedStatement.js delete mode 100644 node_modules/prettier-plugin-solidity/src/nodes/UserDefinedTypeName.js delete mode 100644 node_modules/prettier-plugin-solidity/src/nodes/UsingForDeclaration.js delete mode 100644 node_modules/prettier-plugin-solidity/src/nodes/VariableDeclaration.js delete mode 100644 node_modules/prettier-plugin-solidity/src/nodes/VariableDeclarationStatement.js delete mode 100644 node_modules/prettier-plugin-solidity/src/nodes/WhileStatement.js delete mode 100644 node_modules/prettier-plugin-solidity/src/nodes/index.js delete mode 100644 
node_modules/prettier-plugin-solidity/src/nodes/print-comments.js delete mode 100644 node_modules/prettier-plugin-solidity/src/nodes/print-preserving-empty-lines.js delete mode 100644 node_modules/prettier-plugin-solidity/src/nodes/print-separated-item.js delete mode 100644 node_modules/prettier-plugin-solidity/src/nodes/print-separated-list.js delete mode 100644 node_modules/prettier-plugin-solidity/src/options.js delete mode 100644 node_modules/prettier-plugin-solidity/src/parser.js delete mode 100644 node_modules/prettier-plugin-solidity/src/prettier-comments/common/util.js delete mode 100644 node_modules/prettier-plugin-solidity/src/prettier-comments/index.js delete mode 100644 node_modules/prettier-plugin-solidity/src/prettier-comments/language-js/comments.js delete mode 100644 node_modules/prettier-plugin-solidity/src/printer.js delete mode 100644 node_modules/prettier-plugin-solidity/tests/config/.prettierrc delete mode 100644 node_modules/prettier-plugin-solidity/tests/config/format-test.js delete mode 100644 node_modules/prettier-plugin-solidity/tests/config/require-prettier.js delete mode 100644 node_modules/prettier-plugin-solidity/tests/config/require-standalone.js delete mode 100644 node_modules/prettier-plugin-solidity/tests/config/setup.js delete mode 100644 node_modules/prettier-plugin-solidity/tests/config/utils/check-parsers.js delete mode 100644 node_modules/prettier-plugin-solidity/tests/config/utils/compile-contract.js delete mode 100644 node_modules/prettier-plugin-solidity/tests/config/utils/consistent-end-of-line.js delete mode 100644 node_modules/prettier-plugin-solidity/tests/config/utils/create-snapshot.js delete mode 100644 node_modules/prettier-plugin-solidity/tests/config/utils/stringify-options-for-title.js delete mode 100644 node_modules/prettier-plugin-solidity/tests/config/utils/visualize-end-of-line.js delete mode 100644 node_modules/prettier-plugin-solidity/tests/config/utils/visualize-range.js delete mode 100644 
node_modules/prettier-plugin-solidity/tests/format/AddressPayable/AddressPayable.sol delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/AddressPayable/__snapshots__/jsfmt.spec.js.snap delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/AddressPayable/jsfmt.spec.js delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/AllSolidityFeatures/AllSolidityFeatures.sol delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/AllSolidityFeatures/__snapshots__/jsfmt.spec.js.snap delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/AllSolidityFeatures/jsfmt.spec.js delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/Arrays/Arrays.sol delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/Arrays/__snapshots__/jsfmt.spec.js.snap delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/Arrays/jsfmt.spec.js delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/Assembly/Assembly.sol delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/Assembly/__snapshots__/jsfmt.spec.js.snap delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/Assembly/jsfmt.spec.js delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/BasicIterator/BasicIterator.sol delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/BasicIterator/__snapshots__/jsfmt.spec.js.snap delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/BasicIterator/jsfmt.spec.js delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/BinaryOperators/BinaryOperators.sol delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/BinaryOperators/Parentheses.sol delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/BinaryOperators/__snapshots__/jsfmt.spec.js.snap delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/BinaryOperators/jsfmt.spec.js delete mode 100644 
node_modules/prettier-plugin-solidity/tests/format/BreakingChangesV0.7.4/BreakingChangesV0.7.4.sol delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/BreakingChangesV0.7.4/__snapshots__/jsfmt.spec.js.snap delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/BreakingChangesV0.7.4/jsfmt.spec.js delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/BreakingChangesV0.8.0/BreakingChangesV0.8.0.sol delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/BreakingChangesV0.8.0/__snapshots__/jsfmt.spec.js.snap delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/BreakingChangesV0.8.0/jsfmt.spec.js delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/Comments/Comments.sol delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/Comments/__snapshots__/jsfmt.spec.js.snap delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/Comments/jsfmt.spec.js delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/Conditional/Conditional.sol delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/Conditional/__snapshots__/jsfmt.spec.js.snap delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/Conditional/jsfmt.spec.js delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/Constructors/Constructors.sol delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/Constructors/__snapshots__/jsfmt.spec.js.snap delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/Constructors/jsfmt.spec.js delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/ContractDefinitions/ContractDefinitions.sol delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/ContractDefinitions/__snapshots__/jsfmt.spec.js.snap delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/ContractDefinitions/jsfmt.spec.js delete mode 100644 
node_modules/prettier-plugin-solidity/tests/format/CustomErrors/CustomErrors.sol delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/CustomErrors/__snapshots__/jsfmt.spec.js.snap delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/CustomErrors/jsfmt.spec.js delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/EnumDefinitions/EnumDefinitions.sol delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/EnumDefinitions/__snapshots__/jsfmt.spec.js.snap delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/EnumDefinitions/jsfmt.spec.js delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/Etc/Etc.sol delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/Etc/__snapshots__/jsfmt.spec.js.snap delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/Etc/jsfmt.spec.js delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/ExplicitVariableTypes/ExplicitVariableTypes.sol delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/ExplicitVariableTypes/__snapshots__/jsfmt.spec.js.snap delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/ExplicitVariableTypes/jsfmt.spec.js delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/ForStatements/ForStatements.sol delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/ForStatements/__snapshots__/jsfmt.spec.js.snap delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/ForStatements/jsfmt.spec.js delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/FunctionCalls/FunctionCalls.sol delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/FunctionCalls/__snapshots__/jsfmt.spec.js.snap delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/FunctionCalls/jsfmt.spec.js delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/FunctionDefinitions/FunctionDefinitions.sol delete 
mode 100644 node_modules/prettier-plugin-solidity/tests/format/FunctionDefinitions/__snapshots__/jsfmt.spec.js.snap delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/FunctionDefinitions/jsfmt.spec.js delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/HexLiteral/HexLiteral.sol delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/HexLiteral/__snapshots__/jsfmt.spec.js.snap delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/HexLiteral/jsfmt.spec.js delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/IfStatements/IfStatements.sol delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/IfStatements/__snapshots__/jsfmt.spec.js.snap delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/IfStatements/jsfmt.spec.js delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/Immutable/Immutable.sol delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/Immutable/__snapshots__/jsfmt.spec.js.snap delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/Immutable/jsfmt.spec.js delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/ImportDirective/ImportDirectives.sol delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/ImportDirective/__snapshots__/jsfmt.spec.js.snap delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/ImportDirective/jsfmt.spec.js delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/Inbox/Inbox.sol delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/Inbox/__snapshots__/jsfmt.spec.js.snap delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/Inbox/jsfmt.spec.js delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/IndexOf/IndexOf.sol delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/IndexOf/__snapshots__/jsfmt.spec.js.snap delete mode 100644 
node_modules/prettier-plugin-solidity/tests/format/IndexOf/jsfmt.spec.js delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/IndexRangeAccess/IndexRangeAccess.sol delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/IndexRangeAccess/__snapshots__/jsfmt.spec.js.snap delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/IndexRangeAccess/jsfmt.spec.js delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/InheritanceSpecifier/InheritanceSpecifier.sol delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/InheritanceSpecifier/__snapshots__/jsfmt.spec.js.snap delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/InheritanceSpecifier/jsfmt.spec.js delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/Issues/Issue205.sol delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/Issues/Issue289.sol delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/Issues/Issue355.sol delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/Issues/Issue385.sol delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/Issues/Issue564.sol delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/Issues/__snapshots__/jsfmt.spec.js.snap delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/Issues/jsfmt.spec.js delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/MemberAccess/MemberAccess.sol delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/MemberAccess/__snapshots__/jsfmt.spec.js.snap delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/MemberAccess/jsfmt.spec.js delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/ModifierDefinitions/ModifierDefinitions.sol delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/ModifierDefinitions/__snapshots__/jsfmt.spec.js.snap delete mode 100644 
node_modules/prettier-plugin-solidity/tests/format/ModifierDefinitions/jsfmt.spec.js delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/ModifierInvocations/ModifierInvocations.sol delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/ModifierInvocations/__snapshots__/jsfmt.spec.js.snap delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/ModifierInvocations/jsfmt.spec.js delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/MultipartStrings/MultipartStrings.sol delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/MultipartStrings/__snapshots__/jsfmt.spec.js.snap delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/MultipartStrings/jsfmt.spec.js delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/NameValueExpression/NameValueExpression.sol delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/NameValueExpression/__snapshots__/jsfmt.spec.js.snap delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/NameValueExpression/jsfmt.spec.js delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/Ownable/Ownable.sol delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/Ownable/__snapshots__/jsfmt.spec.js.snap delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/Ownable/jsfmt.spec.js delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/Parentheses/AddNoParentheses.sol delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/Parentheses/BitAndNoParentheses.sol delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/Parentheses/BitOrNoParentheses.sol delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/Parentheses/BitXorNoParentheses.sol delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/Parentheses/DivNoParentheses.sol delete mode 100644 
node_modules/prettier-plugin-solidity/tests/format/Parentheses/ExpNoParentheses.sol delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/Parentheses/LogicNoParentheses.sol delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/Parentheses/ModNoParentheses.sol delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/Parentheses/MulNoParentheses.sol delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/Parentheses/ShiftLNoParentheses.sol delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/Parentheses/ShiftRNoParentheses.sol delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/Parentheses/SubNoParentheses.sol delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/Parentheses/__snapshots__/jsfmt.spec.js.snap delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/Parentheses/jsfmt.spec.js delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/Pragma/Pragma.sol delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/Pragma/__snapshots__/jsfmt.spec.js.snap delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/Pragma/jsfmt.spec.js delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/PrettierIgnore/PrettierIgnore.sol delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/PrettierIgnore/__snapshots__/jsfmt.spec.js.snap delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/PrettierIgnore/jsfmt.spec.js delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/Proxy/Proxy.sol delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/Proxy/__snapshots__/jsfmt.spec.js.snap delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/Proxy/jsfmt.spec.js delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/RespectDefaultOptions/__snapshots__/jsfmt.spec.js.snap delete mode 100644 
node_modules/prettier-plugin-solidity/tests/format/RespectDefaultOptions/jsfmt.spec.js delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/RespectDefaultOptions/respect-default-options.js delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/SampleCrowdsale/SampleCrowdsale.sol delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/SampleCrowdsale/__snapshots__/jsfmt.spec.js.snap delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/SampleCrowdsale/jsfmt.spec.js delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/SimpleAuction/SimpleAuction.sol delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/SimpleAuction/__snapshots__/jsfmt.spec.js.snap delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/SimpleAuction/jsfmt.spec.js delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/SimpleStorage/SimpleStorage.sol delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/SimpleStorage/__snapshots__/jsfmt.spec.js.snap delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/SimpleStorage/jsfmt.spec.js delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/SplittableCommodity/SplittableCommodity.sol delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/SplittableCommodity/__snapshots__/jsfmt.spec.js.snap delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/SplittableCommodity/jsfmt.spec.js delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/StateVariableDeclarations/StateVariableDeclarations.sol delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/StateVariableDeclarations/__snapshots__/jsfmt.spec.js.snap delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/StateVariableDeclarations/jsfmt.spec.js delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/StringLiteral/StringLiteral.sol delete mode 100644 
node_modules/prettier-plugin-solidity/tests/format/StringLiteral/__snapshots__/jsfmt.spec.js.snap delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/StringLiteral/jsfmt.spec.js delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/StyleGuide/BlankLines.sol delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/StyleGuide/ControlStructures.sol delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/StyleGuide/FunctionDeclaration.sol delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/StyleGuide/Mappings.sol delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/StyleGuide/MaximumLineLength.sol delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/StyleGuide/OtherRecommendations.sol delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/StyleGuide/VariableDeclarations.sol delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/StyleGuide/WhitespaceInExpressions.sol delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/StyleGuide/__snapshots__/jsfmt.spec.js.snap delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/StyleGuide/jsfmt.spec.js delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/TryCatch/TryCatch.sol delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/TryCatch/__snapshots__/jsfmt.spec.js.snap delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/TryCatch/jsfmt.spec.js delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/Tupples/Tupples.sol delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/Tupples/__snapshots__/jsfmt.spec.js.snap delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/Tupples/jsfmt.spec.js delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/TypeDefinition/TypeDefinition.sol delete mode 100644 
node_modules/prettier-plugin-solidity/tests/format/TypeDefinition/__snapshots__/jsfmt.spec.js.snap delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/TypeDefinition/jsfmt.spec.js delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/WhileStatements/WhileStatements.sol delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/WhileStatements/__snapshots__/jsfmt.spec.js.snap delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/WhileStatements/jsfmt.spec.js delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/WrongCompiler/WrongCompiler.sol delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/WrongCompiler/__snapshots__/jsfmt.spec.js.snap delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/WrongCompiler/jsfmt.spec.js delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/quotes/Quotes.sol delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/quotes/__snapshots__/jsfmt.spec.js.snap delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/quotes/jsfmt.spec.js delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/strings/__snapshots__/jsfmt.spec.js.snap delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/strings/jsfmt.spec.js delete mode 100644 node_modules/prettier-plugin-solidity/tests/format/strings/strings.sol delete mode 100644 node_modules/prettier-plugin-solidity/tests/unit/binary-operator-printers/__snapshots__/index.test.js.snap delete mode 100644 node_modules/prettier-plugin-solidity/tests/unit/binary-operator-printers/index.test.js delete mode 100644 node_modules/prettier-plugin-solidity/tests/unit/comments/printer.test.js delete mode 100644 node_modules/prettier-plugin-solidity/tests/unit/prettier-version.test.js delete mode 100644 node_modules/prettier-plugin-solidity/tests/unit/printer.test.js delete mode 100644 node_modules/prettier/LICENSE delete mode 100644 
node_modules/prettier/README.md delete mode 100755 node_modules/prettier/bin-prettier.js delete mode 100644 node_modules/prettier/doc.js delete mode 100644 node_modules/prettier/esm/parser-angular.mjs delete mode 100644 node_modules/prettier/esm/parser-babel.mjs delete mode 100644 node_modules/prettier/esm/parser-espree.mjs delete mode 100644 node_modules/prettier/esm/parser-flow.mjs delete mode 100644 node_modules/prettier/esm/parser-glimmer.mjs delete mode 100644 node_modules/prettier/esm/parser-graphql.mjs delete mode 100644 node_modules/prettier/esm/parser-html.mjs delete mode 100644 node_modules/prettier/esm/parser-markdown.mjs delete mode 100644 node_modules/prettier/esm/parser-meriyah.mjs delete mode 100644 node_modules/prettier/esm/parser-postcss.mjs delete mode 100644 node_modules/prettier/esm/parser-typescript.mjs delete mode 100644 node_modules/prettier/esm/parser-yaml.mjs delete mode 100644 node_modules/prettier/esm/standalone.mjs delete mode 100644 node_modules/prettier/index.js delete mode 100644 node_modules/prettier/package.json delete mode 100644 node_modules/prettier/parser-angular.js delete mode 100644 node_modules/prettier/parser-babel.js delete mode 100644 node_modules/prettier/parser-espree.js delete mode 100644 node_modules/prettier/parser-flow.js delete mode 100644 node_modules/prettier/parser-glimmer.js delete mode 100644 node_modules/prettier/parser-graphql.js delete mode 100644 node_modules/prettier/parser-html.js delete mode 100644 node_modules/prettier/parser-markdown.js delete mode 100644 node_modules/prettier/parser-meriyah.js delete mode 100644 node_modules/prettier/parser-postcss.js delete mode 100644 node_modules/prettier/parser-typescript.js delete mode 100644 node_modules/prettier/parser-yaml.js delete mode 100644 node_modules/prettier/standalone.js delete mode 100644 node_modules/prettier/third-party.js delete mode 100644 node_modules/semver/CHANGELOG.md delete mode 100644 node_modules/semver/LICENSE delete mode 100644 
node_modules/semver/README.md delete mode 100755 node_modules/semver/bin/semver.js delete mode 100644 node_modules/semver/classes/comparator.js delete mode 100644 node_modules/semver/classes/index.js delete mode 100644 node_modules/semver/classes/range.js delete mode 100644 node_modules/semver/classes/semver.js delete mode 100644 node_modules/semver/functions/clean.js delete mode 100644 node_modules/semver/functions/cmp.js delete mode 100644 node_modules/semver/functions/coerce.js delete mode 100644 node_modules/semver/functions/compare-build.js delete mode 100644 node_modules/semver/functions/compare-loose.js delete mode 100644 node_modules/semver/functions/compare.js delete mode 100644 node_modules/semver/functions/diff.js delete mode 100644 node_modules/semver/functions/eq.js delete mode 100644 node_modules/semver/functions/gt.js delete mode 100644 node_modules/semver/functions/gte.js delete mode 100644 node_modules/semver/functions/inc.js delete mode 100644 node_modules/semver/functions/lt.js delete mode 100644 node_modules/semver/functions/lte.js delete mode 100644 node_modules/semver/functions/major.js delete mode 100644 node_modules/semver/functions/minor.js delete mode 100644 node_modules/semver/functions/neq.js delete mode 100644 node_modules/semver/functions/parse.js delete mode 100644 node_modules/semver/functions/patch.js delete mode 100644 node_modules/semver/functions/prerelease.js delete mode 100644 node_modules/semver/functions/rcompare.js delete mode 100644 node_modules/semver/functions/rsort.js delete mode 100644 node_modules/semver/functions/satisfies.js delete mode 100644 node_modules/semver/functions/sort.js delete mode 100644 node_modules/semver/functions/valid.js delete mode 100644 node_modules/semver/index.js delete mode 100644 node_modules/semver/internal/constants.js delete mode 100644 node_modules/semver/internal/debug.js delete mode 100644 node_modules/semver/internal/identifiers.js delete mode 100644 
node_modules/semver/internal/parse-options.js delete mode 100644 node_modules/semver/internal/re.js delete mode 100644 node_modules/semver/package.json delete mode 100644 node_modules/semver/preload.js delete mode 100644 node_modules/semver/range.bnf delete mode 100644 node_modules/semver/ranges/gtr.js delete mode 100644 node_modules/semver/ranges/intersects.js delete mode 100644 node_modules/semver/ranges/ltr.js delete mode 100644 node_modules/semver/ranges/max-satisfying.js delete mode 100644 node_modules/semver/ranges/min-satisfying.js delete mode 100644 node_modules/semver/ranges/min-version.js delete mode 100644 node_modules/semver/ranges/outside.js delete mode 100644 node_modules/semver/ranges/simplify.js delete mode 100644 node_modules/semver/ranges/subset.js delete mode 100644 node_modules/semver/ranges/to-comparators.js delete mode 100644 node_modules/semver/ranges/valid.js delete mode 100644 node_modules/solidity-comments-extractor/.eslintrc delete mode 100644 node_modules/solidity-comments-extractor/.github/workflows/CI.yml delete mode 100644 node_modules/solidity-comments-extractor/.nvmrc delete mode 100644 node_modules/solidity-comments-extractor/.prettierignore delete mode 100644 node_modules/solidity-comments-extractor/.prettierrc delete mode 100644 node_modules/solidity-comments-extractor/README.md delete mode 100644 node_modules/solidity-comments-extractor/index.js delete mode 100644 node_modules/solidity-comments-extractor/package.json delete mode 100644 node_modules/solidity-comments-extractor/test/test.js delete mode 100644 node_modules/string-width/index.d.ts delete mode 100644 node_modules/string-width/index.js delete mode 100644 node_modules/string-width/license delete mode 100644 node_modules/string-width/node_modules/emoji-regex/LICENSE-MIT.txt delete mode 100644 node_modules/string-width/node_modules/emoji-regex/README.md delete mode 100644 node_modules/string-width/node_modules/emoji-regex/es2015/index.js delete mode 100644 
node_modules/string-width/node_modules/emoji-regex/es2015/text.js delete mode 100644 node_modules/string-width/node_modules/emoji-regex/index.d.ts delete mode 100644 node_modules/string-width/node_modules/emoji-regex/index.js delete mode 100644 node_modules/string-width/node_modules/emoji-regex/package.json delete mode 100644 node_modules/string-width/node_modules/emoji-regex/text.js delete mode 100644 node_modules/string-width/package.json delete mode 100644 node_modules/string-width/readme.md delete mode 100644 node_modules/strip-ansi/index.d.ts delete mode 100644 node_modules/strip-ansi/index.js delete mode 100644 node_modules/strip-ansi/license delete mode 100644 node_modules/strip-ansi/package.json delete mode 100644 node_modules/strip-ansi/readme.md delete mode 100644 node_modules/yallist/LICENSE delete mode 100644 node_modules/yallist/README.md delete mode 100644 node_modules/yallist/iterator.js delete mode 100644 node_modules/yallist/package.json delete mode 100644 node_modules/yallist/yallist.js delete mode 100644 package.json delete mode 100644 src/interfaces/IHelios.sol create mode 100644 src/libraries/Math2.sol create mode 100644 src/libraries/SafeTransferLib.sol delete mode 100644 src/swappers/XYKswapper.sol delete mode 100644 src/test/Helios.t.sol create mode 100644 src/utils/ERC6909.sol create mode 100644 src/utils/ReentrancyGuard.sol create mode 100644 test/Helios.t.sol delete mode 100644 yarn.lock diff --git a/.gas-snapshot b/.gas-snapshot index 06d3ee1..c27caae 100644 --- a/.gas-snapshot +++ b/.gas-snapshot @@ -1,5 +1 @@ -HeliosTest:testHeliosCreation() (gas: 2862816) -HeliosTest:testXYKpairCreation() (gas: 233762) -HeliosTest:testXYKpairMultiHop(uint256) (runs: 256, μ: 56713, ~: 22944) -HeliosTest:testXYKpairNoFeeInvariance(uint256) (runs: 256, μ: 74197, ~: 7765) -HeliosTest:testXYKpairSwap(uint256) (runs: 256, μ: 21965, ~: 15453) +HeliosTest:testDeploy() (gas: 1681350) \ No newline at end of file diff --git a/.github/workflows/ci.yml 
b/.github/workflows/ci.yml new file mode 100644 index 0000000..19e5634 --- /dev/null +++ b/.github/workflows/ci.yml @@ -0,0 +1,36 @@ +name: ci + +on: [push] + +jobs: + tests: + name: Forge Testing + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + with: + submodules: recursive + - uses: foundry-rs/foundry-toolchain@v1 + with: + version: nightly + - name: dependencies + run: forge install + - name: tests + run: forge test + + snapshot: + name: Forge Snapshot + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + with: + submodules: recursive + - uses: foundry-rs/foundry-toolchain@v1 + with: + version: nightly + - name: dependencies + run: forge install + - name: check contract sizes + run: forge build --sizes + - name: check gas snapshots + run: forge snapshot --check \ No newline at end of file diff --git a/.github/workflows/lints.yml b/.github/workflows/lints.yml deleted file mode 100644 index 7a566f2..0000000 --- a/.github/workflows/lints.yml +++ /dev/null @@ -1,23 +0,0 @@ -name: Lints - -on: [push] - -jobs: - lints: - runs-on: ubuntu-latest - - steps: - - uses: actions/checkout@v2 - - uses: actions/setup-node@v2 - - - name: Install Foundry - uses: onbjerg/foundry-toolchain@v1 - with: - version: nightly - - - name: Install Dependencies - run: yarn - - name: Update Modules - run: yarn update - - name: Run lints - run: yarn lint \ No newline at end of file diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml deleted file mode 100644 index 7255d95..0000000 --- a/.github/workflows/tests.yml +++ /dev/null @@ -1,21 +0,0 @@ -name: Tests - -on: [push] - -jobs: - tests: - runs-on: ubuntu-latest - - steps: - - uses: actions/checkout@v2 - - uses: actions/setup-node@v2 - - name: Install Foundry - uses: onbjerg/foundry-toolchain@v1 - with: - version: nightly - - name: Install Dependencies - run: yarn - - name: Update Modules - run: yarn update - - name: Run Tests - run: FOUNDRY_PROFILE=ci yarn test \ No newline at end of file diff 
--git a/.gitignore b/.gitignore index 5dfe93f..bf3d16b 100644 --- a/.gitignore +++ b/.gitignore @@ -1,3 +1,2 @@ -/cache -/node_modules -/out \ No newline at end of file +out +cache \ No newline at end of file diff --git a/.gitmodules b/.gitmodules index 1de6689..0f07815 100644 --- a/.gitmodules +++ b/.gitmodules @@ -1,9 +1,6 @@ -[submodule "lib/ds-test"] - path = lib/ds-test - url = https://github.com/dapphub/ds-test [submodule "lib/forge-std"] path = lib/forge-std url = https://github.com/foundry-rs/forge-std -[submodule "lib/solbase"] - path = lib/solbase - url = https://github.com/Sol-DAO/solbase +[submodule "lib/solady"] + path = lib/solady + url = https://github.com/vectorized/solady diff --git a/.gitpod.yml b/.gitpod.yml deleted file mode 100644 index 7169b91..0000000 --- a/.gitpod.yml +++ /dev/null @@ -1,6 +0,0 @@ -# This configuration file was automatically generated by Gitpod. -# Please adjust to your needs (see https://www.gitpod.io/docs/config-gitpod-file) -# and commit this file to your remote git repository to share the goodness with others. - -tasks: - - init: curl -L https://foundry.paradigm.xyz | bash && source /home/gitpod/.bashrc && foundryup \ No newline at end of file diff --git a/LICENSE b/LICENSE index 82e0bbc..29ebfa5 100644 --- a/LICENSE +++ b/LICENSE @@ -1,21 +1,661 @@ -MIT License - -Copyright (c) 2022 SolDAO. - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. 
- -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. \ No newline at end of file + GNU AFFERO GENERAL PUBLIC LICENSE + Version 3, 19 November 2007 + + Copyright (C) 2007 Free Software Foundation, Inc. + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. + + Preamble + + The GNU Affero General Public License is a free, copyleft license for +software and other kinds of works, specifically designed to ensure +cooperation with the community in the case of network server software. + + The licenses for most software and other practical works are designed +to take away your freedom to share and change the works. By contrast, +our General Public Licenses are intended to guarantee your freedom to +share and change all versions of a program--to make sure it remains free +software for all its users. + + When we speak of free software, we are referring to freedom, not +price. Our General Public Licenses are designed to make sure that you +have the freedom to distribute copies of free software (and charge for +them if you wish), that you receive source code or can get it if you +want it, that you can change the software or use pieces of it in new +free programs, and that you know you can do these things. + + Developers that use our General Public Licenses protect your rights +with two steps: (1) assert copyright on the software, and (2) offer +you this License which gives you legal permission to copy, distribute +and/or modify the software. 
+ + A secondary benefit of defending all users' freedom is that +improvements made in alternate versions of the program, if they +receive widespread use, become available for other developers to +incorporate. Many developers of free software are heartened and +encouraged by the resulting cooperation. However, in the case of +software used on network servers, this result may fail to come about. +The GNU General Public License permits making a modified version and +letting the public access it on a server without ever releasing its +source code to the public. + + The GNU Affero General Public License is designed specifically to +ensure that, in such cases, the modified source code becomes available +to the community. It requires the operator of a network server to +provide the source code of the modified version running there to the +users of that server. Therefore, public use of a modified version, on +a publicly accessible server, gives the public access to the source +code of the modified version. + + An older license, called the Affero General Public License and +published by Affero, was designed to accomplish similar goals. This is +a different license, not a version of the Affero GPL, but Affero has +released a new version of the Affero GPL which permits relicensing under +this license. + + The precise terms and conditions for copying, distribution and +modification follow. + + TERMS AND CONDITIONS + + 0. Definitions. + + "This License" refers to version 3 of the GNU Affero General Public License. + + "Copyright" also means copyright-like laws that apply to other kinds of +works, such as semiconductor masks. + + "The Program" refers to any copyrightable work licensed under this +License. Each licensee is addressed as "you". "Licensees" and +"recipients" may be individuals or organizations. + + To "modify" a work means to copy from or adapt all or part of the work +in a fashion requiring copyright permission, other than the making of an +exact copy. 
The resulting work is called a "modified version" of the +earlier work or a work "based on" the earlier work. + + A "covered work" means either the unmodified Program or a work based +on the Program. + + To "propagate" a work means to do anything with it that, without +permission, would make you directly or secondarily liable for +infringement under applicable copyright law, except executing it on a +computer or modifying a private copy. Propagation includes copying, +distribution (with or without modification), making available to the +public, and in some countries other activities as well. + + To "convey" a work means any kind of propagation that enables other +parties to make or receive copies. Mere interaction with a user through +a computer network, with no transfer of a copy, is not conveying. + + An interactive user interface displays "Appropriate Legal Notices" +to the extent that it includes a convenient and prominently visible +feature that (1) displays an appropriate copyright notice, and (2) +tells the user that there is no warranty for the work (except to the +extent that warranties are provided), that licensees may convey the +work under this License, and how to view a copy of this License. If +the interface presents a list of user commands or options, such as a +menu, a prominent item in the list meets this criterion. + + 1. Source Code. + + The "source code" for a work means the preferred form of the work +for making modifications to it. "Object code" means any non-source +form of a work. + + A "Standard Interface" means an interface that either is an official +standard defined by a recognized standards body, or, in the case of +interfaces specified for a particular programming language, one that +is widely used among developers working in that language. 
+ + The "System Libraries" of an executable work include anything, other +than the work as a whole, that (a) is included in the normal form of +packaging a Major Component, but which is not part of that Major +Component, and (b) serves only to enable use of the work with that +Major Component, or to implement a Standard Interface for which an +implementation is available to the public in source code form. A +"Major Component", in this context, means a major essential component +(kernel, window system, and so on) of the specific operating system +(if any) on which the executable work runs, or a compiler used to +produce the work, or an object code interpreter used to run it. + + The "Corresponding Source" for a work in object code form means all +the source code needed to generate, install, and (for an executable +work) run the object code and to modify the work, including scripts to +control those activities. However, it does not include the work's +System Libraries, or general-purpose tools or generally available free +programs which are used unmodified in performing those activities but +which are not part of the work. For example, Corresponding Source +includes interface definition files associated with source files for +the work, and the source code for shared libraries and dynamically +linked subprograms that the work is specifically designed to require, +such as by intimate data communication or control flow between those +subprograms and other parts of the work. + + The Corresponding Source need not include anything that users +can regenerate automatically from other parts of the Corresponding +Source. + + The Corresponding Source for a work in source code form is that +same work. + + 2. Basic Permissions. + + All rights granted under this License are granted for the term of +copyright on the Program, and are irrevocable provided the stated +conditions are met. This License explicitly affirms your unlimited +permission to run the unmodified Program. 
The output from running a +covered work is covered by this License only if the output, given its +content, constitutes a covered work. This License acknowledges your +rights of fair use or other equivalent, as provided by copyright law. + + You may make, run and propagate covered works that you do not +convey, without conditions so long as your license otherwise remains +in force. You may convey covered works to others for the sole purpose +of having them make modifications exclusively for you, or provide you +with facilities for running those works, provided that you comply with +the terms of this License in conveying all material for which you do +not control copyright. Those thus making or running the covered works +for you must do so exclusively on your behalf, under your direction +and control, on terms that prohibit them from making any copies of +your copyrighted material outside their relationship with you. + + Conveying under any other circumstances is permitted solely under +the conditions stated below. Sublicensing is not allowed; section 10 +makes it unnecessary. + + 3. Protecting Users' Legal Rights From Anti-Circumvention Law. + + No covered work shall be deemed part of an effective technological +measure under any applicable law fulfilling obligations under article +11 of the WIPO copyright treaty adopted on 20 December 1996, or +similar laws prohibiting or restricting circumvention of such +measures. + + When you convey a covered work, you waive any legal power to forbid +circumvention of technological measures to the extent such circumvention +is effected by exercising rights under this License with respect to +the covered work, and you disclaim any intention to limit operation or +modification of the work as a means of enforcing, against the work's +users, your or third parties' legal rights to forbid circumvention of +technological measures. + + 4. Conveying Verbatim Copies. 
+ + You may convey verbatim copies of the Program's source code as you +receive it, in any medium, provided that you conspicuously and +appropriately publish on each copy an appropriate copyright notice; +keep intact all notices stating that this License and any +non-permissive terms added in accord with section 7 apply to the code; +keep intact all notices of the absence of any warranty; and give all +recipients a copy of this License along with the Program. + + You may charge any price or no price for each copy that you convey, +and you may offer support or warranty protection for a fee. + + 5. Conveying Modified Source Versions. + + You may convey a work based on the Program, or the modifications to +produce it from the Program, in the form of source code under the +terms of section 4, provided that you also meet all of these conditions: + + a) The work must carry prominent notices stating that you modified + it, and giving a relevant date. + + b) The work must carry prominent notices stating that it is + released under this License and any conditions added under section + 7. This requirement modifies the requirement in section 4 to + "keep intact all notices". + + c) You must license the entire work, as a whole, under this + License to anyone who comes into possession of a copy. This + License will therefore apply, along with any applicable section 7 + additional terms, to the whole of the work, and all its parts, + regardless of how they are packaged. This License gives no + permission to license the work in any other way, but it does not + invalidate such permission if you have separately received it. + + d) If the work has interactive user interfaces, each must display + Appropriate Legal Notices; however, if the Program has interactive + interfaces that do not display Appropriate Legal Notices, your + work need not make them do so. 
+ + A compilation of a covered work with other separate and independent +works, which are not by their nature extensions of the covered work, +and which are not combined with it such as to form a larger program, +in or on a volume of a storage or distribution medium, is called an +"aggregate" if the compilation and its resulting copyright are not +used to limit the access or legal rights of the compilation's users +beyond what the individual works permit. Inclusion of a covered work +in an aggregate does not cause this License to apply to the other +parts of the aggregate. + + 6. Conveying Non-Source Forms. + + You may convey a covered work in object code form under the terms +of sections 4 and 5, provided that you also convey the +machine-readable Corresponding Source under the terms of this License, +in one of these ways: + + a) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by the + Corresponding Source fixed on a durable physical medium + customarily used for software interchange. + + b) Convey the object code in, or embodied in, a physical product + (including a physical distribution medium), accompanied by a + written offer, valid for at least three years and valid for as + long as you offer spare parts or customer support for that product + model, to give anyone who possesses the object code either (1) a + copy of the Corresponding Source for all the software in the + product that is covered by this License, on a durable physical + medium customarily used for software interchange, for a price no + more than your reasonable cost of physically performing this + conveying of source, or (2) access to copy the + Corresponding Source from a network server at no charge. + + c) Convey individual copies of the object code with a copy of the + written offer to provide the Corresponding Source. 
This + alternative is allowed only occasionally and noncommercially, and + only if you received the object code with such an offer, in accord + with subsection 6b. + + d) Convey the object code by offering access from a designated + place (gratis or for a charge), and offer equivalent access to the + Corresponding Source in the same way through the same place at no + further charge. You need not require recipients to copy the + Corresponding Source along with the object code. If the place to + copy the object code is a network server, the Corresponding Source + may be on a different server (operated by you or a third party) + that supports equivalent copying facilities, provided you maintain + clear directions next to the object code saying where to find the + Corresponding Source. Regardless of what server hosts the + Corresponding Source, you remain obligated to ensure that it is + available for as long as needed to satisfy these requirements. + + e) Convey the object code using peer-to-peer transmission, provided + you inform other peers where the object code and Corresponding + Source of the work are being offered to the general public at no + charge under subsection 6d. + + A separable portion of the object code, whose source code is excluded +from the Corresponding Source as a System Library, need not be +included in conveying the object code work. + + A "User Product" is either (1) a "consumer product", which means any +tangible personal property which is normally used for personal, family, +or household purposes, or (2) anything designed or sold for incorporation +into a dwelling. In determining whether a product is a consumer product, +doubtful cases shall be resolved in favor of coverage. 
For a particular +product received by a particular user, "normally used" refers to a +typical or common use of that class of product, regardless of the status +of the particular user or of the way in which the particular user +actually uses, or expects or is expected to use, the product. A product +is a consumer product regardless of whether the product has substantial +commercial, industrial or non-consumer uses, unless such uses represent +the only significant mode of use of the product. + + "Installation Information" for a User Product means any methods, +procedures, authorization keys, or other information required to install +and execute modified versions of a covered work in that User Product from +a modified version of its Corresponding Source. The information must +suffice to ensure that the continued functioning of the modified object +code is in no case prevented or interfered with solely because +modification has been made. + + If you convey an object code work under this section in, or with, or +specifically for use in, a User Product, and the conveying occurs as +part of a transaction in which the right of possession and use of the +User Product is transferred to the recipient in perpetuity or for a +fixed term (regardless of how the transaction is characterized), the +Corresponding Source conveyed under this section must be accompanied +by the Installation Information. But this requirement does not apply +if neither you nor any third party retains the ability to install +modified object code on the User Product (for example, the work has +been installed in ROM). + + The requirement to provide Installation Information does not include a +requirement to continue to provide support service, warranty, or updates +for a work that has been modified or installed by the recipient, or for +the User Product in which it has been modified or installed. 
Access to a +network may be denied when the modification itself materially and +adversely affects the operation of the network or violates the rules and +protocols for communication across the network. + + Corresponding Source conveyed, and Installation Information provided, +in accord with this section must be in a format that is publicly +documented (and with an implementation available to the public in +source code form), and must require no special password or key for +unpacking, reading or copying. + + 7. Additional Terms. + + "Additional permissions" are terms that supplement the terms of this +License by making exceptions from one or more of its conditions. +Additional permissions that are applicable to the entire Program shall +be treated as though they were included in this License, to the extent +that they are valid under applicable law. If additional permissions +apply only to part of the Program, that part may be used separately +under those permissions, but the entire Program remains governed by +this License without regard to the additional permissions. + + When you convey a copy of a covered work, you may at your option +remove any additional permissions from that copy, or from any part of +it. (Additional permissions may be written to require their own +removal in certain cases when you modify the work.) You may place +additional permissions on material, added by you to a covered work, +for which you have or can give appropriate copyright permission. 
+ + Notwithstanding any other provision of this License, for material you +add to a covered work, you may (if authorized by the copyright holders of +that material) supplement the terms of this License with terms: + + a) Disclaiming warranty or limiting liability differently from the + terms of sections 15 and 16 of this License; or + + b) Requiring preservation of specified reasonable legal notices or + author attributions in that material or in the Appropriate Legal + Notices displayed by works containing it; or + + c) Prohibiting misrepresentation of the origin of that material, or + requiring that modified versions of such material be marked in + reasonable ways as different from the original version; or + + d) Limiting the use for publicity purposes of names of licensors or + authors of the material; or + + e) Declining to grant rights under trademark law for use of some + trade names, trademarks, or service marks; or + + f) Requiring indemnification of licensors and authors of that + material by anyone who conveys the material (or modified versions of + it) with contractual assumptions of liability to the recipient, for + any liability that these contractual assumptions directly impose on + those licensors and authors. + + All other non-permissive additional terms are considered "further +restrictions" within the meaning of section 10. If the Program as you +received it, or any part of it, contains a notice stating that it is +governed by this License along with a term that is a further +restriction, you may remove that term. If a license document contains +a further restriction but permits relicensing or conveying under this +License, you may add to a covered work material governed by the terms +of that license document, provided that the further restriction does +not survive such relicensing or conveying. 
+ + If you add terms to a covered work in accord with this section, you +must place, in the relevant source files, a statement of the +additional terms that apply to those files, or a notice indicating +where to find the applicable terms. + + Additional terms, permissive or non-permissive, may be stated in the +form of a separately written license, or stated as exceptions; +the above requirements apply either way. + + 8. Termination. + + You may not propagate or modify a covered work except as expressly +provided under this License. Any attempt otherwise to propagate or +modify it is void, and will automatically terminate your rights under +this License (including any patent licenses granted under the third +paragraph of section 11). + + However, if you cease all violation of this License, then your +license from a particular copyright holder is reinstated (a) +provisionally, unless and until the copyright holder explicitly and +finally terminates your license, and (b) permanently, if the copyright +holder fails to notify you of the violation by some reasonable means +prior to 60 days after the cessation. + + Moreover, your license from a particular copyright holder is +reinstated permanently if the copyright holder notifies you of the +violation by some reasonable means, this is the first time you have +received notice of violation of this License (for any work) from that +copyright holder, and you cure the violation prior to 30 days after +your receipt of the notice. + + Termination of your rights under this section does not terminate the +licenses of parties who have received copies or rights from you under +this License. If your rights have been terminated and not permanently +reinstated, you do not qualify to receive new licenses for the same +material under section 10. + + 9. Acceptance Not Required for Having Copies. + + You are not required to accept this License in order to receive or +run a copy of the Program. 
Ancillary propagation of a covered work +occurring solely as a consequence of using peer-to-peer transmission +to receive a copy likewise does not require acceptance. However, +nothing other than this License grants you permission to propagate or +modify any covered work. These actions infringe copyright if you do +not accept this License. Therefore, by modifying or propagating a +covered work, you indicate your acceptance of this License to do so. + + 10. Automatic Licensing of Downstream Recipients. + + Each time you convey a covered work, the recipient automatically +receives a license from the original licensors, to run, modify and +propagate that work, subject to this License. You are not responsible +for enforcing compliance by third parties with this License. + + An "entity transaction" is a transaction transferring control of an +organization, or substantially all assets of one, or subdividing an +organization, or merging organizations. If propagation of a covered +work results from an entity transaction, each party to that +transaction who receives a copy of the work also receives whatever +licenses to the work the party's predecessor in interest had or could +give under the previous paragraph, plus a right to possession of the +Corresponding Source of the work from the predecessor in interest, if +the predecessor has it or can get it with reasonable efforts. + + You may not impose any further restrictions on the exercise of the +rights granted or affirmed under this License. For example, you may +not impose a license fee, royalty, or other charge for exercise of +rights granted under this License, and you may not initiate litigation +(including a cross-claim or counterclaim in a lawsuit) alleging that +any patent claim is infringed by making, using, selling, offering for +sale, or importing the Program or any portion of it. + + 11. Patents. 
+ + A "contributor" is a copyright holder who authorizes use under this +License of the Program or a work on which the Program is based. The +work thus licensed is called the contributor's "contributor version". + + A contributor's "essential patent claims" are all patent claims +owned or controlled by the contributor, whether already acquired or +hereafter acquired, that would be infringed by some manner, permitted +by this License, of making, using, or selling its contributor version, +but do not include claims that would be infringed only as a +consequence of further modification of the contributor version. For +purposes of this definition, "control" includes the right to grant +patent sublicenses in a manner consistent with the requirements of +this License. + + Each contributor grants you a non-exclusive, worldwide, royalty-free +patent license under the contributor's essential patent claims, to +make, use, sell, offer for sale, import and otherwise run, modify and +propagate the contents of its contributor version. + + In the following three paragraphs, a "patent license" is any express +agreement or commitment, however denominated, not to enforce a patent +(such as an express permission to practice a patent or covenant not to +sue for patent infringement). To "grant" such a patent license to a +party means to make such an agreement or commitment not to enforce a +patent against the party. 
+ + If you convey a covered work, knowingly relying on a patent license, +and the Corresponding Source of the work is not available for anyone +to copy, free of charge and under the terms of this License, through a +publicly available network server or other readily accessible means, +then you must either (1) cause the Corresponding Source to be so +available, or (2) arrange to deprive yourself of the benefit of the +patent license for this particular work, or (3) arrange, in a manner +consistent with the requirements of this License, to extend the patent +license to downstream recipients. "Knowingly relying" means you have +actual knowledge that, but for the patent license, your conveying the +covered work in a country, or your recipient's use of the covered work +in a country, would infringe one or more identifiable patents in that +country that you have reason to believe are valid. + + If, pursuant to or in connection with a single transaction or +arrangement, you convey, or propagate by procuring conveyance of, a +covered work, and grant a patent license to some of the parties +receiving the covered work authorizing them to use, propagate, modify +or convey a specific copy of the covered work, then the patent license +you grant is automatically extended to all recipients of the covered +work and works based on it. + + A patent license is "discriminatory" if it does not include within +the scope of its coverage, prohibits the exercise of, or is +conditioned on the non-exercise of one or more of the rights that are +specifically granted under this License. 
You may not convey a covered +work if you are a party to an arrangement with a third party that is +in the business of distributing software, under which you make payment +to the third party based on the extent of your activity of conveying +the work, and under which the third party grants, to any of the +parties who would receive the covered work from you, a discriminatory +patent license (a) in connection with copies of the covered work +conveyed by you (or copies made from those copies), or (b) primarily +for and in connection with specific products or compilations that +contain the covered work, unless you entered into that arrangement, +or that patent license was granted, prior to 28 March 2007. + + Nothing in this License shall be construed as excluding or limiting +any implied license or other defenses to infringement that may +otherwise be available to you under applicable patent law. + + 12. No Surrender of Others' Freedom. + + If conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. If you cannot convey a +covered work so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you may +not convey it at all. For example, if you agree to terms that obligate you +to collect a royalty for further conveying from those to whom you convey +the Program, the only way you could satisfy both those terms and this +License would be to refrain entirely from conveying the Program. + + 13. Remote Network Interaction; Use with the GNU General Public License. 
+ + Notwithstanding any other provision of this License, if you modify the +Program, your modified version must prominently offer all users +interacting with it remotely through a computer network (if your version +supports such interaction) an opportunity to receive the Corresponding +Source of your version by providing access to the Corresponding Source +from a network server at no charge, through some standard or customary +means of facilitating copying of software. This Corresponding Source +shall include the Corresponding Source for any work covered by version 3 +of the GNU General Public License that is incorporated pursuant to the +following paragraph. + + Notwithstanding any other provision of this License, you have +permission to link or combine any covered work with a work licensed +under version 3 of the GNU General Public License into a single +combined work, and to convey the resulting work. The terms of this +License will continue to apply to the part which is the covered work, +but the work with which it is combined will remain governed by version +3 of the GNU General Public License. + + 14. Revised Versions of this License. + + The Free Software Foundation may publish revised and/or new versions of +the GNU Affero General Public License from time to time. Such new versions +will be similar in spirit to the present version, but may differ in detail to +address new problems or concerns. + + Each version is given a distinguishing version number. If the +Program specifies that a certain numbered version of the GNU Affero General +Public License "or any later version" applies to it, you have the +option of following the terms and conditions either of that numbered +version or of any later version published by the Free Software +Foundation. If the Program does not specify a version number of the +GNU Affero General Public License, you may choose any version ever published +by the Free Software Foundation. 
+ + If the Program specifies that a proxy can decide which future +versions of the GNU Affero General Public License can be used, that proxy's +public statement of acceptance of a version permanently authorizes you +to choose that version for the Program. + + Later license versions may give you additional or different +permissions. However, no additional obligations are imposed on any +author or copyright holder as a result of your choosing to follow a +later version. + + 15. Disclaimer of Warranty. + + THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY +APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT +HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY +OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, +THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM +IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF +ALL NECESSARY SERVICING, REPAIR OR CORRECTION. + + 16. Limitation of Liability. + + IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING +WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS +THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY +GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE +USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF +DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD +PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), +EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF +SUCH DAMAGES. + + 17. Interpretation of Sections 15 and 16. 
+ + If the disclaimer of warranty and limitation of liability provided +above cannot be given local legal effect according to their terms, +reviewing courts shall apply local law that most closely approximates +an absolute waiver of all civil liability in connection with the +Program, unless a warranty or assumption of liability accompanies a +copy of the Program in return for a fee. + + END OF TERMS AND CONDITIONS + + How to Apply These Terms to Your New Programs + + If you develop a new program, and you want it to be of the greatest +possible use to the public, the best way to achieve this is to make it +free software which everyone can redistribute and change under these terms. + + To do so, attach the following notices to the program. It is safest +to attach them to the start of each source file to most effectively +state the exclusion of warranty; and each file should have at least +the "copyright" line and a pointer to where the full notice is found. + + + Copyright (C) + + This program is free software: you can redistribute it and/or modify + it under the terms of the GNU Affero General Public License as published + by the Free Software Foundation, either version 3 of the License, or + (at your option) any later version. + + This program is distributed in the hope that it will be useful, + but WITHOUT ANY WARRANTY; without even the implied warranty of + MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + GNU Affero General Public License for more details. + + You should have received a copy of the GNU Affero General Public License + along with this program. If not, see . + +Also add information on how to contact you by electronic and paper mail. + + If your software can interact with users remotely through a computer +network, you should also make sure that it provides a way for users to +get its source. For example, if your program is a web application, its +interface could display a "Source" link that leads users to an archive +of the code. 
There are many ways you could offer source, and different +solutions will be better for different programs; see section 13 for the +specific requirements. + + You should also get your employer (if you work as a programmer) or school, +if any, to sign a "copyright disclaimer" for the program, if necessary. +For more information on this, and how to apply and follow the GNU AGPL, see +. \ No newline at end of file diff --git a/README.md b/README.md index 00def78..1efbabf 100644 --- a/README.md +++ b/README.md @@ -1,22 +1,8 @@ -# Helios -> ERC-1155-based Exchange - -LPs are tracked under [multi-token standard](https://eips.ethereum.org/EIPS/eip-1155). - -Swapping is routed from this same contract. Easy peasy. - -Math is handled by external helper contracts called `swapper`. - -Currently, pairs are supported. Can use raw ETH too. - -## To-do -- [X] Core contracts -- [X] XYK swapper -- [ ] Concentrated swapper -- [ ] Incentives - - - +# 𖤓 Helios +> ERC-6909 Singleton Exchange +LPs are tracked under the [minimal multi-token interface](https://eips.ethereum.org/EIPS/eip-6909). +Swapping uses the Uniswap V2 *xyk* curve and some Sushiswap updates to the classic constant product pool. +Such as allowing single-sided LP. Otherwise, Helios also allows for swaps in ERC-1155 and ERC-6909 tokens. 
\ No newline at end of file diff --git a/foundry.toml b/foundry.toml index 9b09234..3968d9e 100644 --- a/foundry.toml +++ b/foundry.toml @@ -1,12 +1,18 @@ [profile.default] -solc = "0.8.17" -bytecode_hash = "none" -optimizer_runs = 1000000 -via_ir = true +solc_version = "0.8.23" +evm_version = "shanghai" + +optimizer = true +optimizer_runs = 9_999_999 remappings = [ - "@solbase=lib/solbase/src/" + "@solady=lib/solady/", + "@forge=lib/forge-std/src/" ] -[profile.intense.fuzz] -runs = 10000 \ No newline at end of file +[fmt] +line_length = 100 + +[rpc_endpoints] +main = "https://rpc.ankr.com/eth" +opti = "https://rpc.ankr.com/optimism" \ No newline at end of file diff --git a/lib/ds-test b/lib/ds-test deleted file mode 160000 index 9310e87..0000000 --- a/lib/ds-test +++ /dev/null @@ -1 +0,0 @@ -Subproject commit 9310e879db8ba3ea6d5c6489a579118fd264a3f5 diff --git a/lib/forge-std b/lib/forge-std index f36dab2..4513bc2 160000 --- a/lib/forge-std +++ b/lib/forge-std @@ -1 +1 @@ -Subproject commit f36dab24d63d1c1945a05ed375ce341d3c1a49ed +Subproject commit 4513bc2063f23c57bee6558799584b518d387a39 diff --git a/lib/solady b/lib/solady new file mode 160000 index 0000000..7593af5 --- /dev/null +++ b/lib/solady @@ -0,0 +1 @@ +Subproject commit 7593af59974a1bb12ebe10d87b95693c293234ed diff --git a/lib/solbase b/lib/solbase deleted file mode 160000 index 7e98fcb..0000000 --- a/lib/solbase +++ /dev/null @@ -1 +0,0 @@ -Subproject commit 7e98fcbf75679c1f02f2be94462fbadd002a01f7 diff --git a/node_modules/.bin/prettier b/node_modules/.bin/prettier deleted file mode 120000 index a478df3..0000000 --- a/node_modules/.bin/prettier +++ /dev/null @@ -1 +0,0 @@ -../prettier/bin-prettier.js \ No newline at end of file diff --git a/node_modules/.bin/semver b/node_modules/.bin/semver deleted file mode 120000 index 5aaadf4..0000000 --- a/node_modules/.bin/semver +++ /dev/null @@ -1 +0,0 @@ -../semver/bin/semver.js \ No newline at end of file diff --git a/node_modules/.yarn-integrity 
b/node_modules/.yarn-integrity deleted file mode 100644 index 93dad4c..0000000 --- a/node_modules/.yarn-integrity +++ /dev/null @@ -1,31 +0,0 @@ -{ - "systemParams": "linux-x64-93", - "modulesFolders": [ - "node_modules" - ], - "flags": [], - "linkedModules": [], - "topLevelPatterns": [ - "prettier-plugin-solidity@^1.0.0-beta.13", - "prettier@^2.3.1" - ], - "lockfileEntries": { - "@solidity-parser/parser@^0.14.0": "https://registry.yarnpkg.com/@solidity-parser/parser/-/parser-0.14.1.tgz#179afb29f4e295a77cc141151f26b3848abc3c46", - "ansi-regex@^5.0.1": "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-5.0.1.tgz#082cb2c89c9fe8659a311a53bd6a4dc5301db304", - "antlr4ts@^0.5.0-alpha.4": "https://registry.yarnpkg.com/antlr4ts/-/antlr4ts-0.5.0-alpha.4.tgz#71702865a87478ed0b40c0709f422cf14d51652a", - "emoji-regex@^10.0.0": "https://registry.yarnpkg.com/emoji-regex/-/emoji-regex-10.0.0.tgz#96559e19f82231b436403e059571241d627c42b8", - "emoji-regex@^8.0.0": "https://registry.yarnpkg.com/emoji-regex/-/emoji-regex-8.0.0.tgz#e818fd69ce5ccfcb404594f842963bf53164cc37", - "escape-string-regexp@^4.0.0": "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz#14ba83a5d373e3d311e5afca29cf5bfad965bf34", - "is-fullwidth-code-point@^3.0.0": "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz#f116f8064fe90b3f7844a38997c0b75051269f1d", - "lru-cache@^6.0.0": "https://registry.yarnpkg.com/lru-cache/-/lru-cache-6.0.0.tgz#6d6fe6570ebd96aaf90fcad1dafa3b2566db3a94", - "prettier-plugin-solidity@^1.0.0-beta.13": "https://registry.yarnpkg.com/prettier-plugin-solidity/-/prettier-plugin-solidity-1.0.0-beta.19.tgz#7c3607fc4028f5e6a425259ff03e45eedf733df3", - "prettier@^2.3.1": "https://registry.yarnpkg.com/prettier/-/prettier-2.5.1.tgz#fff75fa9d519c54cf0fce328c1017d94546bc56a", - "semver@^7.3.5": "https://registry.yarnpkg.com/semver/-/semver-7.3.5.tgz#0b621c879348d8998e4b0e4be94b3f12e6018ef7", - "solidity-comments-extractor@^0.0.7": 
"https://registry.yarnpkg.com/solidity-comments-extractor/-/solidity-comments-extractor-0.0.7.tgz#99d8f1361438f84019795d928b931f4e5c39ca19", - "string-width@^4.2.3": "https://registry.yarnpkg.com/string-width/-/string-width-4.2.3.tgz#269c7117d27b05ad2e536830a8ec895ef9c6d010", - "strip-ansi@^6.0.1": "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-6.0.1.tgz#9e26c63d30f53443e9489495b2105d37b67a85d9", - "yallist@^4.0.0": "https://registry.yarnpkg.com/yallist/-/yallist-4.0.0.tgz#9bb92790d9c0effec63be73519e11a35019a3a72" - }, - "files": [], - "artifacts": {} -} \ No newline at end of file diff --git a/node_modules/@solidity-parser/parser/LICENSE b/node_modules/@solidity-parser/parser/LICENSE deleted file mode 100644 index 8bff8a1..0000000 --- a/node_modules/@solidity-parser/parser/LICENSE +++ /dev/null @@ -1,21 +0,0 @@ -MIT License - -Copyright (c) 2017-2018 Federico Bond - -Permission is hereby granted, free of charge, to any person obtaining a copy -of this software and associated documentation files (the "Software"), to deal -in the Software without restriction, including without limitation the rights -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell -copies of the Software, and to permit persons to whom the Software is -furnished to do so, subject to the following conditions: - -The above copyright notice and this permission notice shall be included in all -copies or substantial portions of the Software. - -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. 
diff --git a/node_modules/@solidity-parser/parser/README.md b/node_modules/@solidity-parser/parser/README.md deleted file mode 100644 index fe65fa6..0000000 --- a/node_modules/@solidity-parser/parser/README.md +++ /dev/null @@ -1,142 +0,0 @@ -# Solidity Parser for JavaScript - -[![npm version](https://badge.fury.io/js/%40solidity-parser%2Fparser.svg)](https://badge.fury.io/js/%40solidity-parser%2Fparser) - -A JavaScript package for parsing [Solidity](https://solidity.readthedocs.io/) code using [ANTLR (ANother Tool for Language Recognition)](https://www.antlr.org/) grammar. - -This is a fork of [@federicobond](https://github.com/federicobond)'s original [repo](https://github.com/federicobond/solidity-parser-antlr), -with some extra features taken from [Consensys Diligence's alternative fork](https://github.com/consensys/solidity-parser-antlr). - -## Installation - -The following installation options assume [Node.js](https://nodejs.org/en/download/) has already been installed. - -Using [Node Package Manager (npm)](https://www.npmjs.com/). 
- -``` -npm install @solidity-parser/parser -``` - -Using [yarn](https://yarnpkg.com/) - -``` -yarn add @solidity-parser/parser -``` - -## Usage - -```javascript -const parser = require('@solidity-parser/parser'); - -const input = ` - contract test { - uint256 a; - function f() {} - } -` -try { - const ast = parser.parse(input) - console.log(ast) -} catch (e) { - if (e instanceof parser.ParserError) { - console.error(e.errors) - } -} -``` - -The `parse` method also accepts a second argument which lets you specify the -following options, in a style similar to the _esprima_ API: - -| Key | Type | Default | Description | -|----------|---------|---------|------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| tolerant | Boolean | false | When set to `true` it will collect syntax errors and place them in a list under the key `errors` inside the root node of the returned AST. Otherwise, it will raise a `parser.ParserError`. | -| loc | Boolean | false | When set to `true`, it will add location information to each node, with start and stop keys that contain the corresponding line and column numbers. Column numbers start from 0, lines start from 1. | -| range | Boolean | false | When set to `true`, it will add range information to each node, which consists of a two-element array with start and stop character indexes in the input. 
| - - -### Example with location information - -```javascript -parser.parse('contract test { uint a; }', { loc: true }) - -// { type: 'SourceUnit', -// children: -// [ { type: 'ContractDefinition', -// name: 'test', -// baseContracts: [], -// subNodes: [Array], -// kind: 'contract', -// loc: [Object] } ], -// loc: { start: { line: 1, column: 0 }, end: { line: 1, column: 24 } } } - -``` - -### Example using a visitor to walk over the AST - -```javascript -var ast = parser.parse('contract test { uint a; }') - -// output the path of each import found -parser.visit(ast, { - ImportDirective: function(node) { - console.log(node.path) - } -}) -``` - -## Usage in the browser - -A browser-friendly version is available in `dist/index.iife.js` (along with its sourcemaps file) in the published version. - -If you are using webpack, keep in mind that minimizing your bundle will mangle function names, breaking the parser. To fix this you can just set `optimization.minimize` to `false`. - -## Contribution - -This project is dependant on the [@solidity-parser/antlr](https://github.com/solidity-parser/antlr) repository via a git submodule. To clone this repository and the submodule, run - -``` -git clone --recursive -``` - -If you have already cloned this repo, you can load the submodule with - -``` -git submodule update --init -``` - -This project can be linked to a forked `@solidity-parser/antlr` project by editing the url in the [.gitmodules](.gitmodules) file to point to the forked repo and running - -``` -git submodule sync -``` - -The Solidity ANTLR file [Solidity.g4](./antlr/Solidity.g4) can be built with the following. This will also download the ANTLR Java Archive (jar) file to `antlr/antlr4.jar` if it doesn't already exist. The generated ANTLR tokens and JavaScript files are copied the [src](./src) folder. 
- -``` -yarn run antlr -``` - -The files to be distributed with the npm package are in the `dist` folder and built by running - -``` -yarn run build -``` - -The [mocha](https://mochajs.org/) tests under the [test](./test) folder can be run with the following. This includes parsing the [test.sol](./test/test.sol) Solidity file. - -``` -yarn run test -``` - -## Used by - -* [Hardhat](https://hardhat.org/) -* [sol2uml](https://github.com/naddison36/sol2uml) -* [Solhint](https://github.com/protofire/solhint/) -* [solidity-coverage](https://github.com/sc-forks/solidity-coverage) -* [prettier-solidity](https://github.com/prettier-solidity/prettier-plugin-solidity/) -* [eth-gas-reporter](https://github.com/cgewecke/eth-gas-reporter) - -## License - -[MIT](./LICENSE) diff --git a/node_modules/@solidity-parser/parser/dist/Solidity-JSLPOCIO.tokens b/node_modules/@solidity-parser/parser/dist/Solidity-JSLPOCIO.tokens deleted file mode 100644 index 58e78b7..0000000 --- a/node_modules/@solidity-parser/parser/dist/Solidity-JSLPOCIO.tokens +++ /dev/null @@ -1,249 +0,0 @@ -T__0=1 -T__1=2 -T__2=3 -T__3=4 -T__4=5 -T__5=6 -T__6=7 -T__7=8 -T__8=9 -T__9=10 -T__10=11 -T__11=12 -T__12=13 -T__13=14 -T__14=15 -T__15=16 -T__16=17 -T__17=18 -T__18=19 -T__19=20 -T__20=21 -T__21=22 -T__22=23 -T__23=24 -T__24=25 -T__25=26 -T__26=27 -T__27=28 -T__28=29 -T__29=30 -T__30=31 -T__31=32 -T__32=33 -T__33=34 -T__34=35 -T__35=36 -T__36=37 -T__37=38 -T__38=39 -T__39=40 -T__40=41 -T__41=42 -T__42=43 -T__43=44 -T__44=45 -T__45=46 -T__46=47 -T__47=48 -T__48=49 -T__49=50 -T__50=51 -T__51=52 -T__52=53 -T__53=54 -T__54=55 -T__55=56 -T__56=57 -T__57=58 -T__58=59 -T__59=60 -T__60=61 -T__61=62 -T__62=63 -T__63=64 -T__64=65 -T__65=66 -T__66=67 -T__67=68 -T__68=69 -T__69=70 -T__70=71 -T__71=72 -T__72=73 -T__73=74 -T__74=75 -T__75=76 -T__76=77 -T__77=78 -T__78=79 -T__79=80 -T__80=81 -T__81=82 -T__82=83 -T__83=84 -T__84=85 -T__85=86 -T__86=87 -T__87=88 -T__88=89 -T__89=90 -T__90=91 -T__91=92 -T__92=93 -T__93=94 
-T__94=95 -T__95=96 -T__96=97 -Int=98 -Uint=99 -Byte=100 -Fixed=101 -Ufixed=102 -BooleanLiteral=103 -DecimalNumber=104 -HexNumber=105 -NumberUnit=106 -HexLiteralFragment=107 -ReservedKeyword=108 -AnonymousKeyword=109 -BreakKeyword=110 -ConstantKeyword=111 -ImmutableKeyword=112 -ContinueKeyword=113 -LeaveKeyword=114 -ExternalKeyword=115 -IndexedKeyword=116 -InternalKeyword=117 -PayableKeyword=118 -PrivateKeyword=119 -PublicKeyword=120 -VirtualKeyword=121 -PureKeyword=122 -TypeKeyword=123 -ViewKeyword=124 -ConstructorKeyword=125 -FallbackKeyword=126 -ReceiveKeyword=127 -Identifier=128 -StringLiteralFragment=129 -VersionLiteral=130 -WS=131 -COMMENT=132 -LINE_COMMENT=133 -'pragma'=1 -';'=2 -'||'=3 -'^'=4 -'~'=5 -'>='=6 -'>'=7 -'<'=8 -'<='=9 -'='=10 -'as'=11 -'import'=12 -'*'=13 -'from'=14 -'{'=15 -','=16 -'}'=17 -'abstract'=18 -'contract'=19 -'interface'=20 -'library'=21 -'is'=22 -'('=23 -')'=24 -'error'=25 -'using'=26 -'for'=27 -'struct'=28 -'modifier'=29 -'function'=30 -'returns'=31 -'event'=32 -'enum'=33 -'['=34 -']'=35 -'address'=36 -'.'=37 -'mapping'=38 -'=>'=39 -'memory'=40 -'storage'=41 -'calldata'=42 -'if'=43 -'else'=44 -'try'=45 -'catch'=46 -'while'=47 -'unchecked'=48 -'assembly'=49 -'do'=50 -'return'=51 -'throw'=52 -'emit'=53 -'revert'=54 -'var'=55 -'bool'=56 -'string'=57 -'byte'=58 -'++'=59 -'--'=60 -'new'=61 -':'=62 -'+'=63 -'-'=64 -'after'=65 -'delete'=66 -'!'=67 -'**'=68 -'/'=69 -'%'=70 -'<<'=71 -'>>'=72 -'&'=73 -'|'=74 -'=='=75 -'!='=76 -'&&'=77 -'?'=78 -'|='=79 -'^='=80 -'&='=81 -'<<='=82 -'>>='=83 -'+='=84 -'-='=85 -'*='=86 -'/='=87 -'%='=88 -'let'=89 -':='=90 -'=:'=91 -'switch'=92 -'case'=93 -'default'=94 -'->'=95 -'callback'=96 -'override'=97 -'anonymous'=109 -'break'=110 -'constant'=111 -'immutable'=112 -'continue'=113 -'leave'=114 -'external'=115 -'indexed'=116 -'internal'=117 -'payable'=118 -'private'=119 -'public'=120 -'virtual'=121 -'pure'=122 -'type'=123 -'view'=124 -'constructor'=125 -'fallback'=126 -'receive'=127 diff --git 
a/node_modules/@solidity-parser/parser/dist/Solidity-WDNIKDDG.tokens b/node_modules/@solidity-parser/parser/dist/Solidity-WDNIKDDG.tokens deleted file mode 100644 index 12aae3c..0000000 --- a/node_modules/@solidity-parser/parser/dist/Solidity-WDNIKDDG.tokens +++ /dev/null @@ -1,245 +0,0 @@ -T__0=1 -T__1=2 -T__2=3 -T__3=4 -T__4=5 -T__5=6 -T__6=7 -T__7=8 -T__8=9 -T__9=10 -T__10=11 -T__11=12 -T__12=13 -T__13=14 -T__14=15 -T__15=16 -T__16=17 -T__17=18 -T__18=19 -T__19=20 -T__20=21 -T__21=22 -T__22=23 -T__23=24 -T__24=25 -T__25=26 -T__26=27 -T__27=28 -T__28=29 -T__29=30 -T__30=31 -T__31=32 -T__32=33 -T__33=34 -T__34=35 -T__35=36 -T__36=37 -T__37=38 -T__38=39 -T__39=40 -T__40=41 -T__41=42 -T__42=43 -T__43=44 -T__44=45 -T__45=46 -T__46=47 -T__47=48 -T__48=49 -T__49=50 -T__50=51 -T__51=52 -T__52=53 -T__53=54 -T__54=55 -T__55=56 -T__56=57 -T__57=58 -T__58=59 -T__59=60 -T__60=61 -T__61=62 -T__62=63 -T__63=64 -T__64=65 -T__65=66 -T__66=67 -T__67=68 -T__68=69 -T__69=70 -T__70=71 -T__71=72 -T__72=73 -T__73=74 -T__74=75 -T__75=76 -T__76=77 -T__77=78 -T__78=79 -T__79=80 -T__80=81 -T__81=82 -T__82=83 -T__83=84 -T__84=85 -T__85=86 -T__86=87 -T__87=88 -T__88=89 -T__89=90 -T__90=91 -T__91=92 -T__92=93 -T__93=94 -T__94=95 -Int=96 -Uint=97 -Byte=98 -Fixed=99 -Ufixed=100 -BooleanLiteral=101 -DecimalNumber=102 -HexNumber=103 -NumberUnit=104 -HexLiteralFragment=105 -ReservedKeyword=106 -AnonymousKeyword=107 -BreakKeyword=108 -ConstantKeyword=109 -ImmutableKeyword=110 -ContinueKeyword=111 -LeaveKeyword=112 -ExternalKeyword=113 -IndexedKeyword=114 -InternalKeyword=115 -PayableKeyword=116 -PrivateKeyword=117 -PublicKeyword=118 -VirtualKeyword=119 -PureKeyword=120 -TypeKeyword=121 -ViewKeyword=122 -ConstructorKeyword=123 -FallbackKeyword=124 -ReceiveKeyword=125 -Identifier=126 -StringLiteralFragment=127 -VersionLiteral=128 -WS=129 -COMMENT=130 -LINE_COMMENT=131 -'pragma'=1 -';'=2 -'||'=3 -'^'=4 -'~'=5 -'>='=6 -'>'=7 -'<'=8 -'<='=9 -'='=10 -'as'=11 -'import'=12 -'*'=13 -'from'=14 -'{'=15 
-','=16 -'}'=17 -'abstract'=18 -'contract'=19 -'interface'=20 -'library'=21 -'is'=22 -'('=23 -')'=24 -'using'=25 -'for'=26 -'struct'=27 -'modifier'=28 -'function'=29 -'returns'=30 -'event'=31 -'enum'=32 -'['=33 -']'=34 -'address'=35 -'.'=36 -'mapping'=37 -'=>'=38 -'memory'=39 -'storage'=40 -'calldata'=41 -'if'=42 -'else'=43 -'try'=44 -'catch'=45 -'while'=46 -'unchecked'=47 -'assembly'=48 -'do'=49 -'return'=50 -'throw'=51 -'emit'=52 -'var'=53 -'bool'=54 -'string'=55 -'byte'=56 -'++'=57 -'--'=58 -'new'=59 -':'=60 -'+'=61 -'-'=62 -'after'=63 -'delete'=64 -'!'=65 -'**'=66 -'/'=67 -'%'=68 -'<<'=69 -'>>'=70 -'&'=71 -'|'=72 -'=='=73 -'!='=74 -'&&'=75 -'?'=76 -'|='=77 -'^='=78 -'&='=79 -'<<='=80 -'>>='=81 -'+='=82 -'-='=83 -'*='=84 -'/='=85 -'%='=86 -'let'=87 -':='=88 -'=:'=89 -'switch'=90 -'case'=91 -'default'=92 -'->'=93 -'callback'=94 -'override'=95 -'anonymous'=107 -'break'=108 -'constant'=109 -'immutable'=110 -'continue'=111 -'leave'=112 -'external'=113 -'indexed'=114 -'internal'=115 -'payable'=116 -'private'=117 -'public'=118 -'virtual'=119 -'pure'=120 -'type'=121 -'view'=122 -'constructor'=123 -'fallback'=124 -'receive'=125 diff --git a/node_modules/@solidity-parser/parser/dist/antlr/Solidity.tokens b/node_modules/@solidity-parser/parser/dist/antlr/Solidity.tokens deleted file mode 100644 index 58e78b7..0000000 --- a/node_modules/@solidity-parser/parser/dist/antlr/Solidity.tokens +++ /dev/null @@ -1,249 +0,0 @@ -T__0=1 -T__1=2 -T__2=3 -T__3=4 -T__4=5 -T__5=6 -T__6=7 -T__7=8 -T__8=9 -T__9=10 -T__10=11 -T__11=12 -T__12=13 -T__13=14 -T__14=15 -T__15=16 -T__16=17 -T__17=18 -T__18=19 -T__19=20 -T__20=21 -T__21=22 -T__22=23 -T__23=24 -T__24=25 -T__25=26 -T__26=27 -T__27=28 -T__28=29 -T__29=30 -T__30=31 -T__31=32 -T__32=33 -T__33=34 -T__34=35 -T__35=36 -T__36=37 -T__37=38 -T__38=39 -T__39=40 -T__40=41 -T__41=42 -T__42=43 -T__43=44 -T__44=45 -T__45=46 -T__46=47 -T__47=48 -T__48=49 -T__49=50 -T__50=51 -T__51=52 -T__52=53 -T__53=54 -T__54=55 -T__55=56 -T__56=57 -T__57=58 
-T__58=59 -T__59=60 -T__60=61 -T__61=62 -T__62=63 -T__63=64 -T__64=65 -T__65=66 -T__66=67 -T__67=68 -T__68=69 -T__69=70 -T__70=71 -T__71=72 -T__72=73 -T__73=74 -T__74=75 -T__75=76 -T__76=77 -T__77=78 -T__78=79 -T__79=80 -T__80=81 -T__81=82 -T__82=83 -T__83=84 -T__84=85 -T__85=86 -T__86=87 -T__87=88 -T__88=89 -T__89=90 -T__90=91 -T__91=92 -T__92=93 -T__93=94 -T__94=95 -T__95=96 -T__96=97 -Int=98 -Uint=99 -Byte=100 -Fixed=101 -Ufixed=102 -BooleanLiteral=103 -DecimalNumber=104 -HexNumber=105 -NumberUnit=106 -HexLiteralFragment=107 -ReservedKeyword=108 -AnonymousKeyword=109 -BreakKeyword=110 -ConstantKeyword=111 -ImmutableKeyword=112 -ContinueKeyword=113 -LeaveKeyword=114 -ExternalKeyword=115 -IndexedKeyword=116 -InternalKeyword=117 -PayableKeyword=118 -PrivateKeyword=119 -PublicKeyword=120 -VirtualKeyword=121 -PureKeyword=122 -TypeKeyword=123 -ViewKeyword=124 -ConstructorKeyword=125 -FallbackKeyword=126 -ReceiveKeyword=127 -Identifier=128 -StringLiteralFragment=129 -VersionLiteral=130 -WS=131 -COMMENT=132 -LINE_COMMENT=133 -'pragma'=1 -';'=2 -'||'=3 -'^'=4 -'~'=5 -'>='=6 -'>'=7 -'<'=8 -'<='=9 -'='=10 -'as'=11 -'import'=12 -'*'=13 -'from'=14 -'{'=15 -','=16 -'}'=17 -'abstract'=18 -'contract'=19 -'interface'=20 -'library'=21 -'is'=22 -'('=23 -')'=24 -'error'=25 -'using'=26 -'for'=27 -'struct'=28 -'modifier'=29 -'function'=30 -'returns'=31 -'event'=32 -'enum'=33 -'['=34 -']'=35 -'address'=36 -'.'=37 -'mapping'=38 -'=>'=39 -'memory'=40 -'storage'=41 -'calldata'=42 -'if'=43 -'else'=44 -'try'=45 -'catch'=46 -'while'=47 -'unchecked'=48 -'assembly'=49 -'do'=50 -'return'=51 -'throw'=52 -'emit'=53 -'revert'=54 -'var'=55 -'bool'=56 -'string'=57 -'byte'=58 -'++'=59 -'--'=60 -'new'=61 -':'=62 -'+'=63 -'-'=64 -'after'=65 -'delete'=66 -'!'=67 -'**'=68 -'/'=69 -'%'=70 -'<<'=71 -'>>'=72 -'&'=73 -'|'=74 -'=='=75 -'!='=76 -'&&'=77 -'?'=78 -'|='=79 -'^='=80 -'&='=81 -'<<='=82 -'>>='=83 -'+='=84 -'-='=85 -'*='=86 -'/='=87 -'%='=88 -'let'=89 -':='=90 -'=:'=91 -'switch'=92 -'case'=93 
-'default'=94 -'->'=95 -'callback'=96 -'override'=97 -'anonymous'=109 -'break'=110 -'constant'=111 -'immutable'=112 -'continue'=113 -'leave'=114 -'external'=115 -'indexed'=116 -'internal'=117 -'payable'=118 -'private'=119 -'public'=120 -'virtual'=121 -'pure'=122 -'type'=123 -'view'=124 -'constructor'=125 -'fallback'=126 -'receive'=127 diff --git a/node_modules/@solidity-parser/parser/dist/antlr/SolidityLexer.tokens b/node_modules/@solidity-parser/parser/dist/antlr/SolidityLexer.tokens deleted file mode 100644 index 58e78b7..0000000 --- a/node_modules/@solidity-parser/parser/dist/antlr/SolidityLexer.tokens +++ /dev/null @@ -1,249 +0,0 @@ -T__0=1 -T__1=2 -T__2=3 -T__3=4 -T__4=5 -T__5=6 -T__6=7 -T__7=8 -T__8=9 -T__9=10 -T__10=11 -T__11=12 -T__12=13 -T__13=14 -T__14=15 -T__15=16 -T__16=17 -T__17=18 -T__18=19 -T__19=20 -T__20=21 -T__21=22 -T__22=23 -T__23=24 -T__24=25 -T__25=26 -T__26=27 -T__27=28 -T__28=29 -T__29=30 -T__30=31 -T__31=32 -T__32=33 -T__33=34 -T__34=35 -T__35=36 -T__36=37 -T__37=38 -T__38=39 -T__39=40 -T__40=41 -T__41=42 -T__42=43 -T__43=44 -T__44=45 -T__45=46 -T__46=47 -T__47=48 -T__48=49 -T__49=50 -T__50=51 -T__51=52 -T__52=53 -T__53=54 -T__54=55 -T__55=56 -T__56=57 -T__57=58 -T__58=59 -T__59=60 -T__60=61 -T__61=62 -T__62=63 -T__63=64 -T__64=65 -T__65=66 -T__66=67 -T__67=68 -T__68=69 -T__69=70 -T__70=71 -T__71=72 -T__72=73 -T__73=74 -T__74=75 -T__75=76 -T__76=77 -T__77=78 -T__78=79 -T__79=80 -T__80=81 -T__81=82 -T__82=83 -T__83=84 -T__84=85 -T__85=86 -T__86=87 -T__87=88 -T__88=89 -T__89=90 -T__90=91 -T__91=92 -T__92=93 -T__93=94 -T__94=95 -T__95=96 -T__96=97 -Int=98 -Uint=99 -Byte=100 -Fixed=101 -Ufixed=102 -BooleanLiteral=103 -DecimalNumber=104 -HexNumber=105 -NumberUnit=106 -HexLiteralFragment=107 -ReservedKeyword=108 -AnonymousKeyword=109 -BreakKeyword=110 -ConstantKeyword=111 -ImmutableKeyword=112 -ContinueKeyword=113 -LeaveKeyword=114 -ExternalKeyword=115 -IndexedKeyword=116 -InternalKeyword=117 -PayableKeyword=118 -PrivateKeyword=119 
-PublicKeyword=120 -VirtualKeyword=121 -PureKeyword=122 -TypeKeyword=123 -ViewKeyword=124 -ConstructorKeyword=125 -FallbackKeyword=126 -ReceiveKeyword=127 -Identifier=128 -StringLiteralFragment=129 -VersionLiteral=130 -WS=131 -COMMENT=132 -LINE_COMMENT=133 -'pragma'=1 -';'=2 -'||'=3 -'^'=4 -'~'=5 -'>='=6 -'>'=7 -'<'=8 -'<='=9 -'='=10 -'as'=11 -'import'=12 -'*'=13 -'from'=14 -'{'=15 -','=16 -'}'=17 -'abstract'=18 -'contract'=19 -'interface'=20 -'library'=21 -'is'=22 -'('=23 -')'=24 -'error'=25 -'using'=26 -'for'=27 -'struct'=28 -'modifier'=29 -'function'=30 -'returns'=31 -'event'=32 -'enum'=33 -'['=34 -']'=35 -'address'=36 -'.'=37 -'mapping'=38 -'=>'=39 -'memory'=40 -'storage'=41 -'calldata'=42 -'if'=43 -'else'=44 -'try'=45 -'catch'=46 -'while'=47 -'unchecked'=48 -'assembly'=49 -'do'=50 -'return'=51 -'throw'=52 -'emit'=53 -'revert'=54 -'var'=55 -'bool'=56 -'string'=57 -'byte'=58 -'++'=59 -'--'=60 -'new'=61 -':'=62 -'+'=63 -'-'=64 -'after'=65 -'delete'=66 -'!'=67 -'**'=68 -'/'=69 -'%'=70 -'<<'=71 -'>>'=72 -'&'=73 -'|'=74 -'=='=75 -'!='=76 -'&&'=77 -'?'=78 -'|='=79 -'^='=80 -'&='=81 -'<<='=82 -'>>='=83 -'+='=84 -'-='=85 -'*='=86 -'/='=87 -'%='=88 -'let'=89 -':='=90 -'=:'=91 -'switch'=92 -'case'=93 -'default'=94 -'->'=95 -'callback'=96 -'override'=97 -'anonymous'=109 -'break'=110 -'constant'=111 -'immutable'=112 -'continue'=113 -'leave'=114 -'external'=115 -'indexed'=116 -'internal'=117 -'payable'=118 -'private'=119 -'public'=120 -'virtual'=121 -'pure'=122 -'type'=123 -'view'=124 -'constructor'=125 -'fallback'=126 -'receive'=127 diff --git a/node_modules/@solidity-parser/parser/dist/index.cjs.js b/node_modules/@solidity-parser/parser/dist/index.cjs.js deleted file mode 100644 index f5fdbf9..0000000 --- a/node_modules/@solidity-parser/parser/dist/index.cjs.js +++ /dev/null @@ -1,36940 +0,0 @@ -var __create = Object.create; -var __defProp = Object.defineProperty; -var __getProtoOf = Object.getPrototypeOf; -var __hasOwnProp = Object.prototype.hasOwnProperty; -var 
__getOwnPropNames = Object.getOwnPropertyNames; -var __getOwnPropDesc = Object.getOwnPropertyDescriptor; -var __getOwnPropSymbols = Object.getOwnPropertySymbols; -var __propIsEnum = Object.prototype.propertyIsEnumerable; -var __defNormalProp = (obj, key, value) => key in obj ? __defProp(obj, key, {enumerable: true, configurable: true, writable: true, value}) : obj[key] = value; -var __objSpread = (a, b) => { - for (var prop in b || (b = {})) - if (__hasOwnProp.call(b, prop)) - __defNormalProp(a, prop, b[prop]); - if (__getOwnPropSymbols) - for (var prop of __getOwnPropSymbols(b)) { - if (__propIsEnum.call(b, prop)) - __defNormalProp(a, prop, b[prop]); - } - return a; -}; -var __markAsModule = (target) => __defProp(target, "__esModule", {value: true}); -var __commonJS = (cb, mod) => () => (mod || cb((mod = {exports: {}}).exports, mod), mod.exports); -var __export = (target, all) => { - for (var name in all) - __defProp(target, name, {get: all[name], enumerable: true}); -}; -var __reExport = (target, module2, desc) => { - if (module2 && typeof module2 === "object" || typeof module2 === "function") { - for (let key of __getOwnPropNames(module2)) - if (!__hasOwnProp.call(target, key) && key !== "default") - __defProp(target, key, {get: () => module2[key], enumerable: !(desc = __getOwnPropDesc(module2, key)) || desc.enumerable}); - } - return target; -}; -var __toModule = (module2) => { - return __reExport(__markAsModule(__defProp(module2 != null ? __create(__getProtoOf(module2)) : {}, "default", module2 && module2.__esModule && "default" in module2 ? 
{get: () => module2.default, enumerable: true} : {value: module2, enumerable: true})), module2); -}; - -// node_modules/antlr4ts/ANTLRErrorListener.js -var require_ANTLRErrorListener = __commonJS((exports) => { - "use strict"; - Object.defineProperty(exports, "__esModule", {value: true}); -}); - -// node_modules/antlr4ts/ANTLRErrorStrategy.js -var require_ANTLRErrorStrategy = __commonJS((exports) => { - "use strict"; - Object.defineProperty(exports, "__esModule", {value: true}); -}); - -// node_modules/antlr4ts/Decorators.js -var require_Decorators = __commonJS((exports) => { - "use strict"; - Object.defineProperty(exports, "__esModule", {value: true}); - exports.SuppressWarnings = exports.Override = exports.Nullable = exports.NotNull = void 0; - function NotNull(target, propertyKey, propertyDescriptor) { - } - exports.NotNull = NotNull; - function Nullable(target, propertyKey, propertyDescriptor) { - } - exports.Nullable = Nullable; - function Override(target, propertyKey, propertyDescriptor) { - } - exports.Override = Override; - function SuppressWarnings(options) { - return (target, propertyKey, descriptor) => { - }; - } - exports.SuppressWarnings = SuppressWarnings; -}); - -// node_modules/antlr4ts/IntStream.js -var require_IntStream = __commonJS((exports) => { - "use strict"; - Object.defineProperty(exports, "__esModule", {value: true}); - exports.IntStream = void 0; - var IntStream; - (function(IntStream2) { - IntStream2.EOF = -1; - IntStream2.UNKNOWN_SOURCE_NAME = ""; - })(IntStream = exports.IntStream || (exports.IntStream = {})); -}); - -// node_modules/antlr4ts/ANTLRInputStream.js -var require_ANTLRInputStream = __commonJS((exports) => { - "use strict"; - var __decorate = exports && exports.__decorate || function(decorators, target, key, desc) { - var c = arguments.length, r = c < 3 ? target : desc === null ? 
desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; - if (typeof Reflect === "object" && typeof Reflect.decorate === "function") - r = Reflect.decorate(decorators, target, key, desc); - else - for (var i = decorators.length - 1; i >= 0; i--) - if (d = decorators[i]) - r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r; - return c > 3 && r && Object.defineProperty(target, key, r), r; - }; - Object.defineProperty(exports, "__esModule", {value: true}); - exports.ANTLRInputStream = void 0; - var assert = require("assert"); - var Decorators_1 = require_Decorators(); - var IntStream_1 = require_IntStream(); - var ANTLRInputStream2 = class { - constructor(input) { - this.p = 0; - this.data = input; - this.n = input.length; - } - reset() { - this.p = 0; - } - consume() { - if (this.p >= this.n) { - assert(this.LA(1) === IntStream_1.IntStream.EOF); - throw new Error("cannot consume EOF"); - } - if (this.p < this.n) { - this.p++; - } - } - LA(i) { - if (i === 0) { - return 0; - } - if (i < 0) { - i++; - if (this.p + i - 1 < 0) { - return IntStream_1.IntStream.EOF; - } - } - if (this.p + i - 1 >= this.n) { - return IntStream_1.IntStream.EOF; - } - return this.data.charCodeAt(this.p + i - 1); - } - LT(i) { - return this.LA(i); - } - get index() { - return this.p; - } - get size() { - return this.n; - } - mark() { - return -1; - } - release(marker) { - } - seek(index) { - if (index <= this.p) { - this.p = index; - return; - } - index = Math.min(index, this.n); - while (this.p < index) { - this.consume(); - } - } - getText(interval) { - let start = interval.a; - let stop = interval.b; - if (stop >= this.n) { - stop = this.n - 1; - } - let count = stop - start + 1; - if (start >= this.n) { - return ""; - } - return this.data.substr(start, count); - } - get sourceName() { - if (!this.name) { - return IntStream_1.IntStream.UNKNOWN_SOURCE_NAME; - } - return this.name; - } - toString() { - return this.data; - } - }; - __decorate([ - Decorators_1.Override 
- ], ANTLRInputStream2.prototype, "consume", null); - __decorate([ - Decorators_1.Override - ], ANTLRInputStream2.prototype, "LA", null); - __decorate([ - Decorators_1.Override - ], ANTLRInputStream2.prototype, "index", null); - __decorate([ - Decorators_1.Override - ], ANTLRInputStream2.prototype, "size", null); - __decorate([ - Decorators_1.Override - ], ANTLRInputStream2.prototype, "mark", null); - __decorate([ - Decorators_1.Override - ], ANTLRInputStream2.prototype, "release", null); - __decorate([ - Decorators_1.Override - ], ANTLRInputStream2.prototype, "seek", null); - __decorate([ - Decorators_1.Override - ], ANTLRInputStream2.prototype, "getText", null); - __decorate([ - Decorators_1.Override - ], ANTLRInputStream2.prototype, "sourceName", null); - __decorate([ - Decorators_1.Override - ], ANTLRInputStream2.prototype, "toString", null); - exports.ANTLRInputStream = ANTLRInputStream2; -}); - -// node_modules/antlr4ts/atn/ATNState.js -var require_ATNState = __commonJS((exports) => { - "use strict"; - var __decorate = exports && exports.__decorate || function(decorators, target, key, desc) { - var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; - if (typeof Reflect === "object" && typeof Reflect.decorate === "function") - r = Reflect.decorate(decorators, target, key, desc); - else - for (var i = decorators.length - 1; i >= 0; i--) - if (d = decorators[i]) - r = (c < 3 ? d(r) : c > 3 ? 
d(target, key, r) : d(target, key)) || r; - return c > 3 && r && Object.defineProperty(target, key, r), r; - }; - Object.defineProperty(exports, "__esModule", {value: true}); - exports.ATNState = void 0; - var Decorators_1 = require_Decorators(); - var ATNState = class { - constructor() { - this.stateNumber = ATNState.INVALID_STATE_NUMBER; - this.ruleIndex = 0; - this.epsilonOnlyTransitions = false; - this.transitions = []; - this.optimizedTransitions = this.transitions; - } - getStateNumber() { - return this.stateNumber; - } - get nonStopStateNumber() { - return this.getStateNumber(); - } - hashCode() { - return this.stateNumber; - } - equals(o) { - if (o instanceof ATNState) { - return this.stateNumber === o.stateNumber; - } - return false; - } - get isNonGreedyExitState() { - return false; - } - toString() { - return String(this.stateNumber); - } - getTransitions() { - return this.transitions.slice(0); - } - get numberOfTransitions() { - return this.transitions.length; - } - addTransition(e, index) { - if (this.transitions.length === 0) { - this.epsilonOnlyTransitions = e.isEpsilon; - } else if (this.epsilonOnlyTransitions !== e.isEpsilon) { - this.epsilonOnlyTransitions = false; - throw new Error("ATN state " + this.stateNumber + " has both epsilon and non-epsilon transitions."); - } - this.transitions.splice(index !== void 0 ? 
index : this.transitions.length, 0, e); - } - transition(i) { - return this.transitions[i]; - } - setTransition(i, e) { - this.transitions[i] = e; - } - removeTransition(index) { - return this.transitions.splice(index, 1)[0]; - } - get onlyHasEpsilonTransitions() { - return this.epsilonOnlyTransitions; - } - setRuleIndex(ruleIndex) { - this.ruleIndex = ruleIndex; - } - get isOptimized() { - return this.optimizedTransitions !== this.transitions; - } - get numberOfOptimizedTransitions() { - return this.optimizedTransitions.length; - } - getOptimizedTransition(i) { - return this.optimizedTransitions[i]; - } - addOptimizedTransition(e) { - if (!this.isOptimized) { - this.optimizedTransitions = new Array(); - } - this.optimizedTransitions.push(e); - } - setOptimizedTransition(i, e) { - if (!this.isOptimized) { - throw new Error("This ATNState is not optimized."); - } - this.optimizedTransitions[i] = e; - } - removeOptimizedTransition(i) { - if (!this.isOptimized) { - throw new Error("This ATNState is not optimized."); - } - this.optimizedTransitions.splice(i, 1); - } - }; - __decorate([ - Decorators_1.Override - ], ATNState.prototype, "hashCode", null); - __decorate([ - Decorators_1.Override - ], ATNState.prototype, "equals", null); - __decorate([ - Decorators_1.Override - ], ATNState.prototype, "toString", null); - exports.ATNState = ATNState; - (function(ATNState2) { - ATNState2.INVALID_STATE_NUMBER = -1; - })(ATNState = exports.ATNState || (exports.ATNState = {})); -}); - -// node_modules/antlr4ts/atn/ATNStateType.js -var require_ATNStateType = __commonJS((exports) => { - "use strict"; - Object.defineProperty(exports, "__esModule", {value: true}); - exports.ATNStateType = void 0; - var ATNStateType; - (function(ATNStateType2) { - ATNStateType2[ATNStateType2["INVALID_TYPE"] = 0] = "INVALID_TYPE"; - ATNStateType2[ATNStateType2["BASIC"] = 1] = "BASIC"; - ATNStateType2[ATNStateType2["RULE_START"] = 2] = "RULE_START"; - ATNStateType2[ATNStateType2["BLOCK_START"] = 3] = 
"BLOCK_START"; - ATNStateType2[ATNStateType2["PLUS_BLOCK_START"] = 4] = "PLUS_BLOCK_START"; - ATNStateType2[ATNStateType2["STAR_BLOCK_START"] = 5] = "STAR_BLOCK_START"; - ATNStateType2[ATNStateType2["TOKEN_START"] = 6] = "TOKEN_START"; - ATNStateType2[ATNStateType2["RULE_STOP"] = 7] = "RULE_STOP"; - ATNStateType2[ATNStateType2["BLOCK_END"] = 8] = "BLOCK_END"; - ATNStateType2[ATNStateType2["STAR_LOOP_BACK"] = 9] = "STAR_LOOP_BACK"; - ATNStateType2[ATNStateType2["STAR_LOOP_ENTRY"] = 10] = "STAR_LOOP_ENTRY"; - ATNStateType2[ATNStateType2["PLUS_LOOP_BACK"] = 11] = "PLUS_LOOP_BACK"; - ATNStateType2[ATNStateType2["LOOP_END"] = 12] = "LOOP_END"; - })(ATNStateType = exports.ATNStateType || (exports.ATNStateType = {})); -}); - -// node_modules/antlr4ts/RecognitionException.js -var require_RecognitionException = __commonJS((exports) => { - "use strict"; - Object.defineProperty(exports, "__esModule", {value: true}); - exports.RecognitionException = void 0; - var RecognitionException2 = class extends Error { - constructor(recognizer, input, ctx, message) { - super(message); - this._offendingState = -1; - this._recognizer = recognizer; - this.input = input; - this.ctx = ctx; - if (recognizer) { - this._offendingState = recognizer.state; - } - } - get offendingState() { - return this._offendingState; - } - setOffendingState(offendingState) { - this._offendingState = offendingState; - } - get expectedTokens() { - if (this._recognizer) { - return this._recognizer.atn.getExpectedTokens(this._offendingState, this.ctx); - } - return void 0; - } - get context() { - return this.ctx; - } - get inputStream() { - return this.input; - } - getOffendingToken(recognizer) { - if (recognizer && recognizer !== this._recognizer) { - return void 0; - } - return this.offendingToken; - } - setOffendingToken(recognizer, offendingToken) { - if (recognizer === this._recognizer) { - this.offendingToken = offendingToken; - } - } - get recognizer() { - return this._recognizer; - } - }; - 
exports.RecognitionException = RecognitionException2; -}); - -// node_modules/antlr4ts/atn/Transition.js -var require_Transition = __commonJS((exports) => { - "use strict"; - var __decorate = exports && exports.__decorate || function(decorators, target, key, desc) { - var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; - if (typeof Reflect === "object" && typeof Reflect.decorate === "function") - r = Reflect.decorate(decorators, target, key, desc); - else - for (var i = decorators.length - 1; i >= 0; i--) - if (d = decorators[i]) - r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r; - return c > 3 && r && Object.defineProperty(target, key, r), r; - }; - var __param = exports && exports.__param || function(paramIndex, decorator) { - return function(target, key) { - decorator(target, key, paramIndex); - }; - }; - Object.defineProperty(exports, "__esModule", {value: true}); - exports.Transition = void 0; - var Decorators_1 = require_Decorators(); - var Transition = class Transition { - constructor(target) { - if (target == null) { - throw new Error("target cannot be null."); - } - this.target = target; - } - get isEpsilon() { - return false; - } - get label() { - return void 0; - } - }; - Transition.serializationNames = [ - "INVALID", - "EPSILON", - "RANGE", - "RULE", - "PREDICATE", - "ATOM", - "ACTION", - "SET", - "NOT_SET", - "WILDCARD", - "PRECEDENCE" - ]; - __decorate([ - Decorators_1.NotNull - ], Transition.prototype, "target", void 0); - Transition = __decorate([ - __param(0, Decorators_1.NotNull) - ], Transition); - exports.Transition = Transition; -}); - -// node_modules/antlr4ts/atn/AbstractPredicateTransition.js -var require_AbstractPredicateTransition = __commonJS((exports) => { - "use strict"; - Object.defineProperty(exports, "__esModule", {value: true}); - exports.AbstractPredicateTransition = void 0; - var Transition_1 = require_Transition(); - var 
AbstractPredicateTransition = class extends Transition_1.Transition { - constructor(target) { - super(target); - } - }; - exports.AbstractPredicateTransition = AbstractPredicateTransition; -}); - -// node_modules/antlr4ts/misc/MurmurHash.js -var require_MurmurHash = __commonJS((exports) => { - "use strict"; - Object.defineProperty(exports, "__esModule", {value: true}); - exports.MurmurHash = void 0; - var MurmurHash; - (function(MurmurHash2) { - const DEFAULT_SEED = 0; - function initialize(seed = DEFAULT_SEED) { - return seed; - } - MurmurHash2.initialize = initialize; - function update(hash, value) { - const c1 = 3432918353; - const c2 = 461845907; - const r1 = 15; - const r2 = 13; - const m = 5; - const n = 3864292196; - if (value == null) { - value = 0; - } else if (typeof value === "string") { - value = hashString(value); - } else if (typeof value === "object") { - value = value.hashCode(); - } - let k = value; - k = Math.imul(k, c1); - k = k << r1 | k >>> 32 - r1; - k = Math.imul(k, c2); - hash = hash ^ k; - hash = hash << r2 | hash >>> 32 - r2; - hash = Math.imul(hash, m) + n; - return hash & 4294967295; - } - MurmurHash2.update = update; - function finish(hash, numberOfWords) { - hash = hash ^ numberOfWords * 4; - hash = hash ^ hash >>> 16; - hash = Math.imul(hash, 2246822507); - hash = hash ^ hash >>> 13; - hash = Math.imul(hash, 3266489909); - hash = hash ^ hash >>> 16; - return hash; - } - MurmurHash2.finish = finish; - function hashCode(data, seed = DEFAULT_SEED) { - let hash = initialize(seed); - let length = 0; - for (let value of data) { - hash = update(hash, value); - length++; - } - hash = finish(hash, length); - return hash; - } - MurmurHash2.hashCode = hashCode; - function hashString(str) { - let len = str.length; - if (len === 0) { - return 0; - } - let hash = 0; - for (let i = 0; i < len; i++) { - let c = str.charCodeAt(i); - hash = (hash << 5 >>> 0) - hash + c; - hash |= 0; - } - return hash; - } - })(MurmurHash = exports.MurmurHash || 
(exports.MurmurHash = {})); -}); - -// node_modules/antlr4ts/misc/ObjectEqualityComparator.js -var require_ObjectEqualityComparator = __commonJS((exports) => { - "use strict"; - var __decorate = exports && exports.__decorate || function(decorators, target, key, desc) { - var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; - if (typeof Reflect === "object" && typeof Reflect.decorate === "function") - r = Reflect.decorate(decorators, target, key, desc); - else - for (var i = decorators.length - 1; i >= 0; i--) - if (d = decorators[i]) - r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r; - return c > 3 && r && Object.defineProperty(target, key, r), r; - }; - Object.defineProperty(exports, "__esModule", {value: true}); - exports.ObjectEqualityComparator = void 0; - var Decorators_1 = require_Decorators(); - var ObjectEqualityComparator = class { - hashCode(obj) { - if (obj == null) { - return 0; - } - return obj.hashCode(); - } - equals(a, b) { - if (a == null) { - return b == null; - } - return a.equals(b); - } - }; - ObjectEqualityComparator.INSTANCE = new ObjectEqualityComparator(); - __decorate([ - Decorators_1.Override - ], ObjectEqualityComparator.prototype, "hashCode", null); - __decorate([ - Decorators_1.Override - ], ObjectEqualityComparator.prototype, "equals", null); - exports.ObjectEqualityComparator = ObjectEqualityComparator; -}); - -// node_modules/antlr4ts/misc/DefaultEqualityComparator.js -var require_DefaultEqualityComparator = __commonJS((exports) => { - "use strict"; - var __decorate = exports && exports.__decorate || function(decorators, target, key, desc) { - var c = arguments.length, r = c < 3 ? target : desc === null ? 
desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; - if (typeof Reflect === "object" && typeof Reflect.decorate === "function") - r = Reflect.decorate(decorators, target, key, desc); - else - for (var i = decorators.length - 1; i >= 0; i--) - if (d = decorators[i]) - r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r; - return c > 3 && r && Object.defineProperty(target, key, r), r; - }; - Object.defineProperty(exports, "__esModule", {value: true}); - exports.DefaultEqualityComparator = void 0; - var Decorators_1 = require_Decorators(); - var MurmurHash_1 = require_MurmurHash(); - var ObjectEqualityComparator_1 = require_ObjectEqualityComparator(); - var DefaultEqualityComparator = class { - hashCode(obj) { - if (obj == null) { - return 0; - } else if (typeof obj === "string" || typeof obj === "number") { - return MurmurHash_1.MurmurHash.hashCode([obj]); - } else { - return ObjectEqualityComparator_1.ObjectEqualityComparator.INSTANCE.hashCode(obj); - } - } - equals(a, b) { - if (a == null) { - return b == null; - } else if (typeof a === "string" || typeof a === "number") { - return a === b; - } else { - return ObjectEqualityComparator_1.ObjectEqualityComparator.INSTANCE.equals(a, b); - } - } - }; - DefaultEqualityComparator.INSTANCE = new DefaultEqualityComparator(); - __decorate([ - Decorators_1.Override - ], DefaultEqualityComparator.prototype, "hashCode", null); - __decorate([ - Decorators_1.Override - ], DefaultEqualityComparator.prototype, "equals", null); - exports.DefaultEqualityComparator = DefaultEqualityComparator; -}); - -// node_modules/antlr4ts/misc/Array2DHashSet.js -var require_Array2DHashSet = __commonJS((exports) => { - "use strict"; - var __decorate = exports && exports.__decorate || function(decorators, target, key, desc) { - var c = arguments.length, r = c < 3 ? target : desc === null ? 
desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; - if (typeof Reflect === "object" && typeof Reflect.decorate === "function") - r = Reflect.decorate(decorators, target, key, desc); - else - for (var i = decorators.length - 1; i >= 0; i--) - if (d = decorators[i]) - r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r; - return c > 3 && r && Object.defineProperty(target, key, r), r; - }; - var __param = exports && exports.__param || function(paramIndex, decorator) { - return function(target, key) { - decorator(target, key, paramIndex); - }; - }; - Object.defineProperty(exports, "__esModule", {value: true}); - exports.Array2DHashSet = void 0; - var assert = require("assert"); - var DefaultEqualityComparator_1 = require_DefaultEqualityComparator(); - var Decorators_1 = require_Decorators(); - var MurmurHash_1 = require_MurmurHash(); - var INITAL_CAPACITY = 16; - var LOAD_FACTOR = 0.75; - var Array2DHashSet = class { - constructor(comparatorOrSet, initialCapacity = INITAL_CAPACITY) { - this.n = 0; - this.threshold = Math.floor(INITAL_CAPACITY * LOAD_FACTOR); - if (comparatorOrSet instanceof Array2DHashSet) { - this.comparator = comparatorOrSet.comparator; - this.buckets = comparatorOrSet.buckets.slice(0); - for (let i = 0; i < this.buckets.length; i++) { - let bucket = this.buckets[i]; - if (bucket) { - this.buckets[i] = bucket.slice(0); - } - } - this.n = comparatorOrSet.n; - this.threshold = comparatorOrSet.threshold; - } else { - this.comparator = comparatorOrSet || DefaultEqualityComparator_1.DefaultEqualityComparator.INSTANCE; - this.buckets = this.createBuckets(initialCapacity); - } - } - getOrAdd(o) { - if (this.n > this.threshold) { - this.expand(); - } - return this.getOrAddImpl(o); - } - getOrAddImpl(o) { - let b = this.getBucket(o); - let bucket = this.buckets[b]; - if (!bucket) { - bucket = [o]; - this.buckets[b] = bucket; - this.n++; - return o; - } - for (let existing of bucket) { - if (this.comparator.equals(existing, o)) { 
- return existing; - } - } - bucket.push(o); - this.n++; - return o; - } - get(o) { - if (o == null) { - return o; - } - let b = this.getBucket(o); - let bucket = this.buckets[b]; - if (!bucket) { - return void 0; - } - for (let e of bucket) { - if (this.comparator.equals(e, o)) { - return e; - } - } - return void 0; - } - getBucket(o) { - let hash = this.comparator.hashCode(o); - let b = hash & this.buckets.length - 1; - return b; - } - hashCode() { - let hash = MurmurHash_1.MurmurHash.initialize(); - for (let bucket of this.buckets) { - if (bucket == null) { - continue; - } - for (let o of bucket) { - if (o == null) { - break; - } - hash = MurmurHash_1.MurmurHash.update(hash, this.comparator.hashCode(o)); - } - } - hash = MurmurHash_1.MurmurHash.finish(hash, this.size); - return hash; - } - equals(o) { - if (o === this) { - return true; - } - if (!(o instanceof Array2DHashSet)) { - return false; - } - if (o.size !== this.size) { - return false; - } - let same = this.containsAll(o); - return same; - } - expand() { - let old = this.buckets; - let newCapacity = this.buckets.length * 2; - let newTable = this.createBuckets(newCapacity); - this.buckets = newTable; - this.threshold = Math.floor(newCapacity * LOAD_FACTOR); - let oldSize = this.size; - for (let bucket of old) { - if (!bucket) { - continue; - } - for (let o of bucket) { - let b = this.getBucket(o); - let newBucket = this.buckets[b]; - if (!newBucket) { - newBucket = []; - this.buckets[b] = newBucket; - } - newBucket.push(o); - } - } - assert(this.n === oldSize); - } - add(t) { - let existing = this.getOrAdd(t); - return existing === t; - } - get size() { - return this.n; - } - get isEmpty() { - return this.n === 0; - } - contains(o) { - return this.containsFast(this.asElementType(o)); - } - containsFast(obj) { - if (obj == null) { - return false; - } - return this.get(obj) != null; - } - *[Symbol.iterator]() { - yield* this.toArray(); - } - toArray() { - const a = new Array(this.size); - let i = 0; - for 
(let bucket of this.buckets) { - if (bucket == null) { - continue; - } - for (let o of bucket) { - if (o == null) { - break; - } - a[i++] = o; - } - } - return a; - } - containsAll(collection) { - if (collection instanceof Array2DHashSet) { - let s = collection; - for (let bucket of s.buckets) { - if (bucket == null) { - continue; - } - for (let o of bucket) { - if (o == null) { - break; - } - if (!this.containsFast(this.asElementType(o))) { - return false; - } - } - } - } else { - for (let o of collection) { - if (!this.containsFast(this.asElementType(o))) { - return false; - } - } - } - return true; - } - addAll(c) { - let changed = false; - for (let o of c) { - let existing = this.getOrAdd(o); - if (existing !== o) { - changed = true; - } - } - return changed; - } - clear() { - this.buckets = this.createBuckets(INITAL_CAPACITY); - this.n = 0; - this.threshold = Math.floor(INITAL_CAPACITY * LOAD_FACTOR); - } - toString() { - if (this.size === 0) { - return "{}"; - } - let buf = "{"; - let first = true; - for (let bucket of this.buckets) { - if (bucket == null) { - continue; - } - for (let o of bucket) { - if (o == null) { - break; - } - if (first) { - first = false; - } else { - buf += ", "; - } - buf += o.toString(); - } - } - buf += "}"; - return buf; - } - toTableString() { - let buf = ""; - for (let bucket of this.buckets) { - if (bucket == null) { - buf += "null\n"; - continue; - } - buf += "["; - let first = true; - for (let o of bucket) { - if (first) { - first = false; - } else { - buf += " "; - } - if (o == null) { - buf += "_"; - } else { - buf += o.toString(); - } - } - buf += "]\n"; - } - return buf; - } - asElementType(o) { - return o; - } - createBuckets(capacity) { - return new Array(capacity); - } - }; - __decorate([ - Decorators_1.NotNull - ], Array2DHashSet.prototype, "comparator", void 0); - __decorate([ - Decorators_1.Override - ], Array2DHashSet.prototype, "hashCode", null); - __decorate([ - Decorators_1.Override - ], 
Array2DHashSet.prototype, "equals", null); - __decorate([ - Decorators_1.Override - ], Array2DHashSet.prototype, "add", null); - __decorate([ - Decorators_1.Override - ], Array2DHashSet.prototype, "size", null); - __decorate([ - Decorators_1.Override - ], Array2DHashSet.prototype, "isEmpty", null); - __decorate([ - Decorators_1.Override - ], Array2DHashSet.prototype, "contains", null); - __decorate([ - __param(0, Decorators_1.Nullable) - ], Array2DHashSet.prototype, "containsFast", null); - __decorate([ - Decorators_1.Override - ], Array2DHashSet.prototype, Symbol.iterator, null); - __decorate([ - Decorators_1.Override - ], Array2DHashSet.prototype, "toArray", null); - __decorate([ - Decorators_1.Override - ], Array2DHashSet.prototype, "containsAll", null); - __decorate([ - Decorators_1.Override - ], Array2DHashSet.prototype, "addAll", null); - __decorate([ - Decorators_1.Override - ], Array2DHashSet.prototype, "clear", null); - __decorate([ - Decorators_1.Override - ], Array2DHashSet.prototype, "toString", null); - __decorate([ - Decorators_1.SuppressWarnings("unchecked") - ], Array2DHashSet.prototype, "asElementType", null); - __decorate([ - Decorators_1.SuppressWarnings("unchecked") - ], Array2DHashSet.prototype, "createBuckets", null); - exports.Array2DHashSet = Array2DHashSet; -}); - -// node_modules/antlr4ts/misc/ArrayEqualityComparator.js -var require_ArrayEqualityComparator = __commonJS((exports) => { - "use strict"; - var __decorate = exports && exports.__decorate || function(decorators, target, key, desc) { - var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; - if (typeof Reflect === "object" && typeof Reflect.decorate === "function") - r = Reflect.decorate(decorators, target, key, desc); - else - for (var i = decorators.length - 1; i >= 0; i--) - if (d = decorators[i]) - r = (c < 3 ? d(r) : c > 3 ? 
d(target, key, r) : d(target, key)) || r; - return c > 3 && r && Object.defineProperty(target, key, r), r; - }; - Object.defineProperty(exports, "__esModule", {value: true}); - exports.ArrayEqualityComparator = void 0; - var Decorators_1 = require_Decorators(); - var MurmurHash_1 = require_MurmurHash(); - var ObjectEqualityComparator_1 = require_ObjectEqualityComparator(); - var ArrayEqualityComparator = class { - hashCode(obj) { - if (obj == null) { - return 0; - } - return MurmurHash_1.MurmurHash.hashCode(obj, 0); - } - equals(a, b) { - if (a == null) { - return b == null; - } else if (b == null) { - return false; - } - if (a.length !== b.length) { - return false; - } - for (let i = 0; i < a.length; i++) { - if (!ObjectEqualityComparator_1.ObjectEqualityComparator.INSTANCE.equals(a[i], b[i])) { - return false; - } - } - return true; - } - }; - ArrayEqualityComparator.INSTANCE = new ArrayEqualityComparator(); - __decorate([ - Decorators_1.Override - ], ArrayEqualityComparator.prototype, "hashCode", null); - __decorate([ - Decorators_1.Override - ], ArrayEqualityComparator.prototype, "equals", null); - exports.ArrayEqualityComparator = ArrayEqualityComparator; -}); - -// node_modules/antlr4ts/misc/Utils.js -var require_Utils = __commonJS((exports) => { - "use strict"; - Object.defineProperty(exports, "__esModule", {value: true}); - exports.toCharArray = exports.toMap = exports.equals = exports.join = exports.escapeWhitespace = void 0; - function escapeWhitespace(s, escapeSpaces) { - return escapeSpaces ? 
s.replace(/ /, "\xB7") : s.replace(/\t/, "\\t").replace(/\n/, "\\n").replace(/\r/, "\\r"); - } - exports.escapeWhitespace = escapeWhitespace; - function join3(collection, separator) { - let buf = ""; - let first = true; - for (let current of collection) { - if (first) { - first = false; - } else { - buf += separator; - } - buf += current; - } - return buf; - } - exports.join = join3; - function equals(x, y) { - if (x === y) { - return true; - } - if (x === void 0 || y === void 0) { - return false; - } - return x.equals(y); - } - exports.equals = equals; - function toMap(keys) { - let m = new Map(); - for (let i = 0; i < keys.length; i++) { - m.set(keys[i], i); - } - return m; - } - exports.toMap = toMap; - function toCharArray3(str) { - if (typeof str === "string") { - let result = new Uint16Array(str.length); - for (let i = 0; i < str.length; i++) { - result[i] = str.charCodeAt(i); - } - return result; - } else { - return str.toCharArray(); - } - } - exports.toCharArray = toCharArray3; -}); - -// node_modules/antlr4ts/atn/SemanticContext.js -var require_SemanticContext = __commonJS((exports) => { - "use strict"; - var __decorate = exports && exports.__decorate || function(decorators, target, key, desc) { - var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; - if (typeof Reflect === "object" && typeof Reflect.decorate === "function") - r = Reflect.decorate(decorators, target, key, desc); - else - for (var i = decorators.length - 1; i >= 0; i--) - if (d = decorators[i]) - r = (c < 3 ? d(r) : c > 3 ? 
d(target, key, r) : d(target, key)) || r; - return c > 3 && r && Object.defineProperty(target, key, r), r; - }; - var __param = exports && exports.__param || function(paramIndex, decorator) { - return function(target, key) { - decorator(target, key, paramIndex); - }; - }; - Object.defineProperty(exports, "__esModule", {value: true}); - exports.SemanticContext = void 0; - var Array2DHashSet_1 = require_Array2DHashSet(); - var ArrayEqualityComparator_1 = require_ArrayEqualityComparator(); - var MurmurHash_1 = require_MurmurHash(); - var Decorators_1 = require_Decorators(); - var ObjectEqualityComparator_1 = require_ObjectEqualityComparator(); - var Utils3 = require_Utils(); - function max(items) { - let result; - for (let current of items) { - if (result === void 0) { - result = current; - continue; - } - let comparison = result.compareTo(current); - if (comparison < 0) { - result = current; - } - } - return result; - } - function min(items) { - let result; - for (let current of items) { - if (result === void 0) { - result = current; - continue; - } - let comparison = result.compareTo(current); - if (comparison > 0) { - result = current; - } - } - return result; - } - var SemanticContext = class { - static get NONE() { - if (SemanticContext._NONE === void 0) { - SemanticContext._NONE = new SemanticContext.Predicate(); - } - return SemanticContext._NONE; - } - evalPrecedence(parser, parserCallStack) { - return this; - } - static and(a, b) { - if (!a || a === SemanticContext.NONE) { - return b; - } - if (b === SemanticContext.NONE) { - return a; - } - let result = new SemanticContext.AND(a, b); - if (result.opnds.length === 1) { - return result.opnds[0]; - } - return result; - } - static or(a, b) { - if (!a) { - return b; - } - if (a === SemanticContext.NONE || b === SemanticContext.NONE) { - return SemanticContext.NONE; - } - let result = new SemanticContext.OR(a, b); - if (result.opnds.length === 1) { - return result.opnds[0]; - } - return result; - } - }; - 
exports.SemanticContext = SemanticContext; - (function(SemanticContext2) { - const AND_HASHCODE = 40363613; - const OR_HASHCODE = 486279973; - function filterPrecedencePredicates(collection) { - let result = []; - for (let i = 0; i < collection.length; i++) { - let context = collection[i]; - if (context instanceof SemanticContext2.PrecedencePredicate) { - result.push(context); - collection.splice(i, 1); - i--; - } - } - return result; - } - class Predicate extends SemanticContext2 { - constructor(ruleIndex = -1, predIndex = -1, isCtxDependent = false) { - super(); - this.ruleIndex = ruleIndex; - this.predIndex = predIndex; - this.isCtxDependent = isCtxDependent; - } - eval(parser, parserCallStack) { - let localctx = this.isCtxDependent ? parserCallStack : void 0; - return parser.sempred(localctx, this.ruleIndex, this.predIndex); - } - hashCode() { - let hashCode = MurmurHash_1.MurmurHash.initialize(); - hashCode = MurmurHash_1.MurmurHash.update(hashCode, this.ruleIndex); - hashCode = MurmurHash_1.MurmurHash.update(hashCode, this.predIndex); - hashCode = MurmurHash_1.MurmurHash.update(hashCode, this.isCtxDependent ? 
1 : 0); - hashCode = MurmurHash_1.MurmurHash.finish(hashCode, 3); - return hashCode; - } - equals(obj) { - if (!(obj instanceof Predicate)) { - return false; - } - if (this === obj) { - return true; - } - return this.ruleIndex === obj.ruleIndex && this.predIndex === obj.predIndex && this.isCtxDependent === obj.isCtxDependent; - } - toString() { - return "{" + this.ruleIndex + ":" + this.predIndex + "}?"; - } - } - __decorate([ - Decorators_1.Override - ], Predicate.prototype, "eval", null); - __decorate([ - Decorators_1.Override - ], Predicate.prototype, "hashCode", null); - __decorate([ - Decorators_1.Override - ], Predicate.prototype, "equals", null); - __decorate([ - Decorators_1.Override - ], Predicate.prototype, "toString", null); - SemanticContext2.Predicate = Predicate; - class PrecedencePredicate extends SemanticContext2 { - constructor(precedence) { - super(); - this.precedence = precedence; - } - eval(parser, parserCallStack) { - return parser.precpred(parserCallStack, this.precedence); - } - evalPrecedence(parser, parserCallStack) { - if (parser.precpred(parserCallStack, this.precedence)) { - return SemanticContext2.NONE; - } else { - return void 0; - } - } - compareTo(o) { - return this.precedence - o.precedence; - } - hashCode() { - let hashCode = 1; - hashCode = 31 * hashCode + this.precedence; - return hashCode; - } - equals(obj) { - if (!(obj instanceof PrecedencePredicate)) { - return false; - } - if (this === obj) { - return true; - } - return this.precedence === obj.precedence; - } - toString() { - return "{" + this.precedence + ">=prec}?"; - } - } - __decorate([ - Decorators_1.Override - ], PrecedencePredicate.prototype, "eval", null); - __decorate([ - Decorators_1.Override - ], PrecedencePredicate.prototype, "evalPrecedence", null); - __decorate([ - Decorators_1.Override - ], PrecedencePredicate.prototype, "compareTo", null); - __decorate([ - Decorators_1.Override - ], PrecedencePredicate.prototype, "hashCode", null); - __decorate([ - 
Decorators_1.Override - ], PrecedencePredicate.prototype, "equals", null); - __decorate([ - Decorators_1.Override - ], PrecedencePredicate.prototype, "toString", null); - SemanticContext2.PrecedencePredicate = PrecedencePredicate; - class Operator extends SemanticContext2 { - } - SemanticContext2.Operator = Operator; - let AND = class AND2 extends Operator { - constructor(a, b) { - super(); - let operands = new Array2DHashSet_1.Array2DHashSet(ObjectEqualityComparator_1.ObjectEqualityComparator.INSTANCE); - if (a instanceof AND2) { - operands.addAll(a.opnds); - } else { - operands.add(a); - } - if (b instanceof AND2) { - operands.addAll(b.opnds); - } else { - operands.add(b); - } - this.opnds = operands.toArray(); - let precedencePredicates = filterPrecedencePredicates(this.opnds); - let reduced = min(precedencePredicates); - if (reduced) { - this.opnds.push(reduced); - } - } - get operands() { - return this.opnds; - } - equals(obj) { - if (this === obj) { - return true; - } - if (!(obj instanceof AND2)) { - return false; - } - return ArrayEqualityComparator_1.ArrayEqualityComparator.INSTANCE.equals(this.opnds, obj.opnds); - } - hashCode() { - return MurmurHash_1.MurmurHash.hashCode(this.opnds, AND_HASHCODE); - } - eval(parser, parserCallStack) { - for (let opnd of this.opnds) { - if (!opnd.eval(parser, parserCallStack)) { - return false; - } - } - return true; - } - evalPrecedence(parser, parserCallStack) { - let differs = false; - let operands = []; - for (let context of this.opnds) { - let evaluated = context.evalPrecedence(parser, parserCallStack); - differs = differs || evaluated !== context; - if (evaluated == null) { - return void 0; - } else if (evaluated !== SemanticContext2.NONE) { - operands.push(evaluated); - } - } - if (!differs) { - return this; - } - if (operands.length === 0) { - return SemanticContext2.NONE; - } - let result = operands[0]; - for (let i = 1; i < operands.length; i++) { - result = SemanticContext2.and(result, operands[i]); - } - 
return result; - } - toString() { - return Utils3.join(this.opnds, "&&"); - } - }; - __decorate([ - Decorators_1.Override - ], AND.prototype, "operands", null); - __decorate([ - Decorators_1.Override - ], AND.prototype, "equals", null); - __decorate([ - Decorators_1.Override - ], AND.prototype, "hashCode", null); - __decorate([ - Decorators_1.Override - ], AND.prototype, "eval", null); - __decorate([ - Decorators_1.Override - ], AND.prototype, "evalPrecedence", null); - __decorate([ - Decorators_1.Override - ], AND.prototype, "toString", null); - AND = __decorate([ - __param(0, Decorators_1.NotNull), - __param(1, Decorators_1.NotNull) - ], AND); - SemanticContext2.AND = AND; - let OR = class OR2 extends Operator { - constructor(a, b) { - super(); - let operands = new Array2DHashSet_1.Array2DHashSet(ObjectEqualityComparator_1.ObjectEqualityComparator.INSTANCE); - if (a instanceof OR2) { - operands.addAll(a.opnds); - } else { - operands.add(a); - } - if (b instanceof OR2) { - operands.addAll(b.opnds); - } else { - operands.add(b); - } - this.opnds = operands.toArray(); - let precedencePredicates = filterPrecedencePredicates(this.opnds); - let reduced = max(precedencePredicates); - if (reduced) { - this.opnds.push(reduced); - } - } - get operands() { - return this.opnds; - } - equals(obj) { - if (this === obj) { - return true; - } - if (!(obj instanceof OR2)) { - return false; - } - return ArrayEqualityComparator_1.ArrayEqualityComparator.INSTANCE.equals(this.opnds, obj.opnds); - } - hashCode() { - return MurmurHash_1.MurmurHash.hashCode(this.opnds, OR_HASHCODE); - } - eval(parser, parserCallStack) { - for (let opnd of this.opnds) { - if (opnd.eval(parser, parserCallStack)) { - return true; - } - } - return false; - } - evalPrecedence(parser, parserCallStack) { - let differs = false; - let operands = []; - for (let context of this.opnds) { - let evaluated = context.evalPrecedence(parser, parserCallStack); - differs = differs || evaluated !== context; - if (evaluated 
=== SemanticContext2.NONE) { - return SemanticContext2.NONE; - } else if (evaluated) { - operands.push(evaluated); - } - } - if (!differs) { - return this; - } - if (operands.length === 0) { - return void 0; - } - let result = operands[0]; - for (let i = 1; i < operands.length; i++) { - result = SemanticContext2.or(result, operands[i]); - } - return result; - } - toString() { - return Utils3.join(this.opnds, "||"); - } - }; - __decorate([ - Decorators_1.Override - ], OR.prototype, "operands", null); - __decorate([ - Decorators_1.Override - ], OR.prototype, "equals", null); - __decorate([ - Decorators_1.Override - ], OR.prototype, "hashCode", null); - __decorate([ - Decorators_1.Override - ], OR.prototype, "eval", null); - __decorate([ - Decorators_1.Override - ], OR.prototype, "evalPrecedence", null); - __decorate([ - Decorators_1.Override - ], OR.prototype, "toString", null); - OR = __decorate([ - __param(0, Decorators_1.NotNull), - __param(1, Decorators_1.NotNull) - ], OR); - SemanticContext2.OR = OR; - })(SemanticContext = exports.SemanticContext || (exports.SemanticContext = {})); -}); - -// node_modules/antlr4ts/atn/PredicateTransition.js -var require_PredicateTransition = __commonJS((exports) => { - "use strict"; - var __decorate = exports && exports.__decorate || function(decorators, target, key, desc) { - var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; - if (typeof Reflect === "object" && typeof Reflect.decorate === "function") - r = Reflect.decorate(decorators, target, key, desc); - else - for (var i = decorators.length - 1; i >= 0; i--) - if (d = decorators[i]) - r = (c < 3 ? d(r) : c > 3 ? 
d(target, key, r) : d(target, key)) || r; - return c > 3 && r && Object.defineProperty(target, key, r), r; - }; - var __param = exports && exports.__param || function(paramIndex, decorator) { - return function(target, key) { - decorator(target, key, paramIndex); - }; - }; - Object.defineProperty(exports, "__esModule", {value: true}); - exports.PredicateTransition = void 0; - var AbstractPredicateTransition_1 = require_AbstractPredicateTransition(); - var Decorators_1 = require_Decorators(); - var SemanticContext_1 = require_SemanticContext(); - var PredicateTransition = class PredicateTransition extends AbstractPredicateTransition_1.AbstractPredicateTransition { - constructor(target, ruleIndex, predIndex, isCtxDependent) { - super(target); - this.ruleIndex = ruleIndex; - this.predIndex = predIndex; - this.isCtxDependent = isCtxDependent; - } - get serializationType() { - return 4; - } - get isEpsilon() { - return true; - } - matches(symbol, minVocabSymbol, maxVocabSymbol) { - return false; - } - get predicate() { - return new SemanticContext_1.SemanticContext.Predicate(this.ruleIndex, this.predIndex, this.isCtxDependent); - } - toString() { - return "pred_" + this.ruleIndex + ":" + this.predIndex; - } - }; - __decorate([ - Decorators_1.Override - ], PredicateTransition.prototype, "serializationType", null); - __decorate([ - Decorators_1.Override - ], PredicateTransition.prototype, "isEpsilon", null); - __decorate([ - Decorators_1.Override - ], PredicateTransition.prototype, "matches", null); - __decorate([ - Decorators_1.Override, - Decorators_1.NotNull - ], PredicateTransition.prototype, "toString", null); - PredicateTransition = __decorate([ - __param(0, Decorators_1.NotNull) - ], PredicateTransition); - exports.PredicateTransition = PredicateTransition; -}); - -// node_modules/antlr4ts/FailedPredicateException.js -var require_FailedPredicateException = __commonJS((exports) => { - "use strict"; - var __decorate = exports && exports.__decorate || 
function(decorators, target, key, desc) { - var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; - if (typeof Reflect === "object" && typeof Reflect.decorate === "function") - r = Reflect.decorate(decorators, target, key, desc); - else - for (var i = decorators.length - 1; i >= 0; i--) - if (d = decorators[i]) - r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r; - return c > 3 && r && Object.defineProperty(target, key, r), r; - }; - var __param = exports && exports.__param || function(paramIndex, decorator) { - return function(target, key) { - decorator(target, key, paramIndex); - }; - }; - Object.defineProperty(exports, "__esModule", {value: true}); - exports.FailedPredicateException = void 0; - var RecognitionException_1 = require_RecognitionException(); - var Decorators_1 = require_Decorators(); - var PredicateTransition_1 = require_PredicateTransition(); - var FailedPredicateException2 = class FailedPredicateException3 extends RecognitionException_1.RecognitionException { - constructor(recognizer, predicate, message) { - super(recognizer, recognizer.inputStream, recognizer.context, FailedPredicateException3.formatMessage(predicate, message)); - let s = recognizer.interpreter.atn.states[recognizer.state]; - let trans = s.transition(0); - if (trans instanceof PredicateTransition_1.PredicateTransition) { - this._ruleIndex = trans.ruleIndex; - this._predicateIndex = trans.predIndex; - } else { - this._ruleIndex = 0; - this._predicateIndex = 0; - } - this._predicate = predicate; - super.setOffendingToken(recognizer, recognizer.currentToken); - } - get ruleIndex() { - return this._ruleIndex; - } - get predicateIndex() { - return this._predicateIndex; - } - get predicate() { - return this._predicate; - } - static formatMessage(predicate, message) { - if (message) { - return message; - } - return `failed predicate: {${predicate}}?`; - } - }; - __decorate([ - Decorators_1.NotNull 
- ], FailedPredicateException2, "formatMessage", null); - FailedPredicateException2 = __decorate([ - __param(0, Decorators_1.NotNull) - ], FailedPredicateException2); - exports.FailedPredicateException = FailedPredicateException2; -}); - -// node_modules/antlr4ts/InputMismatchException.js -var require_InputMismatchException = __commonJS((exports) => { - "use strict"; - var __decorate = exports && exports.__decorate || function(decorators, target, key, desc) { - var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; - if (typeof Reflect === "object" && typeof Reflect.decorate === "function") - r = Reflect.decorate(decorators, target, key, desc); - else - for (var i = decorators.length - 1; i >= 0; i--) - if (d = decorators[i]) - r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r; - return c > 3 && r && Object.defineProperty(target, key, r), r; - }; - var __param = exports && exports.__param || function(paramIndex, decorator) { - return function(target, key) { - decorator(target, key, paramIndex); - }; - }; - Object.defineProperty(exports, "__esModule", {value: true}); - exports.InputMismatchException = void 0; - var RecognitionException_1 = require_RecognitionException(); - var Decorators_1 = require_Decorators(); - var InputMismatchException = class InputMismatchException extends RecognitionException_1.RecognitionException { - constructor(recognizer, state, context) { - if (context === void 0) { - context = recognizer.context; - } - super(recognizer, recognizer.inputStream, context); - if (state !== void 0) { - this.setOffendingState(state); - } - this.setOffendingToken(recognizer, recognizer.currentToken); - } - }; - InputMismatchException = __decorate([ - __param(0, Decorators_1.NotNull) - ], InputMismatchException); - exports.InputMismatchException = InputMismatchException; -}); - -// node_modules/antlr4ts/misc/Arrays.js -var require_Arrays = __commonJS((exports) => { - "use 
strict"; - Object.defineProperty(exports, "__esModule", {value: true}); - exports.Arrays = void 0; - var Arrays; - (function(Arrays2) { - function binarySearch(array, key, fromIndex, toIndex) { - return binarySearch0(array, fromIndex !== void 0 ? fromIndex : 0, toIndex !== void 0 ? toIndex : array.length, key); - } - Arrays2.binarySearch = binarySearch; - function binarySearch0(array, fromIndex, toIndex, key) { - let low = fromIndex; - let high = toIndex - 1; - while (low <= high) { - let mid = low + high >>> 1; - let midVal = array[mid]; - if (midVal < key) { - low = mid + 1; - } else if (midVal > key) { - high = mid - 1; - } else { - return mid; - } - } - return -(low + 1); - } - function toString(array) { - let result = "["; - let first = true; - for (let element of array) { - if (first) { - first = false; - } else { - result += ", "; - } - if (element === null) { - result += "null"; - } else if (element === void 0) { - result += "undefined"; - } else { - result += element; - } - } - result += "]"; - return result; - } - Arrays2.toString = toString; - })(Arrays = exports.Arrays || (exports.Arrays = {})); -}); - -// node_modules/antlr4ts/misc/IntegerList.js -var require_IntegerList = __commonJS((exports) => { - "use strict"; - var __decorate = exports && exports.__decorate || function(decorators, target, key, desc) { - var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; - if (typeof Reflect === "object" && typeof Reflect.decorate === "function") - r = Reflect.decorate(decorators, target, key, desc); - else - for (var i = decorators.length - 1; i >= 0; i--) - if (d = decorators[i]) - r = (c < 3 ? d(r) : c > 3 ? 
d(target, key, r) : d(target, key)) || r; - return c > 3 && r && Object.defineProperty(target, key, r), r; - }; - Object.defineProperty(exports, "__esModule", {value: true}); - exports.IntegerList = void 0; - var Arrays_1 = require_Arrays(); - var Decorators_1 = require_Decorators(); - var EMPTY_DATA = new Int32Array(0); - var INITIAL_SIZE = 4; - var MAX_ARRAY_SIZE = (1 << 31 >>> 0) - 1 - 8; - var IntegerList = class { - constructor(arg) { - if (!arg) { - this._data = EMPTY_DATA; - this._size = 0; - } else if (arg instanceof IntegerList) { - this._data = arg._data.slice(0); - this._size = arg._size; - } else if (typeof arg === "number") { - if (arg === 0) { - this._data = EMPTY_DATA; - this._size = 0; - } else { - this._data = new Int32Array(arg); - this._size = 0; - } - } else { - this._data = EMPTY_DATA; - this._size = 0; - for (let value of arg) { - this.add(value); - } - } - } - add(value) { - if (this._data.length === this._size) { - this.ensureCapacity(this._size + 1); - } - this._data[this._size] = value; - this._size++; - } - addAll(list) { - if (Array.isArray(list)) { - this.ensureCapacity(this._size + list.length); - this._data.subarray(this._size, this._size + list.length).set(list); - this._size += list.length; - } else if (list instanceof IntegerList) { - this.ensureCapacity(this._size + list._size); - this._data.subarray(this._size, this._size + list.size).set(list._data); - this._size += list._size; - } else { - this.ensureCapacity(this._size + list.size); - let current = 0; - for (let xi of list) { - this._data[this._size + current] = xi; - current++; - } - this._size += list.size; - } - } - get(index) { - if (index < 0 || index >= this._size) { - throw RangeError(); - } - return this._data[index]; - } - contains(value) { - for (let i = 0; i < this._size; i++) { - if (this._data[i] === value) { - return true; - } - } - return false; - } - set(index, value) { - if (index < 0 || index >= this._size) { - throw RangeError(); - } - let previous = 
this._data[index]; - this._data[index] = value; - return previous; - } - removeAt(index) { - let value = this.get(index); - this._data.copyWithin(index, index + 1, this._size); - this._data[this._size - 1] = 0; - this._size--; - return value; - } - removeRange(fromIndex, toIndex) { - if (fromIndex < 0 || toIndex < 0 || fromIndex > this._size || toIndex > this._size) { - throw RangeError(); - } - if (fromIndex > toIndex) { - throw RangeError(); - } - this._data.copyWithin(toIndex, fromIndex, this._size); - this._data.fill(0, this._size - (toIndex - fromIndex), this._size); - this._size -= toIndex - fromIndex; - } - get isEmpty() { - return this._size === 0; - } - get size() { - return this._size; - } - trimToSize() { - if (this._data.length === this._size) { - return; - } - this._data = this._data.slice(0, this._size); - } - clear() { - this._data.fill(0, 0, this._size); - this._size = 0; - } - toArray() { - if (this._size === 0) { - return []; - } - return Array.from(this._data.subarray(0, this._size)); - } - sort() { - this._data.subarray(0, this._size).sort(); - } - equals(o) { - if (o === this) { - return true; - } - if (!(o instanceof IntegerList)) { - return false; - } - if (this._size !== o._size) { - return false; - } - for (let i = 0; i < this._size; i++) { - if (this._data[i] !== o._data[i]) { - return false; - } - } - return true; - } - hashCode() { - let hashCode = 1; - for (let i = 0; i < this._size; i++) { - hashCode = 31 * hashCode + this._data[i]; - } - return hashCode; - } - toString() { - return this._data.toString(); - } - binarySearch(key, fromIndex, toIndex) { - if (fromIndex === void 0) { - fromIndex = 0; - } - if (toIndex === void 0) { - toIndex = this._size; - } - if (fromIndex < 0 || toIndex < 0 || fromIndex > this._size || toIndex > this._size) { - throw new RangeError(); - } - if (fromIndex > toIndex) { - throw new RangeError(); - } - return Arrays_1.Arrays.binarySearch(this._data, key, fromIndex, toIndex); - } - ensureCapacity(capacity) { 
- if (capacity < 0 || capacity > MAX_ARRAY_SIZE) { - throw new RangeError(); - } - let newLength; - if (this._data.length === 0) { - newLength = INITIAL_SIZE; - } else { - newLength = this._data.length; - } - while (newLength < capacity) { - newLength = newLength * 2; - if (newLength < 0 || newLength > MAX_ARRAY_SIZE) { - newLength = MAX_ARRAY_SIZE; - } - } - let tmp = new Int32Array(newLength); - tmp.set(this._data); - this._data = tmp; - } - toCharArray() { - let resultArray = new Uint16Array(this._size); - let resultIdx = 0; - let calculatedPreciseResultSize = false; - for (let i = 0; i < this._size; i++) { - let codePoint = this._data[i]; - if (codePoint >= 0 && codePoint < 65536) { - resultArray[resultIdx] = codePoint; - resultIdx++; - continue; - } - if (!calculatedPreciseResultSize) { - let newResultArray = new Uint16Array(this.charArraySize()); - newResultArray.set(resultArray, 0); - resultArray = newResultArray; - calculatedPreciseResultSize = true; - } - let pair = String.fromCodePoint(codePoint); - resultArray[resultIdx] = pair.charCodeAt(0); - resultArray[resultIdx + 1] = pair.charCodeAt(1); - resultIdx += 2; - } - return resultArray; - } - charArraySize() { - let result = 0; - for (let i = 0; i < this._size; i++) { - result += this._data[i] >= 65536 ? 2 : 1; - } - return result; - } - }; - __decorate([ - Decorators_1.NotNull - ], IntegerList.prototype, "_data", void 0); - __decorate([ - Decorators_1.Override - ], IntegerList.prototype, "equals", null); - __decorate([ - Decorators_1.Override - ], IntegerList.prototype, "hashCode", null); - __decorate([ - Decorators_1.Override - ], IntegerList.prototype, "toString", null); - exports.IntegerList = IntegerList; -}); - -// node_modules/antlr4ts/misc/Interval.js -var require_Interval = __commonJS((exports) => { - "use strict"; - var __decorate = exports && exports.__decorate || function(decorators, target, key, desc) { - var c = arguments.length, r = c < 3 ? target : desc === null ? 
desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; - if (typeof Reflect === "object" && typeof Reflect.decorate === "function") - r = Reflect.decorate(decorators, target, key, desc); - else - for (var i = decorators.length - 1; i >= 0; i--) - if (d = decorators[i]) - r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r; - return c > 3 && r && Object.defineProperty(target, key, r), r; - }; - Object.defineProperty(exports, "__esModule", {value: true}); - exports.Interval = void 0; - var Decorators_1 = require_Decorators(); - var INTERVAL_POOL_MAX_VALUE = 1e3; - var Interval = class { - constructor(a, b) { - this.a = a; - this.b = b; - } - static get INVALID() { - return Interval._INVALID; - } - static of(a, b) { - if (a !== b || a < 0 || a > INTERVAL_POOL_MAX_VALUE) { - return new Interval(a, b); - } - if (Interval.cache[a] == null) { - Interval.cache[a] = new Interval(a, a); - } - return Interval.cache[a]; - } - get length() { - if (this.b < this.a) { - return 0; - } - return this.b - this.a + 1; - } - equals(o) { - if (o === this) { - return true; - } else if (!(o instanceof Interval)) { - return false; - } - return this.a === o.a && this.b === o.b; - } - hashCode() { - let hash = 23; - hash = hash * 31 + this.a; - hash = hash * 31 + this.b; - return hash; - } - startsBeforeDisjoint(other) { - return this.a < other.a && this.b < other.a; - } - startsBeforeNonDisjoint(other) { - return this.a <= other.a && this.b >= other.a; - } - startsAfter(other) { - return this.a > other.a; - } - startsAfterDisjoint(other) { - return this.a > other.b; - } - startsAfterNonDisjoint(other) { - return this.a > other.a && this.a <= other.b; - } - disjoint(other) { - return this.startsBeforeDisjoint(other) || this.startsAfterDisjoint(other); - } - adjacent(other) { - return this.a === other.b + 1 || this.b === other.a - 1; - } - properlyContains(other) { - return other.a >= this.a && other.b <= this.b; - } - union(other) { - return 
Interval.of(Math.min(this.a, other.a), Math.max(this.b, other.b)); - } - intersection(other) { - return Interval.of(Math.max(this.a, other.a), Math.min(this.b, other.b)); - } - differenceNotProperlyContained(other) { - let diff; - if (other.startsBeforeNonDisjoint(this)) { - diff = Interval.of(Math.max(this.a, other.b + 1), this.b); - } else if (other.startsAfterNonDisjoint(this)) { - diff = Interval.of(this.a, other.a - 1); - } - return diff; - } - toString() { - return this.a + ".." + this.b; - } - }; - Interval._INVALID = new Interval(-1, -2); - Interval.cache = new Array(INTERVAL_POOL_MAX_VALUE + 1); - __decorate([ - Decorators_1.Override - ], Interval.prototype, "equals", null); - __decorate([ - Decorators_1.Override - ], Interval.prototype, "hashCode", null); - __decorate([ - Decorators_1.Override - ], Interval.prototype, "toString", null); - exports.Interval = Interval; -}); - -// node_modules/antlr4ts/Token.js -var require_Token = __commonJS((exports) => { - "use strict"; - Object.defineProperty(exports, "__esModule", {value: true}); - exports.Token = void 0; - var IntStream_1 = require_IntStream(); - var Token2; - (function(Token3) { - Token3.INVALID_TYPE = 0; - Token3.EPSILON = -2; - Token3.MIN_USER_TOKEN_TYPE = 1; - Token3.EOF = IntStream_1.IntStream.EOF; - Token3.DEFAULT_CHANNEL = 0; - Token3.HIDDEN_CHANNEL = 1; - Token3.MIN_USER_CHANNEL_VALUE = 2; - })(Token2 = exports.Token || (exports.Token = {})); -}); - -// node_modules/antlr4ts/CommonToken.js -var require_CommonToken = __commonJS((exports) => { - "use strict"; - var __decorate = exports && exports.__decorate || function(decorators, target, key, desc) { - var c = arguments.length, r = c < 3 ? target : desc === null ? 
desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; - if (typeof Reflect === "object" && typeof Reflect.decorate === "function") - r = Reflect.decorate(decorators, target, key, desc); - else - for (var i = decorators.length - 1; i >= 0; i--) - if (d = decorators[i]) - r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r; - return c > 3 && r && Object.defineProperty(target, key, r), r; - }; - var __param = exports && exports.__param || function(paramIndex, decorator) { - return function(target, key) { - decorator(target, key, paramIndex); - }; - }; - Object.defineProperty(exports, "__esModule", {value: true}); - exports.CommonToken = void 0; - var Interval_1 = require_Interval(); - var Decorators_1 = require_Decorators(); - var Token_1 = require_Token(); - var CommonToken = class CommonToken2 { - constructor(type, text, source = CommonToken2.EMPTY_SOURCE, channel = Token_1.Token.DEFAULT_CHANNEL, start = 0, stop = 0) { - this._line = 0; - this._charPositionInLine = -1; - this._channel = Token_1.Token.DEFAULT_CHANNEL; - this.index = -1; - this._text = text; - this._type = type; - this.source = source; - this._channel = channel; - this.start = start; - this.stop = stop; - if (source.source != null) { - this._line = source.source.line; - this._charPositionInLine = source.source.charPositionInLine; - } - } - static fromToken(oldToken) { - let result = new CommonToken2(oldToken.type, void 0, CommonToken2.EMPTY_SOURCE, oldToken.channel, oldToken.startIndex, oldToken.stopIndex); - result._line = oldToken.line; - result.index = oldToken.tokenIndex; - result._charPositionInLine = oldToken.charPositionInLine; - if (oldToken instanceof CommonToken2) { - result._text = oldToken._text; - result.source = oldToken.source; - } else { - result._text = oldToken.text; - result.source = {source: oldToken.tokenSource, stream: oldToken.inputStream}; - } - return result; - } - get type() { - return this._type; - } - set type(type) { - this._type = type; - } - get 
line() { - return this._line; - } - set line(line) { - this._line = line; - } - get text() { - if (this._text != null) { - return this._text; - } - let input = this.inputStream; - if (input == null) { - return void 0; - } - let n = input.size; - if (this.start < n && this.stop < n) { - return input.getText(Interval_1.Interval.of(this.start, this.stop)); - } else { - return ""; - } - } - set text(text) { - this._text = text; - } - get charPositionInLine() { - return this._charPositionInLine; - } - set charPositionInLine(charPositionInLine) { - this._charPositionInLine = charPositionInLine; - } - get channel() { - return this._channel; - } - set channel(channel) { - this._channel = channel; - } - get startIndex() { - return this.start; - } - set startIndex(start) { - this.start = start; - } - get stopIndex() { - return this.stop; - } - set stopIndex(stop) { - this.stop = stop; - } - get tokenIndex() { - return this.index; - } - set tokenIndex(index) { - this.index = index; - } - get tokenSource() { - return this.source.source; - } - get inputStream() { - return this.source.stream; - } - toString(recognizer) { - let channelStr = ""; - if (this._channel > 0) { - channelStr = ",channel=" + this._channel; - } - let txt = this.text; - if (txt != null) { - txt = txt.replace(/\n/g, "\\n"); - txt = txt.replace(/\r/g, "\\r"); - txt = txt.replace(/\t/g, "\\t"); - } else { - txt = ""; - } - let typeString = String(this._type); - if (recognizer) { - typeString = recognizer.vocabulary.getDisplayName(this._type); - } - return "[@" + this.tokenIndex + "," + this.start + ":" + this.stop + "='" + txt + "',<" + typeString + ">" + channelStr + "," + this._line + ":" + this.charPositionInLine + "]"; - } - }; - CommonToken.EMPTY_SOURCE = {source: void 0, stream: void 0}; - __decorate([ - Decorators_1.NotNull - ], CommonToken.prototype, "source", void 0); - __decorate([ - Decorators_1.Override - ], CommonToken.prototype, "type", null); - __decorate([ - Decorators_1.Override - ], 
CommonToken.prototype, "line", null); - __decorate([ - Decorators_1.Override - ], CommonToken.prototype, "text", null); - __decorate([ - Decorators_1.Override - ], CommonToken.prototype, "charPositionInLine", null); - __decorate([ - Decorators_1.Override - ], CommonToken.prototype, "channel", null); - __decorate([ - Decorators_1.Override - ], CommonToken.prototype, "startIndex", null); - __decorate([ - Decorators_1.Override - ], CommonToken.prototype, "stopIndex", null); - __decorate([ - Decorators_1.Override - ], CommonToken.prototype, "tokenIndex", null); - __decorate([ - Decorators_1.Override - ], CommonToken.prototype, "tokenSource", null); - __decorate([ - Decorators_1.Override - ], CommonToken.prototype, "inputStream", null); - __decorate([ - Decorators_1.Override - ], CommonToken.prototype, "toString", null); - __decorate([ - __param(0, Decorators_1.NotNull) - ], CommonToken, "fromToken", null); - CommonToken = __decorate([ - __param(2, Decorators_1.NotNull) - ], CommonToken); - exports.CommonToken = CommonToken; -}); - -// node_modules/antlr4ts/CommonTokenFactory.js -var require_CommonTokenFactory = __commonJS((exports) => { - "use strict"; - var __decorate = exports && exports.__decorate || function(decorators, target, key, desc) { - var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; - if (typeof Reflect === "object" && typeof Reflect.decorate === "function") - r = Reflect.decorate(decorators, target, key, desc); - else - for (var i = decorators.length - 1; i >= 0; i--) - if (d = decorators[i]) - r = (c < 3 ? d(r) : c > 3 ? 
d(target, key, r) : d(target, key)) || r; - return c > 3 && r && Object.defineProperty(target, key, r), r; - }; - Object.defineProperty(exports, "__esModule", {value: true}); - exports.CommonTokenFactory = void 0; - var CommonToken_1 = require_CommonToken(); - var Interval_1 = require_Interval(); - var Decorators_1 = require_Decorators(); - var CommonTokenFactory = class { - constructor(copyText = false) { - this.copyText = copyText; - } - create(source, type, text, channel, start, stop, line, charPositionInLine) { - let t = new CommonToken_1.CommonToken(type, text, source, channel, start, stop); - t.line = line; - t.charPositionInLine = charPositionInLine; - if (text == null && this.copyText && source.stream != null) { - t.text = source.stream.getText(Interval_1.Interval.of(start, stop)); - } - return t; - } - createSimple(type, text) { - return new CommonToken_1.CommonToken(type, text); - } - }; - __decorate([ - Decorators_1.Override - ], CommonTokenFactory.prototype, "create", null); - __decorate([ - Decorators_1.Override - ], CommonTokenFactory.prototype, "createSimple", null); - exports.CommonTokenFactory = CommonTokenFactory; - (function(CommonTokenFactory2) { - CommonTokenFactory2.DEFAULT = new CommonTokenFactory2(); - })(CommonTokenFactory = exports.CommonTokenFactory || (exports.CommonTokenFactory = {})); -}); - -// node_modules/antlr4ts/misc/IntegerStack.js -var require_IntegerStack = __commonJS((exports) => { - "use strict"; - Object.defineProperty(exports, "__esModule", {value: true}); - exports.IntegerStack = void 0; - var IntegerList_1 = require_IntegerList(); - var IntegerStack = class extends IntegerList_1.IntegerList { - constructor(arg) { - super(arg); - } - push(value) { - this.add(value); - } - pop() { - return this.removeAt(this.size - 1); - } - peek() { - return this.get(this.size - 1); - } - }; - exports.IntegerStack = IntegerStack; -}); - -// node_modules/antlr4ts/dfa/AcceptStateInfo.js -var require_AcceptStateInfo = __commonJS((exports) => 
{ - "use strict"; - Object.defineProperty(exports, "__esModule", {value: true}); - exports.AcceptStateInfo = void 0; - var AcceptStateInfo = class { - constructor(prediction, lexerActionExecutor) { - this._prediction = prediction; - this._lexerActionExecutor = lexerActionExecutor; - } - get prediction() { - return this._prediction; - } - get lexerActionExecutor() { - return this._lexerActionExecutor; - } - }; - exports.AcceptStateInfo = AcceptStateInfo; -}); - -// node_modules/antlr4ts/misc/Array2DHashMap.js -var require_Array2DHashMap = __commonJS((exports) => { - "use strict"; - Object.defineProperty(exports, "__esModule", {value: true}); - exports.Array2DHashMap = void 0; - var Array2DHashSet_1 = require_Array2DHashSet(); - var MapKeyEqualityComparator = class { - constructor(keyComparator) { - this.keyComparator = keyComparator; - } - hashCode(obj) { - return this.keyComparator.hashCode(obj.key); - } - equals(a, b) { - return this.keyComparator.equals(a.key, b.key); - } - }; - var Array2DHashMap = class { - constructor(keyComparer) { - if (keyComparer instanceof Array2DHashMap) { - this.backingStore = new Array2DHashSet_1.Array2DHashSet(keyComparer.backingStore); - } else { - this.backingStore = new Array2DHashSet_1.Array2DHashSet(new MapKeyEqualityComparator(keyComparer)); - } - } - clear() { - this.backingStore.clear(); - } - containsKey(key) { - return this.backingStore.contains({key}); - } - get(key) { - let bucket = this.backingStore.get({key}); - if (!bucket) { - return void 0; - } - return bucket.value; - } - get isEmpty() { - return this.backingStore.isEmpty; - } - put(key, value) { - let element = this.backingStore.get({key, value}); - let result; - if (!element) { - this.backingStore.add({key, value}); - } else { - result = element.value; - element.value = value; - } - return result; - } - putIfAbsent(key, value) { - let element = this.backingStore.get({key, value}); - let result; - if (!element) { - this.backingStore.add({key, value}); - } else { - 
result = element.value; - } - return result; - } - get size() { - return this.backingStore.size; - } - hashCode() { - return this.backingStore.hashCode(); - } - equals(o) { - if (!(o instanceof Array2DHashMap)) { - return false; - } - return this.backingStore.equals(o.backingStore); - } - }; - exports.Array2DHashMap = Array2DHashMap; -}); - -// node_modules/antlr4ts/atn/DecisionState.js -var require_DecisionState = __commonJS((exports) => { - "use strict"; - Object.defineProperty(exports, "__esModule", {value: true}); - exports.DecisionState = void 0; - var ATNState_1 = require_ATNState(); - var DecisionState = class extends ATNState_1.ATNState { - constructor() { - super(...arguments); - this.decision = -1; - this.nonGreedy = false; - this.sll = false; - } - }; - exports.DecisionState = DecisionState; -}); - -// node_modules/antlr4ts/atn/PredictionContextCache.js -var require_PredictionContextCache = __commonJS((exports) => { - "use strict"; - var __decorate = exports && exports.__decorate || function(decorators, target, key, desc) { - var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; - if (typeof Reflect === "object" && typeof Reflect.decorate === "function") - r = Reflect.decorate(decorators, target, key, desc); - else - for (var i = decorators.length - 1; i >= 0; i--) - if (d = decorators[i]) - r = (c < 3 ? d(r) : c > 3 ? 
d(target, key, r) : d(target, key)) || r; - return c > 3 && r && Object.defineProperty(target, key, r), r; - }; - Object.defineProperty(exports, "__esModule", {value: true}); - exports.PredictionContextCache = void 0; - var Array2DHashMap_1 = require_Array2DHashMap(); - var Decorators_1 = require_Decorators(); - var ObjectEqualityComparator_1 = require_ObjectEqualityComparator(); - var PredictionContext_1 = require_PredictionContext(); - var assert = require("assert"); - var PredictionContextCache = class { - constructor(enableCache = true) { - this.contexts = new Array2DHashMap_1.Array2DHashMap(ObjectEqualityComparator_1.ObjectEqualityComparator.INSTANCE); - this.childContexts = new Array2DHashMap_1.Array2DHashMap(ObjectEqualityComparator_1.ObjectEqualityComparator.INSTANCE); - this.joinContexts = new Array2DHashMap_1.Array2DHashMap(ObjectEqualityComparator_1.ObjectEqualityComparator.INSTANCE); - this.enableCache = enableCache; - } - getAsCached(context) { - if (!this.enableCache) { - return context; - } - let result = this.contexts.get(context); - if (!result) { - result = context; - this.contexts.put(context, context); - } - return result; - } - getChild(context, invokingState) { - if (!this.enableCache) { - return context.getChild(invokingState); - } - let operands = new PredictionContextCache.PredictionContextAndInt(context, invokingState); - let result = this.childContexts.get(operands); - if (!result) { - result = context.getChild(invokingState); - result = this.getAsCached(result); - this.childContexts.put(operands, result); - } - return result; - } - join(x, y) { - if (!this.enableCache) { - return PredictionContext_1.PredictionContext.join(x, y, this); - } - let operands = new PredictionContextCache.IdentityCommutativePredictionContextOperands(x, y); - let result = this.joinContexts.get(operands); - if (result) { - return result; - } - result = PredictionContext_1.PredictionContext.join(x, y, this); - result = this.getAsCached(result); - 
this.joinContexts.put(operands, result); - return result; - } - }; - exports.PredictionContextCache = PredictionContextCache; - PredictionContextCache.UNCACHED = new PredictionContextCache(false); - (function(PredictionContextCache2) { - class PredictionContextAndInt { - constructor(obj, value) { - this.obj = obj; - this.value = value; - } - equals(obj) { - if (!(obj instanceof PredictionContextAndInt)) { - return false; - } else if (obj === this) { - return true; - } - let other = obj; - return this.value === other.value && (this.obj === other.obj || this.obj != null && this.obj.equals(other.obj)); - } - hashCode() { - let hashCode = 5; - hashCode = 7 * hashCode + (this.obj != null ? this.obj.hashCode() : 0); - hashCode = 7 * hashCode + this.value; - return hashCode; - } - } - __decorate([ - Decorators_1.Override - ], PredictionContextAndInt.prototype, "equals", null); - __decorate([ - Decorators_1.Override - ], PredictionContextAndInt.prototype, "hashCode", null); - PredictionContextCache2.PredictionContextAndInt = PredictionContextAndInt; - class IdentityCommutativePredictionContextOperands { - constructor(x, y) { - assert(x != null); - assert(y != null); - this._x = x; - this._y = y; - } - get x() { - return this._x; - } - get y() { - return this._y; - } - equals(o) { - if (!(o instanceof IdentityCommutativePredictionContextOperands)) { - return false; - } else if (this === o) { - return true; - } - let other = o; - return this._x === other._x && this._y === other._y || this._x === other._y && this._y === other._x; - } - hashCode() { - return this._x.hashCode() ^ this._y.hashCode(); - } - } - __decorate([ - Decorators_1.Override - ], IdentityCommutativePredictionContextOperands.prototype, "equals", null); - __decorate([ - Decorators_1.Override - ], IdentityCommutativePredictionContextOperands.prototype, "hashCode", null); - PredictionContextCache2.IdentityCommutativePredictionContextOperands = IdentityCommutativePredictionContextOperands; - 
})(PredictionContextCache = exports.PredictionContextCache || (exports.PredictionContextCache = {})); -}); - -// node_modules/antlr4ts/atn/PredictionContext.js -var require_PredictionContext = __commonJS((exports) => { - "use strict"; - var __decorate = exports && exports.__decorate || function(decorators, target, key, desc) { - var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; - if (typeof Reflect === "object" && typeof Reflect.decorate === "function") - r = Reflect.decorate(decorators, target, key, desc); - else - for (var i = decorators.length - 1; i >= 0; i--) - if (d = decorators[i]) - r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r; - return c > 3 && r && Object.defineProperty(target, key, r), r; - }; - var __param = exports && exports.__param || function(paramIndex, decorator) { - return function(target, key) { - decorator(target, key, paramIndex); - }; - }; - Object.defineProperty(exports, "__esModule", {value: true}); - exports.SingletonPredictionContext = exports.PredictionContext = void 0; - var Array2DHashMap_1 = require_Array2DHashMap(); - var Array2DHashSet_1 = require_Array2DHashSet(); - var Arrays_1 = require_Arrays(); - var MurmurHash_1 = require_MurmurHash(); - var Decorators_1 = require_Decorators(); - var PredictionContextCache_1 = require_PredictionContextCache(); - var assert = require("assert"); - var INITIAL_HASH = 1; - var PredictionContext = class { - constructor(cachedHashCode) { - this.cachedHashCode = cachedHashCode; - } - static calculateEmptyHashCode() { - let hash = MurmurHash_1.MurmurHash.initialize(INITIAL_HASH); - hash = MurmurHash_1.MurmurHash.finish(hash, 0); - return hash; - } - static calculateSingleHashCode(parent, returnState) { - let hash = MurmurHash_1.MurmurHash.initialize(INITIAL_HASH); - hash = MurmurHash_1.MurmurHash.update(hash, parent); - hash = MurmurHash_1.MurmurHash.update(hash, returnState); - hash = 
MurmurHash_1.MurmurHash.finish(hash, 2); - return hash; - } - static calculateHashCode(parents, returnStates) { - let hash = MurmurHash_1.MurmurHash.initialize(INITIAL_HASH); - for (let parent of parents) { - hash = MurmurHash_1.MurmurHash.update(hash, parent); - } - for (let returnState of returnStates) { - hash = MurmurHash_1.MurmurHash.update(hash, returnState); - } - hash = MurmurHash_1.MurmurHash.finish(hash, 2 * parents.length); - return hash; - } - static fromRuleContext(atn, outerContext, fullContext = true) { - if (outerContext.isEmpty) { - return fullContext ? PredictionContext.EMPTY_FULL : PredictionContext.EMPTY_LOCAL; - } - let parent; - if (outerContext._parent) { - parent = PredictionContext.fromRuleContext(atn, outerContext._parent, fullContext); - } else { - parent = fullContext ? PredictionContext.EMPTY_FULL : PredictionContext.EMPTY_LOCAL; - } - let state = atn.states[outerContext.invokingState]; - let transition = state.transition(0); - return parent.getChild(transition.followState.stateNumber); - } - static addEmptyContext(context) { - return context.addEmptyContext(); - } - static removeEmptyContext(context) { - return context.removeEmptyContext(); - } - static join(context0, context1, contextCache = PredictionContextCache_1.PredictionContextCache.UNCACHED) { - if (context0 === context1) { - return context0; - } - if (context0.isEmpty) { - return PredictionContext.isEmptyLocal(context0) ? context0 : PredictionContext.addEmptyContext(context1); - } else if (context1.isEmpty) { - return PredictionContext.isEmptyLocal(context1) ? 
context1 : PredictionContext.addEmptyContext(context0); - } - let context0size = context0.size; - let context1size = context1.size; - if (context0size === 1 && context1size === 1 && context0.getReturnState(0) === context1.getReturnState(0)) { - let merged = contextCache.join(context0.getParent(0), context1.getParent(0)); - if (merged === context0.getParent(0)) { - return context0; - } else if (merged === context1.getParent(0)) { - return context1; - } else { - return merged.getChild(context0.getReturnState(0)); - } - } - let count = 0; - let parentsList = new Array(context0size + context1size); - let returnStatesList = new Array(parentsList.length); - let leftIndex = 0; - let rightIndex = 0; - let canReturnLeft = true; - let canReturnRight = true; - while (leftIndex < context0size && rightIndex < context1size) { - if (context0.getReturnState(leftIndex) === context1.getReturnState(rightIndex)) { - parentsList[count] = contextCache.join(context0.getParent(leftIndex), context1.getParent(rightIndex)); - returnStatesList[count] = context0.getReturnState(leftIndex); - canReturnLeft = canReturnLeft && parentsList[count] === context0.getParent(leftIndex); - canReturnRight = canReturnRight && parentsList[count] === context1.getParent(rightIndex); - leftIndex++; - rightIndex++; - } else if (context0.getReturnState(leftIndex) < context1.getReturnState(rightIndex)) { - parentsList[count] = context0.getParent(leftIndex); - returnStatesList[count] = context0.getReturnState(leftIndex); - canReturnRight = false; - leftIndex++; - } else { - assert(context1.getReturnState(rightIndex) < context0.getReturnState(leftIndex)); - parentsList[count] = context1.getParent(rightIndex); - returnStatesList[count] = context1.getReturnState(rightIndex); - canReturnLeft = false; - rightIndex++; - } - count++; - } - while (leftIndex < context0size) { - parentsList[count] = context0.getParent(leftIndex); - returnStatesList[count] = context0.getReturnState(leftIndex); - leftIndex++; - canReturnRight 
= false; - count++; - } - while (rightIndex < context1size) { - parentsList[count] = context1.getParent(rightIndex); - returnStatesList[count] = context1.getReturnState(rightIndex); - rightIndex++; - canReturnLeft = false; - count++; - } - if (canReturnLeft) { - return context0; - } else if (canReturnRight) { - return context1; - } - if (count < parentsList.length) { - parentsList = parentsList.slice(0, count); - returnStatesList = returnStatesList.slice(0, count); - } - if (parentsList.length === 0) { - return PredictionContext.EMPTY_FULL; - } else if (parentsList.length === 1) { - return new SingletonPredictionContext(parentsList[0], returnStatesList[0]); - } else { - return new ArrayPredictionContext(parentsList, returnStatesList); - } - } - static isEmptyLocal(context) { - return context === PredictionContext.EMPTY_LOCAL; - } - static getCachedContext(context, contextCache, visited) { - if (context.isEmpty) { - return context; - } - let existing = visited.get(context); - if (existing) { - return existing; - } - existing = contextCache.get(context); - if (existing) { - visited.put(context, existing); - return existing; - } - let changed = false; - let parents = new Array(context.size); - for (let i = 0; i < parents.length; i++) { - let parent = PredictionContext.getCachedContext(context.getParent(i), contextCache, visited); - if (changed || parent !== context.getParent(i)) { - if (!changed) { - parents = new Array(context.size); - for (let j = 0; j < context.size; j++) { - parents[j] = context.getParent(j); - } - changed = true; - } - parents[i] = parent; - } - } - if (!changed) { - existing = contextCache.putIfAbsent(context, context); - visited.put(context, existing != null ? 
existing : context); - return context; - } - let updated; - if (parents.length === 1) { - updated = new SingletonPredictionContext(parents[0], context.getReturnState(0)); - } else { - let returnStates = new Array(context.size); - for (let i = 0; i < context.size; i++) { - returnStates[i] = context.getReturnState(i); - } - updated = new ArrayPredictionContext(parents, returnStates, context.hashCode()); - } - existing = contextCache.putIfAbsent(updated, updated); - visited.put(updated, existing || updated); - visited.put(context, existing || updated); - return updated; - } - appendSingleContext(returnContext, contextCache) { - return this.appendContext(PredictionContext.EMPTY_FULL.getChild(returnContext), contextCache); - } - getChild(returnState) { - return new SingletonPredictionContext(this, returnState); - } - hashCode() { - return this.cachedHashCode; - } - toStrings(recognizer, currentState, stop = PredictionContext.EMPTY_FULL) { - let result = []; - outer: - for (let perm = 0; ; perm++) { - let offset = 0; - let last = true; - let p = this; - let stateNumber = currentState; - let localBuffer = ""; - localBuffer += "["; - while (!p.isEmpty && p !== stop) { - let index = 0; - if (p.size > 0) { - let bits = 1; - while (1 << bits >>> 0 < p.size) { - bits++; - } - let mask = (1 << bits >>> 0) - 1; - index = perm >> offset & mask; - last = last && index >= p.size - 1; - if (index >= p.size) { - continue outer; - } - offset += bits; - } - if (recognizer) { - if (localBuffer.length > 1) { - localBuffer += " "; - } - let atn = recognizer.atn; - let s = atn.states[stateNumber]; - let ruleName = recognizer.ruleNames[s.ruleIndex]; - localBuffer += ruleName; - } else if (p.getReturnState(index) !== PredictionContext.EMPTY_FULL_STATE_KEY) { - if (!p.isEmpty) { - if (localBuffer.length > 1) { - localBuffer += " "; - } - localBuffer += p.getReturnState(index); - } - } - stateNumber = p.getReturnState(index); - p = p.getParent(index); - } - localBuffer += "]"; - 
result.push(localBuffer); - if (last) { - break; - } - } - return result; - } - }; - __decorate([ - Decorators_1.Override - ], PredictionContext.prototype, "hashCode", null); - __decorate([ - __param(0, Decorators_1.NotNull), - __param(1, Decorators_1.NotNull), - __param(2, Decorators_1.NotNull) - ], PredictionContext, "join", null); - __decorate([ - __param(0, Decorators_1.NotNull), - __param(1, Decorators_1.NotNull), - __param(2, Decorators_1.NotNull) - ], PredictionContext, "getCachedContext", null); - exports.PredictionContext = PredictionContext; - var EmptyPredictionContext = class extends PredictionContext { - constructor(fullContext) { - super(PredictionContext.calculateEmptyHashCode()); - this.fullContext = fullContext; - } - get isFullContext() { - return this.fullContext; - } - addEmptyContext() { - return this; - } - removeEmptyContext() { - throw new Error("Cannot remove the empty context from itself."); - } - getParent(index) { - throw new Error("index out of bounds"); - } - getReturnState(index) { - throw new Error("index out of bounds"); - } - findReturnState(returnState) { - return -1; - } - get size() { - return 0; - } - appendSingleContext(returnContext, contextCache) { - return contextCache.getChild(this, returnContext); - } - appendContext(suffix, contextCache) { - return suffix; - } - get isEmpty() { - return true; - } - get hasEmpty() { - return true; - } - equals(o) { - return this === o; - } - toStrings(recognizer, currentState, stop) { - return ["[]"]; - } - }; - __decorate([ - Decorators_1.Override - ], EmptyPredictionContext.prototype, "addEmptyContext", null); - __decorate([ - Decorators_1.Override - ], EmptyPredictionContext.prototype, "removeEmptyContext", null); - __decorate([ - Decorators_1.Override - ], EmptyPredictionContext.prototype, "getParent", null); - __decorate([ - Decorators_1.Override - ], EmptyPredictionContext.prototype, "getReturnState", null); - __decorate([ - Decorators_1.Override - ], 
EmptyPredictionContext.prototype, "findReturnState", null); - __decorate([ - Decorators_1.Override - ], EmptyPredictionContext.prototype, "size", null); - __decorate([ - Decorators_1.Override - ], EmptyPredictionContext.prototype, "appendSingleContext", null); - __decorate([ - Decorators_1.Override - ], EmptyPredictionContext.prototype, "appendContext", null); - __decorate([ - Decorators_1.Override - ], EmptyPredictionContext.prototype, "isEmpty", null); - __decorate([ - Decorators_1.Override - ], EmptyPredictionContext.prototype, "hasEmpty", null); - __decorate([ - Decorators_1.Override - ], EmptyPredictionContext.prototype, "equals", null); - __decorate([ - Decorators_1.Override - ], EmptyPredictionContext.prototype, "toStrings", null); - var ArrayPredictionContext = class ArrayPredictionContext2 extends PredictionContext { - constructor(parents, returnStates, hashCode) { - super(hashCode || PredictionContext.calculateHashCode(parents, returnStates)); - assert(parents.length === returnStates.length); - assert(returnStates.length > 1 || returnStates[0] !== PredictionContext.EMPTY_FULL_STATE_KEY, "Should be using PredictionContext.EMPTY instead."); - this.parents = parents; - this.returnStates = returnStates; - } - getParent(index) { - return this.parents[index]; - } - getReturnState(index) { - return this.returnStates[index]; - } - findReturnState(returnState) { - return Arrays_1.Arrays.binarySearch(this.returnStates, returnState); - } - get size() { - return this.returnStates.length; - } - get isEmpty() { - return false; - } - get hasEmpty() { - return this.returnStates[this.returnStates.length - 1] === PredictionContext.EMPTY_FULL_STATE_KEY; - } - addEmptyContext() { - if (this.hasEmpty) { - return this; - } - let parents2 = this.parents.slice(0); - let returnStates2 = this.returnStates.slice(0); - parents2.push(PredictionContext.EMPTY_FULL); - returnStates2.push(PredictionContext.EMPTY_FULL_STATE_KEY); - return new ArrayPredictionContext2(parents2, 
returnStates2); - } - removeEmptyContext() { - if (!this.hasEmpty) { - return this; - } - if (this.returnStates.length === 2) { - return new SingletonPredictionContext(this.parents[0], this.returnStates[0]); - } else { - let parents2 = this.parents.slice(0, this.parents.length - 1); - let returnStates2 = this.returnStates.slice(0, this.returnStates.length - 1); - return new ArrayPredictionContext2(parents2, returnStates2); - } - } - appendContext(suffix, contextCache) { - return ArrayPredictionContext2.appendContextImpl(this, suffix, new PredictionContext.IdentityHashMap()); - } - static appendContextImpl(context, suffix, visited) { - if (suffix.isEmpty) { - if (PredictionContext.isEmptyLocal(suffix)) { - if (context.hasEmpty) { - return PredictionContext.EMPTY_LOCAL; - } - throw new Error("what to do here?"); - } - return context; - } - if (suffix.size !== 1) { - throw new Error("Appending a tree suffix is not yet supported."); - } - let result = visited.get(context); - if (!result) { - if (context.isEmpty) { - result = suffix; - } else { - let parentCount = context.size; - if (context.hasEmpty) { - parentCount--; - } - let updatedParents = new Array(parentCount); - let updatedReturnStates = new Array(parentCount); - for (let i = 0; i < parentCount; i++) { - updatedReturnStates[i] = context.getReturnState(i); - } - for (let i = 0; i < parentCount; i++) { - updatedParents[i] = ArrayPredictionContext2.appendContextImpl(context.getParent(i), suffix, visited); - } - if (updatedParents.length === 1) { - result = new SingletonPredictionContext(updatedParents[0], updatedReturnStates[0]); - } else { - assert(updatedParents.length > 1); - result = new ArrayPredictionContext2(updatedParents, updatedReturnStates); - } - if (context.hasEmpty) { - result = PredictionContext.join(result, suffix); - } - } - visited.put(context, result); - } - return result; - } - equals(o) { - if (this === o) { - return true; - } else if (!(o instanceof ArrayPredictionContext2)) { - return 
false; - } - if (this.hashCode() !== o.hashCode()) { - return false; - } - let other = o; - return this.equalsImpl(other, new Array2DHashSet_1.Array2DHashSet()); - } - equalsImpl(other, visited) { - let selfWorkList = []; - let otherWorkList = []; - selfWorkList.push(this); - otherWorkList.push(other); - while (true) { - let currentSelf = selfWorkList.pop(); - let currentOther = otherWorkList.pop(); - if (!currentSelf || !currentOther) { - break; - } - let operands = new PredictionContextCache_1.PredictionContextCache.IdentityCommutativePredictionContextOperands(currentSelf, currentOther); - if (!visited.add(operands)) { - continue; - } - let selfSize = operands.x.size; - if (selfSize === 0) { - if (!operands.x.equals(operands.y)) { - return false; - } - continue; - } - let otherSize = operands.y.size; - if (selfSize !== otherSize) { - return false; - } - for (let i = 0; i < selfSize; i++) { - if (operands.x.getReturnState(i) !== operands.y.getReturnState(i)) { - return false; - } - let selfParent = operands.x.getParent(i); - let otherParent = operands.y.getParent(i); - if (selfParent.hashCode() !== otherParent.hashCode()) { - return false; - } - if (selfParent !== otherParent) { - selfWorkList.push(selfParent); - otherWorkList.push(otherParent); - } - } - } - return true; - } - }; - __decorate([ - Decorators_1.NotNull - ], ArrayPredictionContext.prototype, "parents", void 0); - __decorate([ - Decorators_1.NotNull - ], ArrayPredictionContext.prototype, "returnStates", void 0); - __decorate([ - Decorators_1.Override - ], ArrayPredictionContext.prototype, "getParent", null); - __decorate([ - Decorators_1.Override - ], ArrayPredictionContext.prototype, "getReturnState", null); - __decorate([ - Decorators_1.Override - ], ArrayPredictionContext.prototype, "findReturnState", null); - __decorate([ - Decorators_1.Override - ], ArrayPredictionContext.prototype, "size", null); - __decorate([ - Decorators_1.Override - ], ArrayPredictionContext.prototype, "isEmpty", null); - 
__decorate([ - Decorators_1.Override - ], ArrayPredictionContext.prototype, "hasEmpty", null); - __decorate([ - Decorators_1.Override - ], ArrayPredictionContext.prototype, "addEmptyContext", null); - __decorate([ - Decorators_1.Override - ], ArrayPredictionContext.prototype, "removeEmptyContext", null); - __decorate([ - Decorators_1.Override - ], ArrayPredictionContext.prototype, "appendContext", null); - __decorate([ - Decorators_1.Override - ], ArrayPredictionContext.prototype, "equals", null); - ArrayPredictionContext = __decorate([ - __param(0, Decorators_1.NotNull) - ], ArrayPredictionContext); - var SingletonPredictionContext = class SingletonPredictionContext2 extends PredictionContext { - constructor(parent, returnState) { - super(PredictionContext.calculateSingleHashCode(parent, returnState)); - this.parent = parent; - this.returnState = returnState; - } - getParent(index) { - return this.parent; - } - getReturnState(index) { - return this.returnState; - } - findReturnState(returnState) { - return this.returnState === returnState ? 
0 : -1; - } - get size() { - return 1; - } - get isEmpty() { - return false; - } - get hasEmpty() { - return false; - } - appendContext(suffix, contextCache) { - return contextCache.getChild(this.parent.appendContext(suffix, contextCache), this.returnState); - } - addEmptyContext() { - let parents = [this.parent, PredictionContext.EMPTY_FULL]; - let returnStates = [this.returnState, PredictionContext.EMPTY_FULL_STATE_KEY]; - return new ArrayPredictionContext(parents, returnStates); - } - removeEmptyContext() { - return this; - } - equals(o) { - if (o === this) { - return true; - } else if (!(o instanceof SingletonPredictionContext2)) { - return false; - } - let other = o; - if (this.hashCode() !== other.hashCode()) { - return false; - } - return this.returnState === other.returnState && this.parent.equals(other.parent); - } - }; - __decorate([ - Decorators_1.NotNull - ], SingletonPredictionContext.prototype, "parent", void 0); - __decorate([ - Decorators_1.Override - ], SingletonPredictionContext.prototype, "getParent", null); - __decorate([ - Decorators_1.Override - ], SingletonPredictionContext.prototype, "getReturnState", null); - __decorate([ - Decorators_1.Override - ], SingletonPredictionContext.prototype, "findReturnState", null); - __decorate([ - Decorators_1.Override - ], SingletonPredictionContext.prototype, "size", null); - __decorate([ - Decorators_1.Override - ], SingletonPredictionContext.prototype, "isEmpty", null); - __decorate([ - Decorators_1.Override - ], SingletonPredictionContext.prototype, "hasEmpty", null); - __decorate([ - Decorators_1.Override - ], SingletonPredictionContext.prototype, "appendContext", null); - __decorate([ - Decorators_1.Override - ], SingletonPredictionContext.prototype, "addEmptyContext", null); - __decorate([ - Decorators_1.Override - ], SingletonPredictionContext.prototype, "removeEmptyContext", null); - __decorate([ - Decorators_1.Override - ], SingletonPredictionContext.prototype, "equals", null); - 
SingletonPredictionContext = __decorate([ - __param(0, Decorators_1.NotNull) - ], SingletonPredictionContext); - exports.SingletonPredictionContext = SingletonPredictionContext; - (function(PredictionContext2) { - PredictionContext2.EMPTY_LOCAL = new EmptyPredictionContext(false); - PredictionContext2.EMPTY_FULL = new EmptyPredictionContext(true); - PredictionContext2.EMPTY_LOCAL_STATE_KEY = -(1 << 31 >>> 0); - PredictionContext2.EMPTY_FULL_STATE_KEY = (1 << 31 >>> 0) - 1; - class IdentityHashMap extends Array2DHashMap_1.Array2DHashMap { - constructor() { - super(IdentityEqualityComparator.INSTANCE); - } - } - PredictionContext2.IdentityHashMap = IdentityHashMap; - class IdentityEqualityComparator { - IdentityEqualityComparator() { - } - hashCode(obj) { - return obj.hashCode(); - } - equals(a, b) { - return a === b; - } - } - IdentityEqualityComparator.INSTANCE = new IdentityEqualityComparator(); - __decorate([ - Decorators_1.Override - ], IdentityEqualityComparator.prototype, "hashCode", null); - __decorate([ - Decorators_1.Override - ], IdentityEqualityComparator.prototype, "equals", null); - PredictionContext2.IdentityEqualityComparator = IdentityEqualityComparator; - })(PredictionContext = exports.PredictionContext || (exports.PredictionContext = {})); -}); - -// node_modules/antlr4ts/atn/ATNConfig.js -var require_ATNConfig = __commonJS((exports) => { - "use strict"; - var __decorate = exports && exports.__decorate || function(decorators, target, key, desc) { - var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; - if (typeof Reflect === "object" && typeof Reflect.decorate === "function") - r = Reflect.decorate(decorators, target, key, desc); - else - for (var i = decorators.length - 1; i >= 0; i--) - if (d = decorators[i]) - r = (c < 3 ? d(r) : c > 3 ? 
d(target, key, r) : d(target, key)) || r; - return c > 3 && r && Object.defineProperty(target, key, r), r; - }; - var __param = exports && exports.__param || function(paramIndex, decorator) { - return function(target, key) { - decorator(target, key, paramIndex); - }; - }; - Object.defineProperty(exports, "__esModule", {value: true}); - exports.ATNConfig = void 0; - var Array2DHashMap_1 = require_Array2DHashMap(); - var DecisionState_1 = require_DecisionState(); - var MurmurHash_1 = require_MurmurHash(); - var Decorators_1 = require_Decorators(); - var ObjectEqualityComparator_1 = require_ObjectEqualityComparator(); - var PredictionContext_1 = require_PredictionContext(); - var SemanticContext_1 = require_SemanticContext(); - var assert = require("assert"); - var SUPPRESS_PRECEDENCE_FILTER = 2147483648; - var ATNConfig = class ATNConfig2 { - constructor(state, altOrConfig, context) { - if (typeof altOrConfig === "number") { - assert((altOrConfig & 16777215) === altOrConfig); - this._state = state; - this.altAndOuterContextDepth = altOrConfig; - this._context = context; - } else { - this._state = state; - this.altAndOuterContextDepth = altOrConfig.altAndOuterContextDepth; - this._context = context; - } - } - static create(state, alt, context, semanticContext = SemanticContext_1.SemanticContext.NONE, lexerActionExecutor) { - if (semanticContext !== SemanticContext_1.SemanticContext.NONE) { - if (lexerActionExecutor != null) { - return new ActionSemanticContextATNConfig(lexerActionExecutor, semanticContext, state, alt, context, false); - } else { - return new SemanticContextATNConfig(semanticContext, state, alt, context); - } - } else if (lexerActionExecutor != null) { - return new ActionATNConfig(lexerActionExecutor, state, alt, context, false); - } else { - return new ATNConfig2(state, alt, context); - } - } - get state() { - return this._state; - } - get alt() { - return this.altAndOuterContextDepth & 16777215; - } - get context() { - return this._context; - } - set 
context(context) { - this._context = context; - } - get reachesIntoOuterContext() { - return this.outerContextDepth !== 0; - } - get outerContextDepth() { - return this.altAndOuterContextDepth >>> 24 & 127; - } - set outerContextDepth(outerContextDepth) { - assert(outerContextDepth >= 0); - outerContextDepth = Math.min(outerContextDepth, 127); - this.altAndOuterContextDepth = outerContextDepth << 24 | (this.altAndOuterContextDepth & ~2130706432) >>> 0; - } - get lexerActionExecutor() { - return void 0; - } - get semanticContext() { - return SemanticContext_1.SemanticContext.NONE; - } - get hasPassedThroughNonGreedyDecision() { - return false; - } - clone() { - return this.transform(this.state, false); - } - transform(state, checkNonGreedy, arg2) { - if (arg2 == null) { - return this.transformImpl(state, this._context, this.semanticContext, checkNonGreedy, this.lexerActionExecutor); - } else if (arg2 instanceof PredictionContext_1.PredictionContext) { - return this.transformImpl(state, arg2, this.semanticContext, checkNonGreedy, this.lexerActionExecutor); - } else if (arg2 instanceof SemanticContext_1.SemanticContext) { - return this.transformImpl(state, this._context, arg2, checkNonGreedy, this.lexerActionExecutor); - } else { - return this.transformImpl(state, this._context, this.semanticContext, checkNonGreedy, arg2); - } - } - transformImpl(state, context, semanticContext, checkNonGreedy, lexerActionExecutor) { - let passedThroughNonGreedy = checkNonGreedy && ATNConfig2.checkNonGreedyDecision(this, state); - if (semanticContext !== SemanticContext_1.SemanticContext.NONE) { - if (lexerActionExecutor != null || passedThroughNonGreedy) { - return new ActionSemanticContextATNConfig(lexerActionExecutor, semanticContext, state, this, context, passedThroughNonGreedy); - } else { - return new SemanticContextATNConfig(semanticContext, state, this, context); - } - } else if (lexerActionExecutor != null || passedThroughNonGreedy) { - return new 
ActionATNConfig(lexerActionExecutor, state, this, context, passedThroughNonGreedy); - } else { - return new ATNConfig2(state, this, context); - } - } - static checkNonGreedyDecision(source, target) { - return source.hasPassedThroughNonGreedyDecision || target instanceof DecisionState_1.DecisionState && target.nonGreedy; - } - appendContext(context, contextCache) { - if (typeof context === "number") { - let appendedContext = this.context.appendSingleContext(context, contextCache); - let result = this.transform(this.state, false, appendedContext); - return result; - } else { - let appendedContext = this.context.appendContext(context, contextCache); - let result = this.transform(this.state, false, appendedContext); - return result; - } - } - contains(subconfig) { - if (this.state.stateNumber !== subconfig.state.stateNumber || this.alt !== subconfig.alt || !this.semanticContext.equals(subconfig.semanticContext)) { - return false; - } - let leftWorkList = []; - let rightWorkList = []; - leftWorkList.push(this.context); - rightWorkList.push(subconfig.context); - while (true) { - let left = leftWorkList.pop(); - let right = rightWorkList.pop(); - if (!left || !right) { - break; - } - if (left === right) { - return true; - } - if (left.size < right.size) { - return false; - } - if (right.isEmpty) { - return left.hasEmpty; - } else { - for (let i = 0; i < right.size; i++) { - let index = left.findReturnState(right.getReturnState(i)); - if (index < 0) { - return false; - } - leftWorkList.push(left.getParent(index)); - rightWorkList.push(right.getParent(i)); - } - } - } - return false; - } - get isPrecedenceFilterSuppressed() { - return (this.altAndOuterContextDepth & SUPPRESS_PRECEDENCE_FILTER) !== 0; - } - set isPrecedenceFilterSuppressed(value) { - if (value) { - this.altAndOuterContextDepth |= SUPPRESS_PRECEDENCE_FILTER; - } else { - this.altAndOuterContextDepth &= ~SUPPRESS_PRECEDENCE_FILTER; - } - } - equals(o) { - if (this === o) { - return true; - } else if (!(o 
instanceof ATNConfig2)) { - return false; - } - return this.state.stateNumber === o.state.stateNumber && this.alt === o.alt && this.reachesIntoOuterContext === o.reachesIntoOuterContext && this.context.equals(o.context) && this.semanticContext.equals(o.semanticContext) && this.isPrecedenceFilterSuppressed === o.isPrecedenceFilterSuppressed && this.hasPassedThroughNonGreedyDecision === o.hasPassedThroughNonGreedyDecision && ObjectEqualityComparator_1.ObjectEqualityComparator.INSTANCE.equals(this.lexerActionExecutor, o.lexerActionExecutor); - } - hashCode() { - let hashCode = MurmurHash_1.MurmurHash.initialize(7); - hashCode = MurmurHash_1.MurmurHash.update(hashCode, this.state.stateNumber); - hashCode = MurmurHash_1.MurmurHash.update(hashCode, this.alt); - hashCode = MurmurHash_1.MurmurHash.update(hashCode, this.reachesIntoOuterContext ? 1 : 0); - hashCode = MurmurHash_1.MurmurHash.update(hashCode, this.context); - hashCode = MurmurHash_1.MurmurHash.update(hashCode, this.semanticContext); - hashCode = MurmurHash_1.MurmurHash.update(hashCode, this.hasPassedThroughNonGreedyDecision ? 
1 : 0); - hashCode = MurmurHash_1.MurmurHash.update(hashCode, this.lexerActionExecutor); - hashCode = MurmurHash_1.MurmurHash.finish(hashCode, 7); - return hashCode; - } - toDotString() { - let builder = ""; - builder += "digraph G {\n"; - builder += "rankdir=LR;\n"; - let visited = new Array2DHashMap_1.Array2DHashMap(PredictionContext_1.PredictionContext.IdentityEqualityComparator.INSTANCE); - let workList = []; - function getOrAddContext(context) { - let newNumber = visited.size; - let result = visited.putIfAbsent(context, newNumber); - if (result != null) { - return result; - } - workList.push(context); - return newNumber; - } - workList.push(this.context); - visited.put(this.context, 0); - while (true) { - let current = workList.pop(); - if (!current) { - break; - } - for (let i = 0; i < current.size; i++) { - builder += " s" + getOrAddContext(current); - builder += "->"; - builder += "s" + getOrAddContext(current.getParent(i)); - builder += '[label="' + current.getReturnState(i) + '"];\n'; - } - } - builder += "}\n"; - return builder.toString(); - } - toString(recog, showAlt, showContext) { - if (showContext == null) { - showContext = showAlt != null; - } - if (showAlt == null) { - showAlt = true; - } - let buf = ""; - let contexts; - if (showContext) { - contexts = this.context.toStrings(recog, this.state.stateNumber); - } else { - contexts = ["?"]; - } - let first = true; - for (let contextDesc of contexts) { - if (first) { - first = false; - } else { - buf += ", "; - } - buf += "("; - buf += this.state; - if (showAlt) { - buf += ","; - buf += this.alt; - } - if (this.context) { - buf += ","; - buf += contextDesc; - } - if (this.semanticContext !== SemanticContext_1.SemanticContext.NONE) { - buf += ","; - buf += this.semanticContext; - } - if (this.reachesIntoOuterContext) { - buf += ",up=" + this.outerContextDepth; - } - buf += ")"; - } - return buf.toString(); - } - }; - __decorate([ - Decorators_1.NotNull - ], ATNConfig.prototype, "_state", void 0); - 
__decorate([ - Decorators_1.NotNull - ], ATNConfig.prototype, "_context", void 0); - __decorate([ - Decorators_1.NotNull - ], ATNConfig.prototype, "state", null); - __decorate([ - Decorators_1.NotNull, - __param(0, Decorators_1.NotNull) - ], ATNConfig.prototype, "context", null); - __decorate([ - Decorators_1.NotNull - ], ATNConfig.prototype, "semanticContext", null); - __decorate([ - Decorators_1.Override - ], ATNConfig.prototype, "clone", null); - __decorate([ - __param(0, Decorators_1.NotNull), - __param(2, Decorators_1.NotNull) - ], ATNConfig.prototype, "transformImpl", null); - __decorate([ - Decorators_1.Override - ], ATNConfig.prototype, "equals", null); - __decorate([ - Decorators_1.Override - ], ATNConfig.prototype, "hashCode", null); - __decorate([ - __param(0, Decorators_1.NotNull), - __param(3, Decorators_1.NotNull) - ], ATNConfig, "create", null); - ATNConfig = __decorate([ - __param(0, Decorators_1.NotNull), - __param(2, Decorators_1.NotNull) - ], ATNConfig); - exports.ATNConfig = ATNConfig; - var SemanticContextATNConfig = class SemanticContextATNConfig extends ATNConfig { - constructor(semanticContext, state, altOrConfig, context) { - if (typeof altOrConfig === "number") { - super(state, altOrConfig, context); - } else { - super(state, altOrConfig, context); - } - this._semanticContext = semanticContext; - } - get semanticContext() { - return this._semanticContext; - } - }; - __decorate([ - Decorators_1.NotNull - ], SemanticContextATNConfig.prototype, "_semanticContext", void 0); - __decorate([ - Decorators_1.Override - ], SemanticContextATNConfig.prototype, "semanticContext", null); - SemanticContextATNConfig = __decorate([ - __param(1, Decorators_1.NotNull), - __param(2, Decorators_1.NotNull) - ], SemanticContextATNConfig); - var ActionATNConfig = class ActionATNConfig extends ATNConfig { - constructor(lexerActionExecutor, state, altOrConfig, context, passedThroughNonGreedyDecision) { - if (typeof altOrConfig === "number") { - super(state, 
altOrConfig, context); - } else { - super(state, altOrConfig, context); - if (altOrConfig.semanticContext !== SemanticContext_1.SemanticContext.NONE) { - throw new Error("Not supported"); - } - } - this._lexerActionExecutor = lexerActionExecutor; - this.passedThroughNonGreedyDecision = passedThroughNonGreedyDecision; - } - get lexerActionExecutor() { - return this._lexerActionExecutor; - } - get hasPassedThroughNonGreedyDecision() { - return this.passedThroughNonGreedyDecision; - } - }; - __decorate([ - Decorators_1.Override - ], ActionATNConfig.prototype, "lexerActionExecutor", null); - __decorate([ - Decorators_1.Override - ], ActionATNConfig.prototype, "hasPassedThroughNonGreedyDecision", null); - ActionATNConfig = __decorate([ - __param(1, Decorators_1.NotNull), - __param(2, Decorators_1.NotNull) - ], ActionATNConfig); - var ActionSemanticContextATNConfig = class ActionSemanticContextATNConfig extends SemanticContextATNConfig { - constructor(lexerActionExecutor, semanticContext, state, altOrConfig, context, passedThroughNonGreedyDecision) { - if (typeof altOrConfig === "number") { - super(semanticContext, state, altOrConfig, context); - } else { - super(semanticContext, state, altOrConfig, context); - } - this._lexerActionExecutor = lexerActionExecutor; - this.passedThroughNonGreedyDecision = passedThroughNonGreedyDecision; - } - get lexerActionExecutor() { - return this._lexerActionExecutor; - } - get hasPassedThroughNonGreedyDecision() { - return this.passedThroughNonGreedyDecision; - } - }; - __decorate([ - Decorators_1.Override - ], ActionSemanticContextATNConfig.prototype, "lexerActionExecutor", null); - __decorate([ - Decorators_1.Override - ], ActionSemanticContextATNConfig.prototype, "hasPassedThroughNonGreedyDecision", null); - ActionSemanticContextATNConfig = __decorate([ - __param(1, Decorators_1.NotNull), - __param(2, Decorators_1.NotNull) - ], ActionSemanticContextATNConfig); -}); - -// node_modules/antlr4ts/misc/BitSet.js -var require_BitSet = 
__commonJS((exports) => { - "use strict"; - Object.defineProperty(exports, "__esModule", {value: true}); - exports.BitSet = void 0; - var util = require("util"); - var MurmurHash_1 = require_MurmurHash(); - var EMPTY_DATA = new Uint16Array(0); - function getIndex(bitNumber) { - return bitNumber >>> 4; - } - function unIndex(n) { - return n * 16; - } - function findLSBSet(word) { - let bit = 1; - for (let i = 0; i < 16; i++) { - if ((word & bit) !== 0) { - return i; - } - bit = bit << 1 >>> 0; - } - throw new RangeError("No specified bit found"); - } - function findMSBSet(word) { - let bit = 1 << 15 >>> 0; - for (let i = 15; i >= 0; i--) { - if ((word & bit) !== 0) { - return i; - } - bit = bit >>> 1; - } - throw new RangeError("No specified bit found"); - } - function bitsFor(fromBit, toBit) { - fromBit &= 15; - toBit &= 15; - if (fromBit === toBit) { - return 1 << fromBit >>> 0; - } - return 65535 >>> 15 - toBit ^ 65535 >>> 16 - fromBit; - } - var POP_CNT = new Uint8Array(65536); - for (let i = 0; i < 16; i++) { - const stride = 1 << i >>> 0; - let index = 0; - while (index < POP_CNT.length) { - index += stride; - for (let j = 0; j < stride; j++) { - POP_CNT[index]++; - index++; - } - } - } - var BitSet = class { - constructor(arg) { - if (!arg) { - this.data = EMPTY_DATA; - } else if (typeof arg === "number") { - if (arg < 0) { - throw new RangeError("nbits cannot be negative"); - } else { - this.data = new Uint16Array(getIndex(arg - 1) + 1); - } - } else { - if (arg instanceof BitSet) { - this.data = arg.data.slice(0); - } else { - let max = -1; - for (let v of arg) { - if (max < v) { - max = v; - } - } - this.data = new Uint16Array(getIndex(max - 1) + 1); - for (let v of arg) { - this.set(v); - } - } - } - } - and(set) { - const data = this.data; - const other = set.data; - const words = Math.min(data.length, other.length); - let lastWord = -1; - for (let i = 0; i < words; i++) { - let value = data[i] &= other[i]; - if (value !== 0) { - lastWord = i; - } - } - 
if (lastWord === -1) { - this.data = EMPTY_DATA; - } - if (lastWord < data.length - 1) { - this.data = data.slice(0, lastWord + 1); - } - } - andNot(set) { - const data = this.data; - const other = set.data; - const words = Math.min(data.length, other.length); - let lastWord = -1; - for (let i = 0; i < words; i++) { - let value = data[i] &= other[i] ^ 65535; - if (value !== 0) { - lastWord = i; - } - } - if (lastWord === -1) { - this.data = EMPTY_DATA; - } - if (lastWord < data.length - 1) { - this.data = data.slice(0, lastWord + 1); - } - } - cardinality() { - if (this.isEmpty) { - return 0; - } - const data = this.data; - const length = data.length; - let result = 0; - for (let i = 0; i < length; i++) { - result += POP_CNT[data[i]]; - } - return result; - } - clear(fromIndex, toIndex) { - if (fromIndex == null) { - this.data.fill(0); - } else if (toIndex == null) { - this.set(fromIndex, false); - } else { - this.set(fromIndex, toIndex, false); - } - } - flip(fromIndex, toIndex) { - if (toIndex == null) { - toIndex = fromIndex; - } - if (fromIndex < 0 || toIndex < fromIndex) { - throw new RangeError(); - } - let word = getIndex(fromIndex); - const lastWord = getIndex(toIndex); - if (word === lastWord) { - this.data[word] ^= bitsFor(fromIndex, toIndex); - } else { - this.data[word++] ^= bitsFor(fromIndex, 15); - while (word < lastWord) { - this.data[word++] ^= 65535; - } - this.data[word++] ^= bitsFor(0, toIndex); - } - } - get(fromIndex, toIndex) { - if (toIndex === void 0) { - return !!(this.data[getIndex(fromIndex)] & bitsFor(fromIndex, fromIndex)); - } else { - let result = new BitSet(toIndex + 1); - for (let i = fromIndex; i <= toIndex; i++) { - result.set(i, this.get(i)); - } - return result; - } - } - intersects(set) { - let smallerLength = Math.min(this.length(), set.length()); - if (smallerLength === 0) { - return false; - } - let bound = getIndex(smallerLength - 1); - for (let i = 0; i <= bound; i++) { - if ((this.data[i] & set.data[i]) !== 0) { - return 
true; - } - } - return false; - } - get isEmpty() { - return this.length() === 0; - } - length() { - if (!this.data.length) { - return 0; - } - return this.previousSetBit(unIndex(this.data.length) - 1) + 1; - } - nextClearBit(fromIndex) { - if (fromIndex < 0) { - throw new RangeError("fromIndex cannot be negative"); - } - const data = this.data; - const length = data.length; - let word = getIndex(fromIndex); - if (word > length) { - return -1; - } - let ignore = 65535 ^ bitsFor(fromIndex, 15); - if ((data[word] | ignore) === 65535) { - word++; - ignore = 0; - for (; word < length; word++) { - if (data[word] !== 65535) { - break; - } - } - if (word === length) { - return -1; - } - } - return unIndex(word) + findLSBSet((data[word] | ignore) ^ 65535); - } - nextSetBit(fromIndex) { - if (fromIndex < 0) { - throw new RangeError("fromIndex cannot be negative"); - } - const data = this.data; - const length = data.length; - let word = getIndex(fromIndex); - if (word > length) { - return -1; - } - let mask = bitsFor(fromIndex, 15); - if ((data[word] & mask) === 0) { - word++; - mask = 65535; - for (; word < length; word++) { - if (data[word] !== 0) { - break; - } - } - if (word >= length) { - return -1; - } - } - return unIndex(word) + findLSBSet(data[word] & mask); - } - or(set) { - const data = this.data; - const other = set.data; - const minWords = Math.min(data.length, other.length); - const words = Math.max(data.length, other.length); - const dest = data.length === words ? data : new Uint16Array(words); - let lastWord = -1; - for (let i = 0; i < minWords; i++) { - let value = dest[i] = data[i] | other[i]; - if (value !== 0) { - lastWord = i; - } - } - const longer = data.length > other.length ? 
data : other; - for (let i = minWords; i < words; i++) { - let value = dest[i] = longer[i]; - if (value !== 0) { - lastWord = i; - } - } - if (lastWord === -1) { - this.data = EMPTY_DATA; - } else if (dest.length === lastWord + 1) { - this.data = dest; - } else { - this.data = dest.slice(0, lastWord); - } - } - previousClearBit(fromIndex) { - if (fromIndex < 0) { - throw new RangeError("fromIndex cannot be negative"); - } - const data = this.data; - const length = data.length; - let word = getIndex(fromIndex); - if (word >= length) { - word = length - 1; - } - let ignore = 65535 ^ bitsFor(0, fromIndex); - if ((data[word] | ignore) === 65535) { - ignore = 0; - word--; - for (; word >= 0; word--) { - if (data[word] !== 65535) { - break; - } - } - if (word < 0) { - return -1; - } - } - return unIndex(word) + findMSBSet((data[word] | ignore) ^ 65535); - } - previousSetBit(fromIndex) { - if (fromIndex < 0) { - throw new RangeError("fromIndex cannot be negative"); - } - const data = this.data; - const length = data.length; - let word = getIndex(fromIndex); - if (word >= length) { - word = length - 1; - } - let mask = bitsFor(0, fromIndex); - if ((data[word] & mask) === 0) { - word--; - mask = 65535; - for (; word >= 0; word--) { - if (data[word] !== 0) { - break; - } - } - if (word < 0) { - return -1; - } - } - return unIndex(word) + findMSBSet(data[word] & mask); - } - set(fromIndex, toIndex, value) { - if (toIndex === void 0) { - toIndex = fromIndex; - value = true; - } else if (typeof toIndex === "boolean") { - value = toIndex; - toIndex = fromIndex; - } - if (value === void 0) { - value = true; - } - if (fromIndex < 0 || fromIndex > toIndex) { - throw new RangeError(); - } - let word = getIndex(fromIndex); - let lastWord = getIndex(toIndex); - if (value && lastWord >= this.data.length) { - let temp = new Uint16Array(lastWord + 1); - this.data.forEach((value2, index) => temp[index] = value2); - this.data = temp; - } else if (!value) { - if (word >= this.data.length) { 
- return; - } - if (lastWord >= this.data.length) { - lastWord = this.data.length - 1; - toIndex = this.data.length * 16 - 1; - } - } - if (word === lastWord) { - this._setBits(word, value, bitsFor(fromIndex, toIndex)); - } else { - this._setBits(word++, value, bitsFor(fromIndex, 15)); - while (word < lastWord) { - this.data[word++] = value ? 65535 : 0; - } - this._setBits(word, value, bitsFor(0, toIndex)); - } - } - _setBits(word, value, mask) { - if (value) { - this.data[word] |= mask; - } else { - this.data[word] &= 65535 ^ mask; - } - } - get size() { - return this.data.byteLength * 8; - } - hashCode() { - return MurmurHash_1.MurmurHash.hashCode(this.data, 22); - } - equals(obj) { - if (obj === this) { - return true; - } else if (!(obj instanceof BitSet)) { - return false; - } - const len = this.length(); - if (len !== obj.length()) { - return false; - } - if (len === 0) { - return true; - } - let bound = getIndex(len - 1); - for (let i = 0; i <= bound; i++) { - if (this.data[i] !== obj.data[i]) { - return false; - } - } - return true; - } - toString() { - let result = "{"; - let first = true; - for (let i = this.nextSetBit(0); i >= 0; i = this.nextSetBit(i + 1)) { - if (first) { - first = false; - } else { - result += ", "; - } - result += i; - } - result += "}"; - return result; - } - xor(set) { - const data = this.data; - const other = set.data; - const minWords = Math.min(data.length, other.length); - const words = Math.max(data.length, other.length); - const dest = data.length === words ? data : new Uint16Array(words); - let lastWord = -1; - for (let i = 0; i < minWords; i++) { - let value = dest[i] = data[i] ^ other[i]; - if (value !== 0) { - lastWord = i; - } - } - const longer = data.length > other.length ? 
data : other; - for (let i = minWords; i < words; i++) { - let value = dest[i] = longer[i]; - if (value !== 0) { - lastWord = i; - } - } - if (lastWord === -1) { - this.data = EMPTY_DATA; - } else if (dest.length === lastWord + 1) { - this.data = dest; - } else { - this.data = dest.slice(0, lastWord + 1); - } - } - clone() { - return new BitSet(this); - } - [Symbol.iterator]() { - return new BitSetIterator(this.data); - } - [util.inspect.custom]() { - return "BitSet " + this.toString(); - } - }; - exports.BitSet = BitSet; - var BitSetIterator = class { - constructor(data) { - this.data = data; - this.index = 0; - this.mask = 65535; - } - next() { - while (this.index < this.data.length) { - const bits = this.data[this.index] & this.mask; - if (bits !== 0) { - const bitNumber = unIndex(this.index) + findLSBSet(bits); - this.mask = bitsFor(bitNumber + 1, 15); - return {done: false, value: bitNumber}; - } - this.index++; - this.mask = 65535; - } - return {done: true, value: -1}; - } - [Symbol.iterator]() { - return this; - } - }; -}); - -// node_modules/antlr4ts/atn/ATNConfigSet.js -var require_ATNConfigSet = __commonJS((exports) => { - "use strict"; - var __decorate = exports && exports.__decorate || function(decorators, target, key, desc) { - var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; - if (typeof Reflect === "object" && typeof Reflect.decorate === "function") - r = Reflect.decorate(decorators, target, key, desc); - else - for (var i = decorators.length - 1; i >= 0; i--) - if (d = decorators[i]) - r = (c < 3 ? d(r) : c > 3 ? 
d(target, key, r) : d(target, key)) || r; - return c > 3 && r && Object.defineProperty(target, key, r), r; - }; - Object.defineProperty(exports, "__esModule", {value: true}); - exports.ATNConfigSet = void 0; - var Array2DHashMap_1 = require_Array2DHashMap(); - var Array2DHashSet_1 = require_Array2DHashSet(); - var ArrayEqualityComparator_1 = require_ArrayEqualityComparator(); - var ATN_1 = require_ATN(); - var ATNConfig_1 = require_ATNConfig(); - var BitSet_1 = require_BitSet(); - var Decorators_1 = require_Decorators(); - var ObjectEqualityComparator_1 = require_ObjectEqualityComparator(); - var PredictionContext_1 = require_PredictionContext(); - var PredictionContextCache_1 = require_PredictionContextCache(); - var SemanticContext_1 = require_SemanticContext(); - var assert = require("assert"); - var Utils3 = require_Utils(); - var KeyTypeEqualityComparer = class { - hashCode(key) { - return key.state ^ key.alt; - } - equals(a, b) { - return a.state === b.state && a.alt === b.alt; - } - }; - KeyTypeEqualityComparer.INSTANCE = new KeyTypeEqualityComparer(); - function NewKeyedConfigMap(map) { - if (map) { - return new Array2DHashMap_1.Array2DHashMap(map); - } else { - return new Array2DHashMap_1.Array2DHashMap(KeyTypeEqualityComparer.INSTANCE); - } - } - var ATNConfigSet = class { - constructor(set, readonly) { - this._uniqueAlt = 0; - this._hasSemanticContext = false; - this._dipsIntoOuterContext = false; - this.outermostConfigSet = false; - this.cachedHashCode = -1; - if (!set) { - this.mergedConfigs = NewKeyedConfigMap(); - this.unmerged = []; - this.configs = []; - this._uniqueAlt = ATN_1.ATN.INVALID_ALT_NUMBER; - } else { - if (readonly) { - this.mergedConfigs = void 0; - this.unmerged = void 0; - } else if (!set.isReadOnly) { - this.mergedConfigs = NewKeyedConfigMap(set.mergedConfigs); - this.unmerged = set.unmerged.slice(0); - } else { - this.mergedConfigs = NewKeyedConfigMap(); - this.unmerged = []; - } - this.configs = set.configs.slice(0); - 
this._dipsIntoOuterContext = set._dipsIntoOuterContext; - this._hasSemanticContext = set._hasSemanticContext; - this.outermostConfigSet = set.outermostConfigSet; - if (readonly || !set.isReadOnly) { - this._uniqueAlt = set._uniqueAlt; - this._conflictInfo = set._conflictInfo; - } - } - } - getRepresentedAlternatives() { - if (this._conflictInfo != null) { - return this._conflictInfo.conflictedAlts.clone(); - } - let alts = new BitSet_1.BitSet(); - for (let config of this) { - alts.set(config.alt); - } - return alts; - } - get isReadOnly() { - return this.mergedConfigs == null; - } - get isOutermostConfigSet() { - return this.outermostConfigSet; - } - set isOutermostConfigSet(outermostConfigSet) { - if (this.outermostConfigSet && !outermostConfigSet) { - throw new Error("IllegalStateException"); - } - assert(!outermostConfigSet || !this._dipsIntoOuterContext); - this.outermostConfigSet = outermostConfigSet; - } - getStates() { - let states = new Array2DHashSet_1.Array2DHashSet(ObjectEqualityComparator_1.ObjectEqualityComparator.INSTANCE); - for (let c of this.configs) { - states.add(c.state); - } - return states; - } - optimizeConfigs(interpreter) { - if (this.configs.length === 0) { - return; - } - for (let config of this.configs) { - config.context = interpreter.atn.getCachedContext(config.context); - } - } - clone(readonly) { - let copy = new ATNConfigSet(this, readonly); - if (!readonly && this.isReadOnly) { - copy.addAll(this.configs); - } - return copy; - } - get size() { - return this.configs.length; - } - get isEmpty() { - return this.configs.length === 0; - } - contains(o) { - if (!(o instanceof ATNConfig_1.ATNConfig)) { - return false; - } - if (this.mergedConfigs && this.unmerged) { - let config = o; - let configKey = this.getKey(config); - let mergedConfig = this.mergedConfigs.get(configKey); - if (mergedConfig != null && this.canMerge(config, configKey, mergedConfig)) { - return mergedConfig.contains(config); - } - for (let c of this.unmerged) { - if 
(c.contains(o)) { - return true; - } - } - } else { - for (let c of this.configs) { - if (c.contains(o)) { - return true; - } - } - } - return false; - } - *[Symbol.iterator]() { - yield* this.configs; - } - toArray() { - return this.configs; - } - add(e, contextCache) { - this.ensureWritable(); - if (!this.mergedConfigs || !this.unmerged) { - throw new Error("Covered by ensureWritable but duplicated here for strict null check limitation"); - } - assert(!this.outermostConfigSet || !e.reachesIntoOuterContext); - if (contextCache == null) { - contextCache = PredictionContextCache_1.PredictionContextCache.UNCACHED; - } - let addKey; - let key = this.getKey(e); - let mergedConfig = this.mergedConfigs.get(key); - addKey = mergedConfig == null; - if (mergedConfig != null && this.canMerge(e, key, mergedConfig)) { - mergedConfig.outerContextDepth = Math.max(mergedConfig.outerContextDepth, e.outerContextDepth); - if (e.isPrecedenceFilterSuppressed) { - mergedConfig.isPrecedenceFilterSuppressed = true; - } - let joined = PredictionContext_1.PredictionContext.join(mergedConfig.context, e.context, contextCache); - this.updatePropertiesForMergedConfig(e); - if (mergedConfig.context === joined) { - return false; - } - mergedConfig.context = joined; - return true; - } - for (let i = 0; i < this.unmerged.length; i++) { - let unmergedConfig = this.unmerged[i]; - if (this.canMerge(e, key, unmergedConfig)) { - unmergedConfig.outerContextDepth = Math.max(unmergedConfig.outerContextDepth, e.outerContextDepth); - if (e.isPrecedenceFilterSuppressed) { - unmergedConfig.isPrecedenceFilterSuppressed = true; - } - let joined = PredictionContext_1.PredictionContext.join(unmergedConfig.context, e.context, contextCache); - this.updatePropertiesForMergedConfig(e); - if (unmergedConfig.context === joined) { - return false; - } - unmergedConfig.context = joined; - if (addKey) { - this.mergedConfigs.put(key, unmergedConfig); - this.unmerged.splice(i, 1); - } - return true; - } - } - 
this.configs.push(e); - if (addKey) { - this.mergedConfigs.put(key, e); - } else { - this.unmerged.push(e); - } - this.updatePropertiesForAddedConfig(e); - return true; - } - updatePropertiesForMergedConfig(config) { - this._dipsIntoOuterContext = this._dipsIntoOuterContext || config.reachesIntoOuterContext; - assert(!this.outermostConfigSet || !this._dipsIntoOuterContext); - } - updatePropertiesForAddedConfig(config) { - if (this.configs.length === 1) { - this._uniqueAlt = config.alt; - } else if (this._uniqueAlt !== config.alt) { - this._uniqueAlt = ATN_1.ATN.INVALID_ALT_NUMBER; - } - this._hasSemanticContext = this._hasSemanticContext || !SemanticContext_1.SemanticContext.NONE.equals(config.semanticContext); - this._dipsIntoOuterContext = this._dipsIntoOuterContext || config.reachesIntoOuterContext; - assert(!this.outermostConfigSet || !this._dipsIntoOuterContext); - } - canMerge(left, leftKey, right) { - if (left.state.stateNumber !== right.state.stateNumber) { - return false; - } - if (leftKey.alt !== right.alt) { - return false; - } - return left.semanticContext.equals(right.semanticContext); - } - getKey(e) { - return {state: e.state.stateNumber, alt: e.alt}; - } - containsAll(c) { - for (let o of c) { - if (!(o instanceof ATNConfig_1.ATNConfig)) { - return false; - } - if (!this.contains(o)) { - return false; - } - } - return true; - } - addAll(c, contextCache) { - this.ensureWritable(); - let changed = false; - for (let group of c) { - if (this.add(group, contextCache)) { - changed = true; - } - } - return changed; - } - clear() { - this.ensureWritable(); - if (!this.mergedConfigs || !this.unmerged) { - throw new Error("Covered by ensureWritable but duplicated here for strict null check limitation"); - } - this.mergedConfigs.clear(); - this.unmerged.length = 0; - this.configs.length = 0; - this._dipsIntoOuterContext = false; - this._hasSemanticContext = false; - this._uniqueAlt = ATN_1.ATN.INVALID_ALT_NUMBER; - this._conflictInfo = void 0; - } - 
equals(obj) { - if (this === obj) { - return true; - } - if (!(obj instanceof ATNConfigSet)) { - return false; - } - return this.outermostConfigSet === obj.outermostConfigSet && Utils3.equals(this._conflictInfo, obj._conflictInfo) && ArrayEqualityComparator_1.ArrayEqualityComparator.INSTANCE.equals(this.configs, obj.configs); - } - hashCode() { - if (this.isReadOnly && this.cachedHashCode !== -1) { - return this.cachedHashCode; - } - let hashCode = 1; - hashCode = 5 * hashCode ^ (this.outermostConfigSet ? 1 : 0); - hashCode = 5 * hashCode ^ ArrayEqualityComparator_1.ArrayEqualityComparator.INSTANCE.hashCode(this.configs); - if (this.isReadOnly) { - this.cachedHashCode = hashCode; - } - return hashCode; - } - toString(showContext) { - if (showContext == null) { - showContext = false; - } - let buf = ""; - let sortedConfigs = this.configs.slice(0); - sortedConfigs.sort((o1, o2) => { - if (o1.alt !== o2.alt) { - return o1.alt - o2.alt; - } else if (o1.state.stateNumber !== o2.state.stateNumber) { - return o1.state.stateNumber - o2.state.stateNumber; - } else { - return o1.semanticContext.toString().localeCompare(o2.semanticContext.toString()); - } - }); - buf += "["; - for (let i = 0; i < sortedConfigs.length; i++) { - if (i > 0) { - buf += ", "; - } - buf += sortedConfigs[i].toString(void 0, true, showContext); - } - buf += "]"; - if (this._hasSemanticContext) { - buf += ",hasSemanticContext=" + this._hasSemanticContext; - } - if (this._uniqueAlt !== ATN_1.ATN.INVALID_ALT_NUMBER) { - buf += ",uniqueAlt=" + this._uniqueAlt; - } - if (this._conflictInfo != null) { - buf += ",conflictingAlts=" + this._conflictInfo.conflictedAlts; - if (!this._conflictInfo.isExact) { - buf += "*"; - } - } - if (this._dipsIntoOuterContext) { - buf += ",dipsIntoOuterContext"; - } - return buf.toString(); - } - get uniqueAlt() { - return this._uniqueAlt; - } - get hasSemanticContext() { - return this._hasSemanticContext; - } - set hasSemanticContext(value) { - this.ensureWritable(); - 
this._hasSemanticContext = value; - } - get conflictInfo() { - return this._conflictInfo; - } - set conflictInfo(conflictInfo) { - this.ensureWritable(); - this._conflictInfo = conflictInfo; - } - get conflictingAlts() { - if (this._conflictInfo == null) { - return void 0; - } - return this._conflictInfo.conflictedAlts; - } - get isExactConflict() { - if (this._conflictInfo == null) { - return false; - } - return this._conflictInfo.isExact; - } - get dipsIntoOuterContext() { - return this._dipsIntoOuterContext; - } - get(index) { - return this.configs[index]; - } - ensureWritable() { - if (this.isReadOnly) { - throw new Error("This ATNConfigSet is read only."); - } - } - }; - __decorate([ - Decorators_1.NotNull - ], ATNConfigSet.prototype, "getRepresentedAlternatives", null); - __decorate([ - Decorators_1.Override - ], ATNConfigSet.prototype, "size", null); - __decorate([ - Decorators_1.Override - ], ATNConfigSet.prototype, "isEmpty", null); - __decorate([ - Decorators_1.Override - ], ATNConfigSet.prototype, "contains", null); - __decorate([ - Decorators_1.Override - ], ATNConfigSet.prototype, Symbol.iterator, null); - __decorate([ - Decorators_1.Override - ], ATNConfigSet.prototype, "toArray", null); - __decorate([ - Decorators_1.Override - ], ATNConfigSet.prototype, "containsAll", null); - __decorate([ - Decorators_1.Override - ], ATNConfigSet.prototype, "clear", null); - __decorate([ - Decorators_1.Override - ], ATNConfigSet.prototype, "equals", null); - __decorate([ - Decorators_1.Override - ], ATNConfigSet.prototype, "hashCode", null); - exports.ATNConfigSet = ATNConfigSet; -}); - -// node_modules/antlr4ts/dfa/DFAState.js -var require_DFAState = __commonJS((exports) => { - "use strict"; - var __decorate = exports && exports.__decorate || function(decorators, target, key, desc) { - var c = arguments.length, r = c < 3 ? target : desc === null ? 
desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; - if (typeof Reflect === "object" && typeof Reflect.decorate === "function") - r = Reflect.decorate(decorators, target, key, desc); - else - for (var i = decorators.length - 1; i >= 0; i--) - if (d = decorators[i]) - r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r; - return c > 3 && r && Object.defineProperty(target, key, r), r; - }; - var __param = exports && exports.__param || function(paramIndex, decorator) { - return function(target, key) { - decorator(target, key, paramIndex); - }; - }; - Object.defineProperty(exports, "__esModule", {value: true}); - exports.DFAState = void 0; - var ATN_1 = require_ATN(); - var BitSet_1 = require_BitSet(); - var MurmurHash_1 = require_MurmurHash(); - var Decorators_1 = require_Decorators(); - var PredictionContext_1 = require_PredictionContext(); - var assert = require("assert"); - var DFAState = class { - constructor(configs) { - this.stateNumber = -1; - this.configs = configs; - this.edges = new Map(); - this.contextEdges = new Map(); - } - get isContextSensitive() { - return !!this.contextSymbols; - } - isContextSymbol(symbol) { - if (!this.isContextSensitive) { - return false; - } - return this.contextSymbols.get(symbol); - } - setContextSymbol(symbol) { - assert(this.isContextSensitive); - this.contextSymbols.set(symbol); - } - setContextSensitive(atn) { - assert(!this.configs.isOutermostConfigSet); - if (this.isContextSensitive) { - return; - } - if (!this.contextSymbols) { - this.contextSymbols = new BitSet_1.BitSet(); - } - } - get acceptStateInfo() { - return this._acceptStateInfo; - } - set acceptStateInfo(acceptStateInfo) { - this._acceptStateInfo = acceptStateInfo; - } - get isAcceptState() { - return !!this._acceptStateInfo; - } - get prediction() { - if (!this._acceptStateInfo) { - return ATN_1.ATN.INVALID_ALT_NUMBER; - } - return this._acceptStateInfo.prediction; - } - get lexerActionExecutor() { - if (!this._acceptStateInfo) { - 
return void 0; - } - return this._acceptStateInfo.lexerActionExecutor; - } - getTarget(symbol) { - return this.edges.get(symbol); - } - setTarget(symbol, target) { - this.edges.set(symbol, target); - } - getEdgeMap() { - return this.edges; - } - getContextTarget(invokingState) { - if (invokingState === PredictionContext_1.PredictionContext.EMPTY_FULL_STATE_KEY) { - invokingState = -1; - } - return this.contextEdges.get(invokingState); - } - setContextTarget(invokingState, target) { - if (!this.isContextSensitive) { - throw new Error("The state is not context sensitive."); - } - if (invokingState === PredictionContext_1.PredictionContext.EMPTY_FULL_STATE_KEY) { - invokingState = -1; - } - this.contextEdges.set(invokingState, target); - } - getContextEdgeMap() { - let map = new Map(this.contextEdges); - let existing = map.get(-1); - if (existing !== void 0) { - if (map.size === 1) { - let result = new Map(); - result.set(PredictionContext_1.PredictionContext.EMPTY_FULL_STATE_KEY, existing); - return result; - } else { - map.delete(-1); - map.set(PredictionContext_1.PredictionContext.EMPTY_FULL_STATE_KEY, existing); - } - } - return map; - } - hashCode() { - let hash = MurmurHash_1.MurmurHash.initialize(7); - hash = MurmurHash_1.MurmurHash.update(hash, this.configs.hashCode()); - hash = MurmurHash_1.MurmurHash.finish(hash, 1); - return hash; - } - equals(o) { - if (this === o) { - return true; - } - if (!(o instanceof DFAState)) { - return false; - } - let other = o; - let sameSet = this.configs.equals(other.configs); - return sameSet; - } - toString() { - let buf = ""; - buf += this.stateNumber + ":" + this.configs; - if (this.isAcceptState) { - buf += "=>"; - if (this.predicates) { - buf += this.predicates; - } else { - buf += this.prediction; - } - } - return buf.toString(); - } - }; - __decorate([ - Decorators_1.NotNull - ], DFAState.prototype, "configs", void 0); - __decorate([ - Decorators_1.NotNull - ], DFAState.prototype, "edges", void 0); - __decorate([ - 
Decorators_1.NotNull - ], DFAState.prototype, "contextEdges", void 0); - __decorate([ - Decorators_1.Override - ], DFAState.prototype, "hashCode", null); - __decorate([ - Decorators_1.Override - ], DFAState.prototype, "equals", null); - __decorate([ - Decorators_1.Override - ], DFAState.prototype, "toString", null); - exports.DFAState = DFAState; - (function(DFAState2) { - let PredPrediction = class PredPrediction { - constructor(pred, alt) { - this.alt = alt; - this.pred = pred; - } - toString() { - return "(" + this.pred + ", " + this.alt + ")"; - } - }; - __decorate([ - Decorators_1.NotNull - ], PredPrediction.prototype, "pred", void 0); - __decorate([ - Decorators_1.Override - ], PredPrediction.prototype, "toString", null); - PredPrediction = __decorate([ - __param(0, Decorators_1.NotNull) - ], PredPrediction); - DFAState2.PredPrediction = PredPrediction; - })(DFAState = exports.DFAState || (exports.DFAState = {})); -}); - -// node_modules/antlr4ts/atn/ATNSimulator.js -var require_ATNSimulator = __commonJS((exports) => { - "use strict"; - var __decorate = exports && exports.__decorate || function(decorators, target, key, desc) { - var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; - if (typeof Reflect === "object" && typeof Reflect.decorate === "function") - r = Reflect.decorate(decorators, target, key, desc); - else - for (var i = decorators.length - 1; i >= 0; i--) - if (d = decorators[i]) - r = (c < 3 ? d(r) : c > 3 ? 
d(target, key, r) : d(target, key)) || r; - return c > 3 && r && Object.defineProperty(target, key, r), r; - }; - var __param = exports && exports.__param || function(paramIndex, decorator) { - return function(target, key) { - decorator(target, key, paramIndex); - }; - }; - Object.defineProperty(exports, "__esModule", {value: true}); - exports.ATNSimulator = void 0; - var ATNConfigSet_1 = require_ATNConfigSet(); - var DFAState_1 = require_DFAState(); - var Decorators_1 = require_Decorators(); - var PredictionContext_1 = require_PredictionContext(); - var ATNSimulator = class ATNSimulator2 { - constructor(atn) { - this.atn = atn; - } - static get ERROR() { - if (!ATNSimulator2._ERROR) { - ATNSimulator2._ERROR = new DFAState_1.DFAState(new ATNConfigSet_1.ATNConfigSet()); - ATNSimulator2._ERROR.stateNumber = PredictionContext_1.PredictionContext.EMPTY_FULL_STATE_KEY; - } - return ATNSimulator2._ERROR; - } - clearDFA() { - this.atn.clearDFA(); - } - }; - __decorate([ - Decorators_1.NotNull - ], ATNSimulator.prototype, "atn", void 0); - __decorate([ - Decorators_1.NotNull - ], ATNSimulator, "ERROR", null); - ATNSimulator = __decorate([ - __param(0, Decorators_1.NotNull) - ], ATNSimulator); - exports.ATNSimulator = ATNSimulator; - (function(ATNSimulator2) { - const RULE_VARIANT_DELIMITER = "$"; - const RULE_LF_VARIANT_MARKER = "$lf$"; - const RULE_NOLF_VARIANT_MARKER = "$nolf$"; - })(ATNSimulator = exports.ATNSimulator || (exports.ATNSimulator = {})); - exports.ATNSimulator = ATNSimulator; -}); - -// node_modules/antlr4ts/ConsoleErrorListener.js -var require_ConsoleErrorListener = __commonJS((exports) => { - "use strict"; - Object.defineProperty(exports, "__esModule", {value: true}); - exports.ConsoleErrorListener = void 0; - var ConsoleErrorListener = class { - syntaxError(recognizer, offendingSymbol, line, charPositionInLine, msg, e) { - console.error(`line ${line}:${charPositionInLine} ${msg}`); - } - }; - exports.ConsoleErrorListener = ConsoleErrorListener; - 
ConsoleErrorListener.INSTANCE = new ConsoleErrorListener(); -}); - -// node_modules/antlr4ts/ProxyErrorListener.js -var require_ProxyErrorListener = __commonJS((exports) => { - "use strict"; - var __decorate = exports && exports.__decorate || function(decorators, target, key, desc) { - var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; - if (typeof Reflect === "object" && typeof Reflect.decorate === "function") - r = Reflect.decorate(decorators, target, key, desc); - else - for (var i = decorators.length - 1; i >= 0; i--) - if (d = decorators[i]) - r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r; - return c > 3 && r && Object.defineProperty(target, key, r), r; - }; - var __param = exports && exports.__param || function(paramIndex, decorator) { - return function(target, key) { - decorator(target, key, paramIndex); - }; - }; - Object.defineProperty(exports, "__esModule", {value: true}); - exports.ProxyErrorListener = void 0; - var Decorators_1 = require_Decorators(); - var ProxyErrorListener = class { - constructor(delegates) { - this.delegates = delegates; - if (!delegates) { - throw new Error("Invalid delegates"); - } - } - getDelegates() { - return this.delegates; - } - syntaxError(recognizer, offendingSymbol, line, charPositionInLine, msg, e) { - this.delegates.forEach((listener) => { - if (listener.syntaxError) { - listener.syntaxError(recognizer, offendingSymbol, line, charPositionInLine, msg, e); - } - }); - } - }; - __decorate([ - Decorators_1.Override, - __param(0, Decorators_1.NotNull), - __param(4, Decorators_1.NotNull) - ], ProxyErrorListener.prototype, "syntaxError", null); - exports.ProxyErrorListener = ProxyErrorListener; -}); - -// node_modules/antlr4ts/Recognizer.js -var require_Recognizer = __commonJS((exports) => { - "use strict"; - var __decorate = exports && exports.__decorate || function(decorators, target, key, desc) { - var c = arguments.length, r = 
c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; - if (typeof Reflect === "object" && typeof Reflect.decorate === "function") - r = Reflect.decorate(decorators, target, key, desc); - else - for (var i = decorators.length - 1; i >= 0; i--) - if (d = decorators[i]) - r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r; - return c > 3 && r && Object.defineProperty(target, key, r), r; - }; - var __param = exports && exports.__param || function(paramIndex, decorator) { - return function(target, key) { - decorator(target, key, paramIndex); - }; - }; - Object.defineProperty(exports, "__esModule", {value: true}); - exports.Recognizer = void 0; - var ConsoleErrorListener_1 = require_ConsoleErrorListener(); - var ProxyErrorListener_1 = require_ProxyErrorListener(); - var Decorators_1 = require_Decorators(); - var Token_1 = require_Token(); - var Utils3 = require_Utils(); - var Recognizer = class { - constructor() { - this._listeners = [ConsoleErrorListener_1.ConsoleErrorListener.INSTANCE]; - this._stateNumber = -1; - } - getTokenTypeMap() { - let vocabulary = this.vocabulary; - let result = Recognizer.tokenTypeMapCache.get(vocabulary); - if (result == null) { - let intermediateResult = new Map(); - for (let i = 0; i <= this.atn.maxTokenType; i++) { - let literalName = vocabulary.getLiteralName(i); - if (literalName != null) { - intermediateResult.set(literalName, i); - } - let symbolicName = vocabulary.getSymbolicName(i); - if (symbolicName != null) { - intermediateResult.set(symbolicName, i); - } - } - intermediateResult.set("EOF", Token_1.Token.EOF); - result = intermediateResult; - Recognizer.tokenTypeMapCache.set(vocabulary, result); - } - return result; - } - getRuleIndexMap() { - let ruleNames = this.ruleNames; - if (ruleNames == null) { - throw new Error("The current recognizer does not provide a list of rule names."); - } - let result = Recognizer.ruleIndexMapCache.get(ruleNames); - if (result == null) { 
- result = Utils3.toMap(ruleNames); - Recognizer.ruleIndexMapCache.set(ruleNames, result); - } - return result; - } - getTokenType(tokenName) { - let ttype = this.getTokenTypeMap().get(tokenName); - if (ttype != null) { - return ttype; - } - return Token_1.Token.INVALID_TYPE; - } - get serializedATN() { - throw new Error("there is no serialized ATN"); - } - get atn() { - return this._interp.atn; - } - get interpreter() { - return this._interp; - } - set interpreter(interpreter) { - this._interp = interpreter; - } - get parseInfo() { - return Promise.resolve(void 0); - } - getErrorHeader(e) { - let token = e.getOffendingToken(); - if (!token) { - return ""; - } - let line = token.line; - let charPositionInLine = token.charPositionInLine; - return "line " + line + ":" + charPositionInLine; - } - addErrorListener(listener) { - if (!listener) { - throw new TypeError("listener must not be null"); - } - this._listeners.push(listener); - } - removeErrorListener(listener) { - let position = this._listeners.indexOf(listener); - if (position !== -1) { - this._listeners.splice(position, 1); - } - } - removeErrorListeners() { - this._listeners.length = 0; - } - getErrorListeners() { - return this._listeners.slice(0); - } - getErrorListenerDispatch() { - return new ProxyErrorListener_1.ProxyErrorListener(this.getErrorListeners()); - } - sempred(_localctx, ruleIndex, actionIndex) { - return true; - } - precpred(localctx, precedence) { - return true; - } - action(_localctx, ruleIndex, actionIndex) { - } - get state() { - return this._stateNumber; - } - set state(atnState) { - this._stateNumber = atnState; - } - }; - Recognizer.EOF = -1; - Recognizer.tokenTypeMapCache = new WeakMap(); - Recognizer.ruleIndexMapCache = new WeakMap(); - __decorate([ - Decorators_1.SuppressWarnings("serial"), - Decorators_1.NotNull - ], Recognizer.prototype, "_listeners", void 0); - __decorate([ - Decorators_1.NotNull - ], Recognizer.prototype, "getTokenTypeMap", null); - __decorate([ - 
Decorators_1.NotNull - ], Recognizer.prototype, "getRuleIndexMap", null); - __decorate([ - Decorators_1.NotNull - ], Recognizer.prototype, "serializedATN", null); - __decorate([ - Decorators_1.NotNull - ], Recognizer.prototype, "atn", null); - __decorate([ - Decorators_1.NotNull, - __param(0, Decorators_1.NotNull) - ], Recognizer.prototype, "interpreter", null); - __decorate([ - Decorators_1.NotNull, - __param(0, Decorators_1.NotNull) - ], Recognizer.prototype, "getErrorHeader", null); - __decorate([ - __param(0, Decorators_1.NotNull) - ], Recognizer.prototype, "addErrorListener", null); - __decorate([ - __param(0, Decorators_1.NotNull) - ], Recognizer.prototype, "removeErrorListener", null); - __decorate([ - Decorators_1.NotNull - ], Recognizer.prototype, "getErrorListeners", null); - exports.Recognizer = Recognizer; -}); - -// node_modules/antlr4ts/VocabularyImpl.js -var require_VocabularyImpl = __commonJS((exports) => { - "use strict"; - var __decorate = exports && exports.__decorate || function(decorators, target, key, desc) { - var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; - if (typeof Reflect === "object" && typeof Reflect.decorate === "function") - r = Reflect.decorate(decorators, target, key, desc); - else - for (var i = decorators.length - 1; i >= 0; i--) - if (d = decorators[i]) - r = (c < 3 ? d(r) : c > 3 ? 
d(target, key, r) : d(target, key)) || r; - return c > 3 && r && Object.defineProperty(target, key, r), r; - }; - Object.defineProperty(exports, "__esModule", {value: true}); - exports.VocabularyImpl = void 0; - var Decorators_1 = require_Decorators(); - var Token_1 = require_Token(); - var VocabularyImpl3 = class { - constructor(literalNames, symbolicNames, displayNames) { - this.literalNames = literalNames; - this.symbolicNames = symbolicNames; - this.displayNames = displayNames; - this._maxTokenType = Math.max(this.displayNames.length, Math.max(this.literalNames.length, this.symbolicNames.length)) - 1; - } - get maxTokenType() { - return this._maxTokenType; - } - getLiteralName(tokenType) { - if (tokenType >= 0 && tokenType < this.literalNames.length) { - return this.literalNames[tokenType]; - } - return void 0; - } - getSymbolicName(tokenType) { - if (tokenType >= 0 && tokenType < this.symbolicNames.length) { - return this.symbolicNames[tokenType]; - } - if (tokenType === Token_1.Token.EOF) { - return "EOF"; - } - return void 0; - } - getDisplayName(tokenType) { - if (tokenType >= 0 && tokenType < this.displayNames.length) { - let displayName = this.displayNames[tokenType]; - if (displayName) { - return displayName; - } - } - let literalName = this.getLiteralName(tokenType); - if (literalName) { - return literalName; - } - let symbolicName = this.getSymbolicName(tokenType); - if (symbolicName) { - return symbolicName; - } - return String(tokenType); - } - }; - VocabularyImpl3.EMPTY_VOCABULARY = new VocabularyImpl3([], [], []); - __decorate([ - Decorators_1.NotNull - ], VocabularyImpl3.prototype, "literalNames", void 0); - __decorate([ - Decorators_1.NotNull - ], VocabularyImpl3.prototype, "symbolicNames", void 0); - __decorate([ - Decorators_1.NotNull - ], VocabularyImpl3.prototype, "displayNames", void 0); - __decorate([ - Decorators_1.Override - ], VocabularyImpl3.prototype, "maxTokenType", null); - __decorate([ - Decorators_1.Override - ], 
VocabularyImpl3.prototype, "getLiteralName", null); - __decorate([ - Decorators_1.Override - ], VocabularyImpl3.prototype, "getSymbolicName", null); - __decorate([ - Decorators_1.Override, - Decorators_1.NotNull - ], VocabularyImpl3.prototype, "getDisplayName", null); - __decorate([ - Decorators_1.NotNull - ], VocabularyImpl3, "EMPTY_VOCABULARY", void 0); - exports.VocabularyImpl = VocabularyImpl3; -}); - -// node_modules/antlr4ts/dfa/DFASerializer.js -var require_DFASerializer = __commonJS((exports) => { - "use strict"; - var __decorate = exports && exports.__decorate || function(decorators, target, key, desc) { - var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; - if (typeof Reflect === "object" && typeof Reflect.decorate === "function") - r = Reflect.decorate(decorators, target, key, desc); - else - for (var i = decorators.length - 1; i >= 0; i--) - if (d = decorators[i]) - r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r; - return c > 3 && r && Object.defineProperty(target, key, r), r; - }; - Object.defineProperty(exports, "__esModule", {value: true}); - exports.DFASerializer = void 0; - var ATNSimulator_1 = require_ATNSimulator(); - var Decorators_1 = require_Decorators(); - var PredictionContext_1 = require_PredictionContext(); - var Recognizer_1 = require_Recognizer(); - var VocabularyImpl_1 = require_VocabularyImpl(); - var DFASerializer = class { - constructor(dfa, vocabulary, ruleNames, atn) { - if (vocabulary instanceof Recognizer_1.Recognizer) { - ruleNames = vocabulary.ruleNames; - atn = vocabulary.atn; - vocabulary = vocabulary.vocabulary; - } else if (!vocabulary) { - vocabulary = VocabularyImpl_1.VocabularyImpl.EMPTY_VOCABULARY; - } - this.dfa = dfa; - this.vocabulary = vocabulary; - this.ruleNames = ruleNames; - this.atn = atn; - } - toString() { - if (!this.dfa.s0) { - return ""; - } - let buf = ""; - if (this.dfa.states) { - let states = new 
Array(...this.dfa.states.toArray()); - states.sort((o1, o2) => o1.stateNumber - o2.stateNumber); - for (let s of states) { - let edges = s.getEdgeMap(); - let edgeKeys = [...edges.keys()].sort((a, b) => a - b); - let contextEdges = s.getContextEdgeMap(); - let contextEdgeKeys = [...contextEdges.keys()].sort((a, b) => a - b); - for (let entry of edgeKeys) { - let value = edges.get(entry); - if ((value == null || value === ATNSimulator_1.ATNSimulator.ERROR) && !s.isContextSymbol(entry)) { - continue; - } - let contextSymbol = false; - buf += this.getStateString(s) + "-" + this.getEdgeLabel(entry) + "->"; - if (s.isContextSymbol(entry)) { - buf += "!"; - contextSymbol = true; - } - let t = value; - if (t && t.stateNumber !== ATNSimulator_1.ATNSimulator.ERROR.stateNumber) { - buf += this.getStateString(t) + "\n"; - } else if (contextSymbol) { - buf += "ctx\n"; - } - } - if (s.isContextSensitive) { - for (let entry of contextEdgeKeys) { - buf += this.getStateString(s) + "-" + this.getContextLabel(entry) + "->" + this.getStateString(contextEdges.get(entry)) + "\n"; - } - } - } - } - let output = buf; - if (output.length === 0) { - return ""; - } - return output; - } - getContextLabel(i) { - if (i === PredictionContext_1.PredictionContext.EMPTY_FULL_STATE_KEY) { - return "ctx:EMPTY_FULL"; - } else if (i === PredictionContext_1.PredictionContext.EMPTY_LOCAL_STATE_KEY) { - return "ctx:EMPTY_LOCAL"; - } - if (this.atn && i > 0 && i <= this.atn.states.length) { - let state = this.atn.states[i]; - let ruleIndex = state.ruleIndex; - if (this.ruleNames && ruleIndex >= 0 && ruleIndex < this.ruleNames.length) { - return "ctx:" + String(i) + "(" + this.ruleNames[ruleIndex] + ")"; - } - } - return "ctx:" + String(i); - } - getEdgeLabel(i) { - return this.vocabulary.getDisplayName(i); - } - getStateString(s) { - if (s === ATNSimulator_1.ATNSimulator.ERROR) { - return "ERROR"; - } - let n = s.stateNumber; - let stateStr = "s" + n; - if (s.isAcceptState) { - if (s.predicates) { - 
stateStr = ":s" + n + "=>" + s.predicates; - } else { - stateStr = ":s" + n + "=>" + s.prediction; - } - } - if (s.isContextSensitive) { - stateStr += "*"; - for (let config of s.configs) { - if (config.reachesIntoOuterContext) { - stateStr += "*"; - break; - } - } - } - return stateStr; - } - }; - __decorate([ - Decorators_1.NotNull - ], DFASerializer.prototype, "dfa", void 0); - __decorate([ - Decorators_1.NotNull - ], DFASerializer.prototype, "vocabulary", void 0); - __decorate([ - Decorators_1.Override - ], DFASerializer.prototype, "toString", null); - exports.DFASerializer = DFASerializer; -}); - -// node_modules/antlr4ts/dfa/LexerDFASerializer.js -var require_LexerDFASerializer = __commonJS((exports) => { - "use strict"; - var __decorate = exports && exports.__decorate || function(decorators, target, key, desc) { - var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; - if (typeof Reflect === "object" && typeof Reflect.decorate === "function") - r = Reflect.decorate(decorators, target, key, desc); - else - for (var i = decorators.length - 1; i >= 0; i--) - if (d = decorators[i]) - r = (c < 3 ? d(r) : c > 3 ? 
d(target, key, r) : d(target, key)) || r; - return c > 3 && r && Object.defineProperty(target, key, r), r; - }; - var __param = exports && exports.__param || function(paramIndex, decorator) { - return function(target, key) { - decorator(target, key, paramIndex); - }; - }; - Object.defineProperty(exports, "__esModule", {value: true}); - exports.LexerDFASerializer = void 0; - var DFASerializer_1 = require_DFASerializer(); - var Decorators_1 = require_Decorators(); - var VocabularyImpl_1 = require_VocabularyImpl(); - var LexerDFASerializer = class LexerDFASerializer extends DFASerializer_1.DFASerializer { - constructor(dfa) { - super(dfa, VocabularyImpl_1.VocabularyImpl.EMPTY_VOCABULARY); - } - getEdgeLabel(i) { - return "'" + String.fromCodePoint(i) + "'"; - } - }; - __decorate([ - Decorators_1.Override, - Decorators_1.NotNull - ], LexerDFASerializer.prototype, "getEdgeLabel", null); - LexerDFASerializer = __decorate([ - __param(0, Decorators_1.NotNull) - ], LexerDFASerializer); - exports.LexerDFASerializer = LexerDFASerializer; -}); - -// node_modules/antlr4ts/atn/StarLoopEntryState.js -var require_StarLoopEntryState = __commonJS((exports) => { - "use strict"; - var __decorate = exports && exports.__decorate || function(decorators, target, key, desc) { - var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; - if (typeof Reflect === "object" && typeof Reflect.decorate === "function") - r = Reflect.decorate(decorators, target, key, desc); - else - for (var i = decorators.length - 1; i >= 0; i--) - if (d = decorators[i]) - r = (c < 3 ? d(r) : c > 3 ? 
d(target, key, r) : d(target, key)) || r; - return c > 3 && r && Object.defineProperty(target, key, r), r; - }; - Object.defineProperty(exports, "__esModule", {value: true}); - exports.StarLoopEntryState = void 0; - var ATNStateType_1 = require_ATNStateType(); - var BitSet_1 = require_BitSet(); - var DecisionState_1 = require_DecisionState(); - var Decorators_1 = require_Decorators(); - var StarLoopEntryState = class extends DecisionState_1.DecisionState { - constructor() { - super(...arguments); - this.precedenceRuleDecision = false; - this.precedenceLoopbackStates = new BitSet_1.BitSet(); - } - get stateType() { - return ATNStateType_1.ATNStateType.STAR_LOOP_ENTRY; - } - }; - __decorate([ - Decorators_1.Override - ], StarLoopEntryState.prototype, "stateType", null); - exports.StarLoopEntryState = StarLoopEntryState; -}); - -// node_modules/antlr4ts/dfa/DFA.js -var require_DFA = __commonJS((exports) => { - "use strict"; - var __decorate = exports && exports.__decorate || function(decorators, target, key, desc) { - var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; - if (typeof Reflect === "object" && typeof Reflect.decorate === "function") - r = Reflect.decorate(decorators, target, key, desc); - else - for (var i = decorators.length - 1; i >= 0; i--) - if (d = decorators[i]) - r = (c < 3 ? d(r) : c > 3 ? 
d(target, key, r) : d(target, key)) || r; - return c > 3 && r && Object.defineProperty(target, key, r), r; - }; - var __param = exports && exports.__param || function(paramIndex, decorator) { - return function(target, key) { - decorator(target, key, paramIndex); - }; - }; - Object.defineProperty(exports, "__esModule", {value: true}); - exports.DFA = void 0; - var Array2DHashSet_1 = require_Array2DHashSet(); - var ATNConfigSet_1 = require_ATNConfigSet(); - var DFASerializer_1 = require_DFASerializer(); - var DFAState_1 = require_DFAState(); - var LexerDFASerializer_1 = require_LexerDFASerializer(); - var Decorators_1 = require_Decorators(); - var ObjectEqualityComparator_1 = require_ObjectEqualityComparator(); - var StarLoopEntryState_1 = require_StarLoopEntryState(); - var VocabularyImpl_1 = require_VocabularyImpl(); - var DFA = class DFA { - constructor(atnStartState, decision = 0) { - this.states = new Array2DHashSet_1.Array2DHashSet(ObjectEqualityComparator_1.ObjectEqualityComparator.INSTANCE); - this.nextStateNumber = 0; - if (!atnStartState.atn) { - throw new Error("The ATNState must be associated with an ATN"); - } - this.atnStartState = atnStartState; - this.atn = atnStartState.atn; - this.decision = decision; - let isPrecedenceDfa = false; - if (atnStartState instanceof StarLoopEntryState_1.StarLoopEntryState) { - if (atnStartState.precedenceRuleDecision) { - isPrecedenceDfa = true; - this.s0 = new DFAState_1.DFAState(new ATNConfigSet_1.ATNConfigSet()); - this.s0full = new DFAState_1.DFAState(new ATNConfigSet_1.ATNConfigSet()); - } - } - this.precedenceDfa = isPrecedenceDfa; - } - get isPrecedenceDfa() { - return this.precedenceDfa; - } - getPrecedenceStartState(precedence, fullContext) { - if (!this.isPrecedenceDfa) { - throw new Error("Only precedence DFAs may contain a precedence start state."); - } - if (fullContext) { - return this.s0full.getTarget(precedence); - } else { - return this.s0.getTarget(precedence); - } - } - 
setPrecedenceStartState(precedence, fullContext, startState) { - if (!this.isPrecedenceDfa) { - throw new Error("Only precedence DFAs may contain a precedence start state."); - } - if (precedence < 0) { - return; - } - if (fullContext) { - this.s0full.setTarget(precedence, startState); - } else { - this.s0.setTarget(precedence, startState); - } - } - get isEmpty() { - if (this.isPrecedenceDfa) { - return this.s0.getEdgeMap().size === 0 && this.s0full.getEdgeMap().size === 0; - } - return this.s0 == null && this.s0full == null; - } - get isContextSensitive() { - if (this.isPrecedenceDfa) { - return this.s0full.getEdgeMap().size > 0; - } - return this.s0full != null; - } - addState(state) { - state.stateNumber = this.nextStateNumber++; - return this.states.getOrAdd(state); - } - toString(vocabulary, ruleNames) { - if (!vocabulary) { - vocabulary = VocabularyImpl_1.VocabularyImpl.EMPTY_VOCABULARY; - } - if (!this.s0) { - return ""; - } - let serializer; - if (ruleNames) { - serializer = new DFASerializer_1.DFASerializer(this, vocabulary, ruleNames, this.atnStartState.atn); - } else { - serializer = new DFASerializer_1.DFASerializer(this, vocabulary); - } - return serializer.toString(); - } - toLexerString() { - if (!this.s0) { - return ""; - } - let serializer = new LexerDFASerializer_1.LexerDFASerializer(this); - return serializer.toString(); - } - }; - __decorate([ - Decorators_1.NotNull - ], DFA.prototype, "states", void 0); - __decorate([ - Decorators_1.NotNull - ], DFA.prototype, "atnStartState", void 0); - __decorate([ - Decorators_1.NotNull - ], DFA.prototype, "atn", void 0); - DFA = __decorate([ - __param(0, Decorators_1.NotNull) - ], DFA); - exports.DFA = DFA; -}); - -// node_modules/antlr4ts/atn/BasicState.js -var require_BasicState = __commonJS((exports) => { - "use strict"; - var __decorate = exports && exports.__decorate || function(decorators, target, key, desc) { - var c = arguments.length, r = c < 3 ? target : desc === null ? 
desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; - if (typeof Reflect === "object" && typeof Reflect.decorate === "function") - r = Reflect.decorate(decorators, target, key, desc); - else - for (var i = decorators.length - 1; i >= 0; i--) - if (d = decorators[i]) - r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r; - return c > 3 && r && Object.defineProperty(target, key, r), r; - }; - Object.defineProperty(exports, "__esModule", {value: true}); - exports.BasicState = void 0; - var ATNState_1 = require_ATNState(); - var ATNStateType_1 = require_ATNStateType(); - var Decorators_1 = require_Decorators(); - var BasicState = class extends ATNState_1.ATNState { - get stateType() { - return ATNStateType_1.ATNStateType.BASIC; - } - }; - __decorate([ - Decorators_1.Override - ], BasicState.prototype, "stateType", null); - exports.BasicState = BasicState; -}); - -// node_modules/antlr4ts/atn/InvalidState.js -var require_InvalidState = __commonJS((exports) => { - "use strict"; - var __decorate = exports && exports.__decorate || function(decorators, target, key, desc) { - var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; - if (typeof Reflect === "object" && typeof Reflect.decorate === "function") - r = Reflect.decorate(decorators, target, key, desc); - else - for (var i = decorators.length - 1; i >= 0; i--) - if (d = decorators[i]) - r = (c < 3 ? d(r) : c > 3 ? 
d(target, key, r) : d(target, key)) || r; - return c > 3 && r && Object.defineProperty(target, key, r), r; - }; - Object.defineProperty(exports, "__esModule", {value: true}); - exports.InvalidState = void 0; - var ATNStateType_1 = require_ATNStateType(); - var BasicState_1 = require_BasicState(); - var Decorators_1 = require_Decorators(); - var InvalidState = class extends BasicState_1.BasicState { - get stateType() { - return ATNStateType_1.ATNStateType.INVALID_TYPE; - } - }; - __decorate([ - Decorators_1.Override - ], InvalidState.prototype, "stateType", null); - exports.InvalidState = InvalidState; -}); - -// node_modules/antlr4ts/atn/SetTransition.js -var require_SetTransition = __commonJS((exports) => { - "use strict"; - var __decorate = exports && exports.__decorate || function(decorators, target, key, desc) { - var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; - if (typeof Reflect === "object" && typeof Reflect.decorate === "function") - r = Reflect.decorate(decorators, target, key, desc); - else - for (var i = decorators.length - 1; i >= 0; i--) - if (d = decorators[i]) - r = (c < 3 ? d(r) : c > 3 ? 
d(target, key, r) : d(target, key)) || r; - return c > 3 && r && Object.defineProperty(target, key, r), r; - }; - var __param = exports && exports.__param || function(paramIndex, decorator) { - return function(target, key) { - decorator(target, key, paramIndex); - }; - }; - Object.defineProperty(exports, "__esModule", {value: true}); - exports.SetTransition = void 0; - var IntervalSet_1 = require_IntervalSet(); - var Decorators_1 = require_Decorators(); - var Token_1 = require_Token(); - var Transition_1 = require_Transition(); - var SetTransition = class SetTransition extends Transition_1.Transition { - constructor(target, set) { - super(target); - if (set == null) { - set = IntervalSet_1.IntervalSet.of(Token_1.Token.INVALID_TYPE); - } - this.set = set; - } - get serializationType() { - return 7; - } - get label() { - return this.set; - } - matches(symbol, minVocabSymbol, maxVocabSymbol) { - return this.set.contains(symbol); - } - toString() { - return this.set.toString(); - } - }; - __decorate([ - Decorators_1.NotNull - ], SetTransition.prototype, "set", void 0); - __decorate([ - Decorators_1.Override - ], SetTransition.prototype, "serializationType", null); - __decorate([ - Decorators_1.Override, - Decorators_1.NotNull - ], SetTransition.prototype, "label", null); - __decorate([ - Decorators_1.Override - ], SetTransition.prototype, "matches", null); - __decorate([ - Decorators_1.Override, - Decorators_1.NotNull - ], SetTransition.prototype, "toString", null); - SetTransition = __decorate([ - __param(0, Decorators_1.NotNull), - __param(1, Decorators_1.Nullable) - ], SetTransition); - exports.SetTransition = SetTransition; -}); - -// node_modules/antlr4ts/atn/NotSetTransition.js -var require_NotSetTransition = __commonJS((exports) => { - "use strict"; - var __decorate = exports && exports.__decorate || function(decorators, target, key, desc) { - var c = arguments.length, r = c < 3 ? target : desc === null ? 
desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; - if (typeof Reflect === "object" && typeof Reflect.decorate === "function") - r = Reflect.decorate(decorators, target, key, desc); - else - for (var i = decorators.length - 1; i >= 0; i--) - if (d = decorators[i]) - r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r; - return c > 3 && r && Object.defineProperty(target, key, r), r; - }; - var __param = exports && exports.__param || function(paramIndex, decorator) { - return function(target, key) { - decorator(target, key, paramIndex); - }; - }; - Object.defineProperty(exports, "__esModule", {value: true}); - exports.NotSetTransition = void 0; - var Decorators_1 = require_Decorators(); - var SetTransition_1 = require_SetTransition(); - var NotSetTransition = class NotSetTransition extends SetTransition_1.SetTransition { - constructor(target, set) { - super(target, set); - } - get serializationType() { - return 8; - } - matches(symbol, minVocabSymbol, maxVocabSymbol) { - return symbol >= minVocabSymbol && symbol <= maxVocabSymbol && !super.matches(symbol, minVocabSymbol, maxVocabSymbol); - } - toString() { - return "~" + super.toString(); - } - }; - __decorate([ - Decorators_1.Override - ], NotSetTransition.prototype, "serializationType", null); - __decorate([ - Decorators_1.Override - ], NotSetTransition.prototype, "matches", null); - __decorate([ - Decorators_1.Override - ], NotSetTransition.prototype, "toString", null); - NotSetTransition = __decorate([ - __param(0, Decorators_1.NotNull), - __param(1, Decorators_1.Nullable) - ], NotSetTransition); - exports.NotSetTransition = NotSetTransition; -}); - -// node_modules/antlr4ts/atn/RuleStopState.js -var require_RuleStopState = __commonJS((exports) => { - "use strict"; - var __decorate = exports && exports.__decorate || function(decorators, target, key, desc) { - var c = arguments.length, r = c < 3 ? target : desc === null ? 
desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; - if (typeof Reflect === "object" && typeof Reflect.decorate === "function") - r = Reflect.decorate(decorators, target, key, desc); - else - for (var i = decorators.length - 1; i >= 0; i--) - if (d = decorators[i]) - r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r; - return c > 3 && r && Object.defineProperty(target, key, r), r; - }; - Object.defineProperty(exports, "__esModule", {value: true}); - exports.RuleStopState = void 0; - var ATNState_1 = require_ATNState(); - var ATNStateType_1 = require_ATNStateType(); - var Decorators_1 = require_Decorators(); - var RuleStopState = class extends ATNState_1.ATNState { - get nonStopStateNumber() { - return -1; - } - get stateType() { - return ATNStateType_1.ATNStateType.RULE_STOP; - } - }; - __decorate([ - Decorators_1.Override - ], RuleStopState.prototype, "nonStopStateNumber", null); - __decorate([ - Decorators_1.Override - ], RuleStopState.prototype, "stateType", null); - exports.RuleStopState = RuleStopState; -}); - -// node_modules/antlr4ts/atn/RuleTransition.js -var require_RuleTransition = __commonJS((exports) => { - "use strict"; - var __decorate = exports && exports.__decorate || function(decorators, target, key, desc) { - var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; - if (typeof Reflect === "object" && typeof Reflect.decorate === "function") - r = Reflect.decorate(decorators, target, key, desc); - else - for (var i = decorators.length - 1; i >= 0; i--) - if (d = decorators[i]) - r = (c < 3 ? d(r) : c > 3 ? 
d(target, key, r) : d(target, key)) || r; - return c > 3 && r && Object.defineProperty(target, key, r), r; - }; - var __param = exports && exports.__param || function(paramIndex, decorator) { - return function(target, key) { - decorator(target, key, paramIndex); - }; - }; - Object.defineProperty(exports, "__esModule", {value: true}); - exports.RuleTransition = void 0; - var Decorators_1 = require_Decorators(); - var Transition_1 = require_Transition(); - var RuleTransition = class RuleTransition extends Transition_1.Transition { - constructor(ruleStart, ruleIndex, precedence, followState) { - super(ruleStart); - this.tailCall = false; - this.optimizedTailCall = false; - this.ruleIndex = ruleIndex; - this.precedence = precedence; - this.followState = followState; - } - get serializationType() { - return 3; - } - get isEpsilon() { - return true; - } - matches(symbol, minVocabSymbol, maxVocabSymbol) { - return false; - } - }; - __decorate([ - Decorators_1.NotNull - ], RuleTransition.prototype, "followState", void 0); - __decorate([ - Decorators_1.Override - ], RuleTransition.prototype, "serializationType", null); - __decorate([ - Decorators_1.Override - ], RuleTransition.prototype, "isEpsilon", null); - __decorate([ - Decorators_1.Override - ], RuleTransition.prototype, "matches", null); - RuleTransition = __decorate([ - __param(0, Decorators_1.NotNull), - __param(3, Decorators_1.NotNull) - ], RuleTransition); - exports.RuleTransition = RuleTransition; -}); - -// node_modules/antlr4ts/atn/WildcardTransition.js -var require_WildcardTransition = __commonJS((exports) => { - "use strict"; - var __decorate = exports && exports.__decorate || function(decorators, target, key, desc) { - var c = arguments.length, r = c < 3 ? target : desc === null ? 
desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; - if (typeof Reflect === "object" && typeof Reflect.decorate === "function") - r = Reflect.decorate(decorators, target, key, desc); - else - for (var i = decorators.length - 1; i >= 0; i--) - if (d = decorators[i]) - r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r; - return c > 3 && r && Object.defineProperty(target, key, r), r; - }; - var __param = exports && exports.__param || function(paramIndex, decorator) { - return function(target, key) { - decorator(target, key, paramIndex); - }; - }; - Object.defineProperty(exports, "__esModule", {value: true}); - exports.WildcardTransition = void 0; - var Decorators_1 = require_Decorators(); - var Transition_1 = require_Transition(); - var WildcardTransition = class WildcardTransition extends Transition_1.Transition { - constructor(target) { - super(target); - } - get serializationType() { - return 9; - } - matches(symbol, minVocabSymbol, maxVocabSymbol) { - return symbol >= minVocabSymbol && symbol <= maxVocabSymbol; - } - toString() { - return "."; - } - }; - __decorate([ - Decorators_1.Override - ], WildcardTransition.prototype, "serializationType", null); - __decorate([ - Decorators_1.Override - ], WildcardTransition.prototype, "matches", null); - __decorate([ - Decorators_1.Override, - Decorators_1.NotNull - ], WildcardTransition.prototype, "toString", null); - WildcardTransition = __decorate([ - __param(0, Decorators_1.NotNull) - ], WildcardTransition); - exports.WildcardTransition = WildcardTransition; -}); - -// node_modules/antlr4ts/atn/LL1Analyzer.js -var require_LL1Analyzer = __commonJS((exports) => { - "use strict"; - var __decorate = exports && exports.__decorate || function(decorators, target, key, desc) { - var c = arguments.length, r = c < 3 ? target : desc === null ? 
desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; - if (typeof Reflect === "object" && typeof Reflect.decorate === "function") - r = Reflect.decorate(decorators, target, key, desc); - else - for (var i = decorators.length - 1; i >= 0; i--) - if (d = decorators[i]) - r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r; - return c > 3 && r && Object.defineProperty(target, key, r), r; - }; - var __param = exports && exports.__param || function(paramIndex, decorator) { - return function(target, key) { - decorator(target, key, paramIndex); - }; - }; - Object.defineProperty(exports, "__esModule", {value: true}); - exports.LL1Analyzer = void 0; - var AbstractPredicateTransition_1 = require_AbstractPredicateTransition(); - var Array2DHashSet_1 = require_Array2DHashSet(); - var ATNConfig_1 = require_ATNConfig(); - var BitSet_1 = require_BitSet(); - var IntervalSet_1 = require_IntervalSet(); - var Decorators_1 = require_Decorators(); - var NotSetTransition_1 = require_NotSetTransition(); - var ObjectEqualityComparator_1 = require_ObjectEqualityComparator(); - var PredictionContext_1 = require_PredictionContext(); - var RuleStopState_1 = require_RuleStopState(); - var RuleTransition_1 = require_RuleTransition(); - var Token_1 = require_Token(); - var WildcardTransition_1 = require_WildcardTransition(); - var LL1Analyzer = class LL1Analyzer2 { - constructor(atn) { - this.atn = atn; - } - getDecisionLookahead(s) { - if (s == null) { - return void 0; - } - let look = new Array(s.numberOfTransitions); - for (let alt = 0; alt < s.numberOfTransitions; alt++) { - let current = new IntervalSet_1.IntervalSet(); - look[alt] = current; - let lookBusy = new Array2DHashSet_1.Array2DHashSet(ObjectEqualityComparator_1.ObjectEqualityComparator.INSTANCE); - let seeThruPreds = false; - this._LOOK(s.transition(alt).target, void 0, PredictionContext_1.PredictionContext.EMPTY_LOCAL, current, lookBusy, new BitSet_1.BitSet(), seeThruPreds, false); - if (current.size 
=== 0 || current.contains(LL1Analyzer2.HIT_PRED)) { - current = void 0; - look[alt] = current; - } - } - return look; - } - LOOK(s, ctx, stopState) { - if (stopState === void 0) { - if (s.atn == null) { - throw new Error("Illegal state"); - } - stopState = s.atn.ruleToStopState[s.ruleIndex]; - } else if (stopState === null) { - stopState = void 0; - } - let r = new IntervalSet_1.IntervalSet(); - let seeThruPreds = true; - let addEOF = true; - this._LOOK(s, stopState, ctx, r, new Array2DHashSet_1.Array2DHashSet(), new BitSet_1.BitSet(), seeThruPreds, addEOF); - return r; - } - _LOOK(s, stopState, ctx, look, lookBusy, calledRuleStack, seeThruPreds, addEOF) { - let c = ATNConfig_1.ATNConfig.create(s, 0, ctx); - if (!lookBusy.add(c)) { - return; - } - if (s === stopState) { - if (PredictionContext_1.PredictionContext.isEmptyLocal(ctx)) { - look.add(Token_1.Token.EPSILON); - return; - } else if (ctx.isEmpty) { - if (addEOF) { - look.add(Token_1.Token.EOF); - } - return; - } - } - if (s instanceof RuleStopState_1.RuleStopState) { - if (ctx.isEmpty && !PredictionContext_1.PredictionContext.isEmptyLocal(ctx)) { - if (addEOF) { - look.add(Token_1.Token.EOF); - } - return; - } - let removed = calledRuleStack.get(s.ruleIndex); - try { - calledRuleStack.clear(s.ruleIndex); - for (let i = 0; i < ctx.size; i++) { - if (ctx.getReturnState(i) === PredictionContext_1.PredictionContext.EMPTY_FULL_STATE_KEY) { - continue; - } - let returnState = this.atn.states[ctx.getReturnState(i)]; - this._LOOK(returnState, stopState, ctx.getParent(i), look, lookBusy, calledRuleStack, seeThruPreds, addEOF); - } - } finally { - if (removed) { - calledRuleStack.set(s.ruleIndex); - } - } - } - let n = s.numberOfTransitions; - for (let i = 0; i < n; i++) { - let t = s.transition(i); - if (t instanceof RuleTransition_1.RuleTransition) { - if (calledRuleStack.get(t.ruleIndex)) { - continue; - } - let newContext = ctx.getChild(t.followState.stateNumber); - try { - calledRuleStack.set(t.ruleIndex); - 
this._LOOK(t.target, stopState, newContext, look, lookBusy, calledRuleStack, seeThruPreds, addEOF); - } finally { - calledRuleStack.clear(t.ruleIndex); - } - } else if (t instanceof AbstractPredicateTransition_1.AbstractPredicateTransition) { - if (seeThruPreds) { - this._LOOK(t.target, stopState, ctx, look, lookBusy, calledRuleStack, seeThruPreds, addEOF); - } else { - look.add(LL1Analyzer2.HIT_PRED); - } - } else if (t.isEpsilon) { - this._LOOK(t.target, stopState, ctx, look, lookBusy, calledRuleStack, seeThruPreds, addEOF); - } else if (t instanceof WildcardTransition_1.WildcardTransition) { - look.addAll(IntervalSet_1.IntervalSet.of(Token_1.Token.MIN_USER_TOKEN_TYPE, this.atn.maxTokenType)); - } else { - let set = t.label; - if (set != null) { - if (t instanceof NotSetTransition_1.NotSetTransition) { - set = set.complement(IntervalSet_1.IntervalSet.of(Token_1.Token.MIN_USER_TOKEN_TYPE, this.atn.maxTokenType)); - } - look.addAll(set); - } - } - } - } - }; - LL1Analyzer.HIT_PRED = Token_1.Token.INVALID_TYPE; - __decorate([ - Decorators_1.NotNull - ], LL1Analyzer.prototype, "atn", void 0); - __decorate([ - Decorators_1.NotNull, - __param(0, Decorators_1.NotNull), - __param(1, Decorators_1.NotNull) - ], LL1Analyzer.prototype, "LOOK", null); - __decorate([ - __param(0, Decorators_1.NotNull), - __param(2, Decorators_1.NotNull), - __param(3, Decorators_1.NotNull), - __param(4, Decorators_1.NotNull), - __param(5, Decorators_1.NotNull) - ], LL1Analyzer.prototype, "_LOOK", null); - LL1Analyzer = __decorate([ - __param(0, Decorators_1.NotNull) - ], LL1Analyzer); - exports.LL1Analyzer = LL1Analyzer; -}); - -// node_modules/antlr4ts/atn/ATN.js -var require_ATN = __commonJS((exports) => { - "use strict"; - var __decorate = exports && exports.__decorate || function(decorators, target, key, desc) { - var c = arguments.length, r = c < 3 ? target : desc === null ? 
desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; - if (typeof Reflect === "object" && typeof Reflect.decorate === "function") - r = Reflect.decorate(decorators, target, key, desc); - else - for (var i = decorators.length - 1; i >= 0; i--) - if (d = decorators[i]) - r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r; - return c > 3 && r && Object.defineProperty(target, key, r), r; - }; - var __param = exports && exports.__param || function(paramIndex, decorator) { - return function(target, key) { - decorator(target, key, paramIndex); - }; - }; - Object.defineProperty(exports, "__esModule", {value: true}); - exports.ATN = void 0; - var Array2DHashMap_1 = require_Array2DHashMap(); - var DFA_1 = require_DFA(); - var IntervalSet_1 = require_IntervalSet(); - var InvalidState_1 = require_InvalidState(); - var LL1Analyzer_1 = require_LL1Analyzer(); - var Decorators_1 = require_Decorators(); - var ObjectEqualityComparator_1 = require_ObjectEqualityComparator(); - var PredictionContext_1 = require_PredictionContext(); - var Token_1 = require_Token(); - var assert = require("assert"); - var ATN2 = class ATN { - constructor(grammarType, maxTokenType) { - this.states = []; - this.decisionToState = []; - this.modeNameToStartState = new Map(); - this.modeToStartState = []; - this.contextCache = new Array2DHashMap_1.Array2DHashMap(ObjectEqualityComparator_1.ObjectEqualityComparator.INSTANCE); - this.decisionToDFA = []; - this.modeToDFA = []; - this.LL1Table = new Map(); - this.grammarType = grammarType; - this.maxTokenType = maxTokenType; - } - clearDFA() { - this.decisionToDFA = new Array(this.decisionToState.length); - for (let i = 0; i < this.decisionToDFA.length; i++) { - this.decisionToDFA[i] = new DFA_1.DFA(this.decisionToState[i], i); - } - this.modeToDFA = new Array(this.modeToStartState.length); - for (let i = 0; i < this.modeToDFA.length; i++) { - this.modeToDFA[i] = new DFA_1.DFA(this.modeToStartState[i]); - } - this.contextCache.clear(); 
- this.LL1Table.clear(); - } - get contextCacheSize() { - return this.contextCache.size; - } - getCachedContext(context) { - return PredictionContext_1.PredictionContext.getCachedContext(context, this.contextCache, new PredictionContext_1.PredictionContext.IdentityHashMap()); - } - getDecisionToDFA() { - assert(this.decisionToDFA != null && this.decisionToDFA.length === this.decisionToState.length); - return this.decisionToDFA; - } - nextTokens(s, ctx) { - if (ctx) { - let anal = new LL1Analyzer_1.LL1Analyzer(this); - let next = anal.LOOK(s, ctx); - return next; - } else { - if (s.nextTokenWithinRule) { - return s.nextTokenWithinRule; - } - s.nextTokenWithinRule = this.nextTokens(s, PredictionContext_1.PredictionContext.EMPTY_LOCAL); - s.nextTokenWithinRule.setReadonly(true); - return s.nextTokenWithinRule; - } - } - addState(state) { - state.atn = this; - state.stateNumber = this.states.length; - this.states.push(state); - } - removeState(state) { - let invalidState = new InvalidState_1.InvalidState(); - invalidState.atn = this; - invalidState.stateNumber = state.stateNumber; - this.states[state.stateNumber] = invalidState; - } - defineMode(name, s) { - this.modeNameToStartState.set(name, s); - this.modeToStartState.push(s); - this.modeToDFA.push(new DFA_1.DFA(s)); - this.defineDecisionState(s); - } - defineDecisionState(s) { - this.decisionToState.push(s); - s.decision = this.decisionToState.length - 1; - this.decisionToDFA.push(new DFA_1.DFA(s, s.decision)); - return s.decision; - } - getDecisionState(decision) { - if (this.decisionToState.length > 0) { - return this.decisionToState[decision]; - } - return void 0; - } - get numberOfDecisions() { - return this.decisionToState.length; - } - getExpectedTokens(stateNumber, context) { - if (stateNumber < 0 || stateNumber >= this.states.length) { - throw new RangeError("Invalid state number."); - } - let ctx = context; - let s = this.states[stateNumber]; - let following = this.nextTokens(s); - if 
(!following.contains(Token_1.Token.EPSILON)) { - return following; - } - let expected = new IntervalSet_1.IntervalSet(); - expected.addAll(following); - expected.remove(Token_1.Token.EPSILON); - while (ctx != null && ctx.invokingState >= 0 && following.contains(Token_1.Token.EPSILON)) { - let invokingState = this.states[ctx.invokingState]; - let rt = invokingState.transition(0); - following = this.nextTokens(rt.followState); - expected.addAll(following); - expected.remove(Token_1.Token.EPSILON); - ctx = ctx._parent; - } - if (following.contains(Token_1.Token.EPSILON)) { - expected.add(Token_1.Token.EOF); - } - return expected; - } - }; - __decorate([ - Decorators_1.NotNull - ], ATN2.prototype, "states", void 0); - __decorate([ - Decorators_1.NotNull - ], ATN2.prototype, "decisionToState", void 0); - __decorate([ - Decorators_1.NotNull - ], ATN2.prototype, "modeNameToStartState", void 0); - __decorate([ - Decorators_1.NotNull - ], ATN2.prototype, "modeToStartState", void 0); - __decorate([ - Decorators_1.NotNull - ], ATN2.prototype, "decisionToDFA", void 0); - __decorate([ - Decorators_1.NotNull - ], ATN2.prototype, "modeToDFA", void 0); - __decorate([ - Decorators_1.NotNull - ], ATN2.prototype, "nextTokens", null); - __decorate([ - __param(0, Decorators_1.NotNull) - ], ATN2.prototype, "removeState", null); - __decorate([ - __param(0, Decorators_1.NotNull), - __param(1, Decorators_1.NotNull) - ], ATN2.prototype, "defineMode", null); - __decorate([ - __param(0, Decorators_1.NotNull) - ], ATN2.prototype, "defineDecisionState", null); - __decorate([ - Decorators_1.NotNull - ], ATN2.prototype, "getExpectedTokens", null); - ATN2 = __decorate([ - __param(0, Decorators_1.NotNull) - ], ATN2); - exports.ATN = ATN2; - (function(ATN3) { - ATN3.INVALID_ALT_NUMBER = 0; - })(ATN2 = exports.ATN || (exports.ATN = {})); - exports.ATN = ATN2; -}); - -// node_modules/antlr4ts/atn/LexerIndexedCustomAction.js -var require_LexerIndexedCustomAction = __commonJS((exports) => { - "use 
strict"; - var __decorate = exports && exports.__decorate || function(decorators, target, key, desc) { - var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; - if (typeof Reflect === "object" && typeof Reflect.decorate === "function") - r = Reflect.decorate(decorators, target, key, desc); - else - for (var i = decorators.length - 1; i >= 0; i--) - if (d = decorators[i]) - r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r; - return c > 3 && r && Object.defineProperty(target, key, r), r; - }; - var __param = exports && exports.__param || function(paramIndex, decorator) { - return function(target, key) { - decorator(target, key, paramIndex); - }; - }; - Object.defineProperty(exports, "__esModule", {value: true}); - exports.LexerIndexedCustomAction = void 0; - var MurmurHash_1 = require_MurmurHash(); - var Decorators_1 = require_Decorators(); - var LexerIndexedCustomAction = class LexerIndexedCustomAction2 { - constructor(offset, action) { - this._offset = offset; - this._action = action; - } - get offset() { - return this._offset; - } - get action() { - return this._action; - } - get actionType() { - return this._action.actionType; - } - get isPositionDependent() { - return true; - } - execute(lexer) { - this._action.execute(lexer); - } - hashCode() { - let hash = MurmurHash_1.MurmurHash.initialize(); - hash = MurmurHash_1.MurmurHash.update(hash, this._offset); - hash = MurmurHash_1.MurmurHash.update(hash, this._action); - return MurmurHash_1.MurmurHash.finish(hash, 2); - } - equals(obj) { - if (obj === this) { - return true; - } else if (!(obj instanceof LexerIndexedCustomAction2)) { - return false; - } - return this._offset === obj._offset && this._action.equals(obj._action); - } - }; - __decorate([ - Decorators_1.NotNull - ], LexerIndexedCustomAction.prototype, "action", null); - __decorate([ - Decorators_1.Override - ], LexerIndexedCustomAction.prototype, "actionType", 
null); - __decorate([ - Decorators_1.Override - ], LexerIndexedCustomAction.prototype, "isPositionDependent", null); - __decorate([ - Decorators_1.Override - ], LexerIndexedCustomAction.prototype, "execute", null); - __decorate([ - Decorators_1.Override - ], LexerIndexedCustomAction.prototype, "hashCode", null); - __decorate([ - Decorators_1.Override - ], LexerIndexedCustomAction.prototype, "equals", null); - LexerIndexedCustomAction = __decorate([ - __param(1, Decorators_1.NotNull) - ], LexerIndexedCustomAction); - exports.LexerIndexedCustomAction = LexerIndexedCustomAction; -}); - -// node_modules/antlr4ts/atn/LexerActionExecutor.js -var require_LexerActionExecutor = __commonJS((exports) => { - "use strict"; - var __decorate = exports && exports.__decorate || function(decorators, target, key, desc) { - var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; - if (typeof Reflect === "object" && typeof Reflect.decorate === "function") - r = Reflect.decorate(decorators, target, key, desc); - else - for (var i = decorators.length - 1; i >= 0; i--) - if (d = decorators[i]) - r = (c < 3 ? d(r) : c > 3 ? 
d(target, key, r) : d(target, key)) || r; - return c > 3 && r && Object.defineProperty(target, key, r), r; - }; - var __param = exports && exports.__param || function(paramIndex, decorator) { - return function(target, key) { - decorator(target, key, paramIndex); - }; - }; - Object.defineProperty(exports, "__esModule", {value: true}); - exports.LexerActionExecutor = void 0; - var ArrayEqualityComparator_1 = require_ArrayEqualityComparator(); - var LexerIndexedCustomAction_1 = require_LexerIndexedCustomAction(); - var MurmurHash_1 = require_MurmurHash(); - var Decorators_1 = require_Decorators(); - var LexerActionExecutor = class LexerActionExecutor2 { - constructor(lexerActions) { - this._lexerActions = lexerActions; - let hash = MurmurHash_1.MurmurHash.initialize(); - for (let lexerAction of lexerActions) { - hash = MurmurHash_1.MurmurHash.update(hash, lexerAction); - } - this.cachedHashCode = MurmurHash_1.MurmurHash.finish(hash, lexerActions.length); - } - static append(lexerActionExecutor, lexerAction) { - if (!lexerActionExecutor) { - return new LexerActionExecutor2([lexerAction]); - } - let lexerActions = lexerActionExecutor._lexerActions.slice(0); - lexerActions.push(lexerAction); - return new LexerActionExecutor2(lexerActions); - } - fixOffsetBeforeMatch(offset) { - let updatedLexerActions; - for (let i = 0; i < this._lexerActions.length; i++) { - if (this._lexerActions[i].isPositionDependent && !(this._lexerActions[i] instanceof LexerIndexedCustomAction_1.LexerIndexedCustomAction)) { - if (!updatedLexerActions) { - updatedLexerActions = this._lexerActions.slice(0); - } - updatedLexerActions[i] = new LexerIndexedCustomAction_1.LexerIndexedCustomAction(offset, this._lexerActions[i]); - } - } - if (!updatedLexerActions) { - return this; - } - return new LexerActionExecutor2(updatedLexerActions); - } - get lexerActions() { - return this._lexerActions; - } - execute(lexer, input, startIndex) { - let requiresSeek = false; - let stopIndex = input.index; - try { - 
for (let lexerAction of this._lexerActions) { - if (lexerAction instanceof LexerIndexedCustomAction_1.LexerIndexedCustomAction) { - let offset = lexerAction.offset; - input.seek(startIndex + offset); - lexerAction = lexerAction.action; - requiresSeek = startIndex + offset !== stopIndex; - } else if (lexerAction.isPositionDependent) { - input.seek(stopIndex); - requiresSeek = false; - } - lexerAction.execute(lexer); - } - } finally { - if (requiresSeek) { - input.seek(stopIndex); - } - } - } - hashCode() { - return this.cachedHashCode; - } - equals(obj) { - if (obj === this) { - return true; - } else if (!(obj instanceof LexerActionExecutor2)) { - return false; - } - return this.cachedHashCode === obj.cachedHashCode && ArrayEqualityComparator_1.ArrayEqualityComparator.INSTANCE.equals(this._lexerActions, obj._lexerActions); - } - }; - __decorate([ - Decorators_1.NotNull - ], LexerActionExecutor.prototype, "_lexerActions", void 0); - __decorate([ - Decorators_1.NotNull - ], LexerActionExecutor.prototype, "lexerActions", null); - __decorate([ - __param(0, Decorators_1.NotNull) - ], LexerActionExecutor.prototype, "execute", null); - __decorate([ - Decorators_1.Override - ], LexerActionExecutor.prototype, "hashCode", null); - __decorate([ - Decorators_1.Override - ], LexerActionExecutor.prototype, "equals", null); - __decorate([ - Decorators_1.NotNull, - __param(1, Decorators_1.NotNull) - ], LexerActionExecutor, "append", null); - LexerActionExecutor = __decorate([ - __param(0, Decorators_1.NotNull) - ], LexerActionExecutor); - exports.LexerActionExecutor = LexerActionExecutor; -}); - -// node_modules/antlr4ts/LexerNoViableAltException.js -var require_LexerNoViableAltException = __commonJS((exports) => { - "use strict"; - var __decorate = exports && exports.__decorate || function(decorators, target, key, desc) { - var c = arguments.length, r = c < 3 ? target : desc === null ? 
desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; - if (typeof Reflect === "object" && typeof Reflect.decorate === "function") - r = Reflect.decorate(decorators, target, key, desc); - else - for (var i = decorators.length - 1; i >= 0; i--) - if (d = decorators[i]) - r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r; - return c > 3 && r && Object.defineProperty(target, key, r), r; - }; - var __param = exports && exports.__param || function(paramIndex, decorator) { - return function(target, key) { - decorator(target, key, paramIndex); - }; - }; - Object.defineProperty(exports, "__esModule", {value: true}); - exports.LexerNoViableAltException = void 0; - var RecognitionException_1 = require_RecognitionException(); - var Decorators_1 = require_Decorators(); - var Interval_1 = require_Interval(); - var Utils3 = require_Utils(); - var LexerNoViableAltException = class LexerNoViableAltException extends RecognitionException_1.RecognitionException { - constructor(lexer, input, startIndex, deadEndConfigs) { - super(lexer, input); - this._startIndex = startIndex; - this._deadEndConfigs = deadEndConfigs; - } - get startIndex() { - return this._startIndex; - } - get deadEndConfigs() { - return this._deadEndConfigs; - } - get inputStream() { - return super.inputStream; - } - toString() { - let symbol = ""; - if (this._startIndex >= 0 && this._startIndex < this.inputStream.size) { - symbol = this.inputStream.getText(Interval_1.Interval.of(this._startIndex, this._startIndex)); - symbol = Utils3.escapeWhitespace(symbol, false); - } - return `LexerNoViableAltException('${symbol}')`; - } - }; - __decorate([ - Decorators_1.Override - ], LexerNoViableAltException.prototype, "inputStream", null); - __decorate([ - Decorators_1.Override - ], LexerNoViableAltException.prototype, "toString", null); - LexerNoViableAltException = __decorate([ - __param(1, Decorators_1.NotNull) - ], LexerNoViableAltException); - exports.LexerNoViableAltException = 
LexerNoViableAltException; -}); - -// node_modules/antlr4ts/atn/OrderedATNConfigSet.js -var require_OrderedATNConfigSet = __commonJS((exports) => { - "use strict"; - var __decorate = exports && exports.__decorate || function(decorators, target, key, desc) { - var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; - if (typeof Reflect === "object" && typeof Reflect.decorate === "function") - r = Reflect.decorate(decorators, target, key, desc); - else - for (var i = decorators.length - 1; i >= 0; i--) - if (d = decorators[i]) - r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r; - return c > 3 && r && Object.defineProperty(target, key, r), r; - }; - Object.defineProperty(exports, "__esModule", {value: true}); - exports.OrderedATNConfigSet = void 0; - var ATNConfigSet_1 = require_ATNConfigSet(); - var Decorators_1 = require_Decorators(); - var OrderedATNConfigSet = class extends ATNConfigSet_1.ATNConfigSet { - constructor(set, readonly) { - if (set != null && readonly != null) { - super(set, readonly); - } else { - super(); - } - } - clone(readonly) { - let copy = new OrderedATNConfigSet(this, readonly); - if (!readonly && this.isReadOnly) { - copy.addAll(this); - } - return copy; - } - getKey(e) { - return {state: 0, alt: e.hashCode()}; - } - canMerge(left, leftKey, right) { - return left.equals(right); - } - }; - __decorate([ - Decorators_1.Override - ], OrderedATNConfigSet.prototype, "clone", null); - __decorate([ - Decorators_1.Override - ], OrderedATNConfigSet.prototype, "getKey", null); - __decorate([ - Decorators_1.Override - ], OrderedATNConfigSet.prototype, "canMerge", null); - exports.OrderedATNConfigSet = OrderedATNConfigSet; -}); - -// node_modules/antlr4ts/atn/LexerATNSimulator.js -var require_LexerATNSimulator = __commonJS((exports) => { - "use strict"; - var __decorate = exports && exports.__decorate || function(decorators, target, key, desc) { - var c = 
arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; - if (typeof Reflect === "object" && typeof Reflect.decorate === "function") - r = Reflect.decorate(decorators, target, key, desc); - else - for (var i = decorators.length - 1; i >= 0; i--) - if (d = decorators[i]) - r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r; - return c > 3 && r && Object.defineProperty(target, key, r), r; - }; - var __param = exports && exports.__param || function(paramIndex, decorator) { - return function(target, key) { - decorator(target, key, paramIndex); - }; - }; - Object.defineProperty(exports, "__esModule", {value: true}); - exports.LexerATNSimulator = void 0; - var AcceptStateInfo_1 = require_AcceptStateInfo(); - var ATN_1 = require_ATN(); - var ATNConfig_1 = require_ATNConfig(); - var ATNConfigSet_1 = require_ATNConfigSet(); - var ATNSimulator_1 = require_ATNSimulator(); - var DFAState_1 = require_DFAState(); - var Interval_1 = require_Interval(); - var IntStream_1 = require_IntStream(); - var Lexer_1 = require_Lexer(); - var LexerActionExecutor_1 = require_LexerActionExecutor(); - var LexerNoViableAltException_1 = require_LexerNoViableAltException(); - var Decorators_1 = require_Decorators(); - var OrderedATNConfigSet_1 = require_OrderedATNConfigSet(); - var PredictionContext_1 = require_PredictionContext(); - var RuleStopState_1 = require_RuleStopState(); - var Token_1 = require_Token(); - var assert = require("assert"); - var LexerATNSimulator2 = class LexerATNSimulator3 extends ATNSimulator_1.ATNSimulator { - constructor(atn, recog) { - super(atn); - this.optimize_tail_calls = true; - this.startIndex = -1; - this._line = 1; - this._charPositionInLine = 0; - this.mode = Lexer_1.Lexer.DEFAULT_MODE; - this.prevAccept = new LexerATNSimulator3.SimState(); - this.recog = recog; - } - copyState(simulator) { - this._charPositionInLine = simulator.charPositionInLine; - this._line = simulator._line; - 
this.mode = simulator.mode; - this.startIndex = simulator.startIndex; - } - match(input, mode) { - this.mode = mode; - let mark = input.mark(); - try { - this.startIndex = input.index; - this.prevAccept.reset(); - let s0 = this.atn.modeToDFA[mode].s0; - if (s0 == null) { - return this.matchATN(input); - } else { - return this.execATN(input, s0); - } - } finally { - input.release(mark); - } - } - reset() { - this.prevAccept.reset(); - this.startIndex = -1; - this._line = 1; - this._charPositionInLine = 0; - this.mode = Lexer_1.Lexer.DEFAULT_MODE; - } - matchATN(input) { - let startState = this.atn.modeToStartState[this.mode]; - if (LexerATNSimulator3.debug) { - console.log(`matchATN mode ${this.mode} start: ${startState}`); - } - let old_mode = this.mode; - let s0_closure = this.computeStartState(input, startState); - let suppressEdge = s0_closure.hasSemanticContext; - if (suppressEdge) { - s0_closure.hasSemanticContext = false; - } - let next = this.addDFAState(s0_closure); - if (!suppressEdge) { - let dfa = this.atn.modeToDFA[this.mode]; - if (!dfa.s0) { - dfa.s0 = next; - } else { - next = dfa.s0; - } - } - let predict = this.execATN(input, next); - if (LexerATNSimulator3.debug) { - console.log(`DFA after matchATN: ${this.atn.modeToDFA[old_mode].toLexerString()}`); - } - return predict; - } - execATN(input, ds0) { - if (LexerATNSimulator3.debug) { - console.log(`start state closure=${ds0.configs}`); - } - if (ds0.isAcceptState) { - this.captureSimState(this.prevAccept, input, ds0); - } - let t = input.LA(1); - let s = ds0; - while (true) { - if (LexerATNSimulator3.debug) { - console.log(`execATN loop starting closure: ${s.configs}`); - } - let target = this.getExistingTargetState(s, t); - if (target == null) { - target = this.computeTargetState(input, s, t); - } - if (target === ATNSimulator_1.ATNSimulator.ERROR) { - break; - } - if (t !== IntStream_1.IntStream.EOF) { - this.consume(input); - } - if (target.isAcceptState) { - this.captureSimState(this.prevAccept, 
input, target); - if (t === IntStream_1.IntStream.EOF) { - break; - } - } - t = input.LA(1); - s = target; - } - return this.failOrAccept(this.prevAccept, input, s.configs, t); - } - getExistingTargetState(s, t) { - let target = s.getTarget(t); - if (LexerATNSimulator3.debug && target != null) { - console.log("reuse state " + s.stateNumber + " edge to " + target.stateNumber); - } - return target; - } - computeTargetState(input, s, t) { - let reach = new OrderedATNConfigSet_1.OrderedATNConfigSet(); - this.getReachableConfigSet(input, s.configs, reach, t); - if (reach.isEmpty) { - if (!reach.hasSemanticContext) { - this.addDFAEdge(s, t, ATNSimulator_1.ATNSimulator.ERROR); - } - return ATNSimulator_1.ATNSimulator.ERROR; - } - return this.addDFAEdge(s, t, reach); - } - failOrAccept(prevAccept, input, reach, t) { - if (prevAccept.dfaState != null) { - let lexerActionExecutor = prevAccept.dfaState.lexerActionExecutor; - this.accept(input, lexerActionExecutor, this.startIndex, prevAccept.index, prevAccept.line, prevAccept.charPos); - return prevAccept.dfaState.prediction; - } else { - if (t === IntStream_1.IntStream.EOF && input.index === this.startIndex) { - return Token_1.Token.EOF; - } - throw new LexerNoViableAltException_1.LexerNoViableAltException(this.recog, input, this.startIndex, reach); - } - } - getReachableConfigSet(input, closure, reach, t) { - let skipAlt = ATN_1.ATN.INVALID_ALT_NUMBER; - for (let c of closure) { - let currentAltReachedAcceptState = c.alt === skipAlt; - if (currentAltReachedAcceptState && c.hasPassedThroughNonGreedyDecision) { - continue; - } - if (LexerATNSimulator3.debug) { - console.log(`testing ${this.getTokenName(t)} at ${c.toString(this.recog, true)}`); - } - let n = c.state.numberOfOptimizedTransitions; - for (let ti = 0; ti < n; ti++) { - let trans = c.state.getOptimizedTransition(ti); - let target = this.getReachableTarget(trans, t); - if (target != null) { - let lexerActionExecutor = c.lexerActionExecutor; - let config; - if 
(lexerActionExecutor != null) { - lexerActionExecutor = lexerActionExecutor.fixOffsetBeforeMatch(input.index - this.startIndex); - config = c.transform(target, true, lexerActionExecutor); - } else { - assert(c.lexerActionExecutor == null); - config = c.transform(target, true); - } - let treatEofAsEpsilon = t === IntStream_1.IntStream.EOF; - if (this.closure(input, config, reach, currentAltReachedAcceptState, true, treatEofAsEpsilon)) { - skipAlt = c.alt; - break; - } - } - } - } - } - accept(input, lexerActionExecutor, startIndex, index, line, charPos) { - if (LexerATNSimulator3.debug) { - console.log(`ACTION ${lexerActionExecutor}`); - } - input.seek(index); - this._line = line; - this._charPositionInLine = charPos; - if (lexerActionExecutor != null && this.recog != null) { - lexerActionExecutor.execute(this.recog, input, startIndex); - } - } - getReachableTarget(trans, t) { - if (trans.matches(t, Lexer_1.Lexer.MIN_CHAR_VALUE, Lexer_1.Lexer.MAX_CHAR_VALUE)) { - return trans.target; - } - return void 0; - } - computeStartState(input, p) { - let initialContext = PredictionContext_1.PredictionContext.EMPTY_FULL; - let configs = new OrderedATNConfigSet_1.OrderedATNConfigSet(); - for (let i = 0; i < p.numberOfTransitions; i++) { - let target = p.transition(i).target; - let c = ATNConfig_1.ATNConfig.create(target, i + 1, initialContext); - this.closure(input, c, configs, false, false, false); - } - return configs; - } - closure(input, config, configs, currentAltReachedAcceptState, speculative, treatEofAsEpsilon) { - if (LexerATNSimulator3.debug) { - console.log("closure(" + config.toString(this.recog, true) + ")"); - } - if (config.state instanceof RuleStopState_1.RuleStopState) { - if (LexerATNSimulator3.debug) { - if (this.recog != null) { - console.log(`closure at ${this.recog.ruleNames[config.state.ruleIndex]} rule stop ${config}`); - } else { - console.log(`closure at rule stop ${config}`); - } - } - let context = config.context; - if (context.isEmpty) { - 
configs.add(config); - return true; - } else if (context.hasEmpty) { - configs.add(config.transform(config.state, true, PredictionContext_1.PredictionContext.EMPTY_FULL)); - currentAltReachedAcceptState = true; - } - for (let i = 0; i < context.size; i++) { - let returnStateNumber = context.getReturnState(i); - if (returnStateNumber === PredictionContext_1.PredictionContext.EMPTY_FULL_STATE_KEY) { - continue; - } - let newContext = context.getParent(i); - let returnState = this.atn.states[returnStateNumber]; - let c = config.transform(returnState, false, newContext); - currentAltReachedAcceptState = this.closure(input, c, configs, currentAltReachedAcceptState, speculative, treatEofAsEpsilon); - } - return currentAltReachedAcceptState; - } - if (!config.state.onlyHasEpsilonTransitions) { - if (!currentAltReachedAcceptState || !config.hasPassedThroughNonGreedyDecision) { - configs.add(config); - } - } - let p = config.state; - for (let i = 0; i < p.numberOfOptimizedTransitions; i++) { - let t = p.getOptimizedTransition(i); - let c = this.getEpsilonTarget(input, config, t, configs, speculative, treatEofAsEpsilon); - if (c != null) { - currentAltReachedAcceptState = this.closure(input, c, configs, currentAltReachedAcceptState, speculative, treatEofAsEpsilon); - } - } - return currentAltReachedAcceptState; - } - getEpsilonTarget(input, config, t, configs, speculative, treatEofAsEpsilon) { - let c; - switch (t.serializationType) { - case 3: - let ruleTransition = t; - if (this.optimize_tail_calls && ruleTransition.optimizedTailCall && !config.context.hasEmpty) { - c = config.transform(t.target, true); - } else { - let newContext = config.context.getChild(ruleTransition.followState.stateNumber); - c = config.transform(t.target, true, newContext); - } - break; - case 10: - throw new Error("Precedence predicates are not supported in lexers."); - case 4: - let pt = t; - if (LexerATNSimulator3.debug) { - console.log("EVAL rule " + pt.ruleIndex + ":" + pt.predIndex); - } - 
configs.hasSemanticContext = true; - if (this.evaluatePredicate(input, pt.ruleIndex, pt.predIndex, speculative)) { - c = config.transform(t.target, true); - } else { - c = void 0; - } - break; - case 6: - if (config.context.hasEmpty) { - let lexerActionExecutor = LexerActionExecutor_1.LexerActionExecutor.append(config.lexerActionExecutor, this.atn.lexerActions[t.actionIndex]); - c = config.transform(t.target, true, lexerActionExecutor); - break; - } else { - c = config.transform(t.target, true); - break; - } - case 1: - c = config.transform(t.target, true); - break; - case 5: - case 2: - case 7: - if (treatEofAsEpsilon) { - if (t.matches(IntStream_1.IntStream.EOF, Lexer_1.Lexer.MIN_CHAR_VALUE, Lexer_1.Lexer.MAX_CHAR_VALUE)) { - c = config.transform(t.target, false); - break; - } - } - c = void 0; - break; - default: - c = void 0; - break; - } - return c; - } - evaluatePredicate(input, ruleIndex, predIndex, speculative) { - if (this.recog == null) { - return true; - } - if (!speculative) { - return this.recog.sempred(void 0, ruleIndex, predIndex); - } - let savedCharPositionInLine = this._charPositionInLine; - let savedLine = this._line; - let index = input.index; - let marker = input.mark(); - try { - this.consume(input); - return this.recog.sempred(void 0, ruleIndex, predIndex); - } finally { - this._charPositionInLine = savedCharPositionInLine; - this._line = savedLine; - input.seek(index); - input.release(marker); - } - } - captureSimState(settings, input, dfaState) { - settings.index = input.index; - settings.line = this._line; - settings.charPos = this._charPositionInLine; - settings.dfaState = dfaState; - } - addDFAEdge(p, t, q) { - if (q instanceof ATNConfigSet_1.ATNConfigSet) { - let suppressEdge = q.hasSemanticContext; - if (suppressEdge) { - q.hasSemanticContext = false; - } - let to = this.addDFAState(q); - if (suppressEdge) { - return to; - } - this.addDFAEdge(p, t, to); - return to; - } else { - if (LexerATNSimulator3.debug) { - console.log("EDGE " + p 
+ " -> " + q + " upon " + String.fromCharCode(t)); - } - if (p != null) { - p.setTarget(t, q); - } - } - } - addDFAState(configs) { - assert(!configs.hasSemanticContext); - let proposed = new DFAState_1.DFAState(configs); - let existing = this.atn.modeToDFA[this.mode].states.get(proposed); - if (existing != null) { - return existing; - } - configs.optimizeConfigs(this); - let newState = new DFAState_1.DFAState(configs.clone(true)); - let firstConfigWithRuleStopState; - for (let c of configs) { - if (c.state instanceof RuleStopState_1.RuleStopState) { - firstConfigWithRuleStopState = c; - break; - } - } - if (firstConfigWithRuleStopState != null) { - let prediction = this.atn.ruleToTokenType[firstConfigWithRuleStopState.state.ruleIndex]; - let lexerActionExecutor = firstConfigWithRuleStopState.lexerActionExecutor; - newState.acceptStateInfo = new AcceptStateInfo_1.AcceptStateInfo(prediction, lexerActionExecutor); - } - return this.atn.modeToDFA[this.mode].addState(newState); - } - getDFA(mode) { - return this.atn.modeToDFA[mode]; - } - getText(input) { - return input.getText(Interval_1.Interval.of(this.startIndex, input.index - 1)); - } - get line() { - return this._line; - } - set line(line) { - this._line = line; - } - get charPositionInLine() { - return this._charPositionInLine; - } - set charPositionInLine(charPositionInLine) { - this._charPositionInLine = charPositionInLine; - } - consume(input) { - let curChar = input.LA(1); - if (curChar === "\n".charCodeAt(0)) { - this._line++; - this._charPositionInLine = 0; - } else { - this._charPositionInLine++; - } - input.consume(); - } - getTokenName(t) { - if (t === -1) { - return "EOF"; - } - return "'" + String.fromCharCode(t) + "'"; - } - }; - __decorate([ - Decorators_1.NotNull - ], LexerATNSimulator2.prototype, "prevAccept", void 0); - __decorate([ - __param(0, Decorators_1.NotNull) - ], LexerATNSimulator2.prototype, "copyState", null); - __decorate([ - __param(0, Decorators_1.NotNull) - ], 
LexerATNSimulator2.prototype, "match", null); - __decorate([ - Decorators_1.Override - ], LexerATNSimulator2.prototype, "reset", null); - __decorate([ - __param(0, Decorators_1.NotNull) - ], LexerATNSimulator2.prototype, "matchATN", null); - __decorate([ - __param(0, Decorators_1.NotNull), - __param(1, Decorators_1.NotNull) - ], LexerATNSimulator2.prototype, "execATN", null); - __decorate([ - __param(0, Decorators_1.NotNull) - ], LexerATNSimulator2.prototype, "getExistingTargetState", null); - __decorate([ - Decorators_1.NotNull, - __param(0, Decorators_1.NotNull), - __param(1, Decorators_1.NotNull) - ], LexerATNSimulator2.prototype, "computeTargetState", null); - __decorate([ - __param(0, Decorators_1.NotNull), - __param(1, Decorators_1.NotNull), - __param(2, Decorators_1.NotNull) - ], LexerATNSimulator2.prototype, "getReachableConfigSet", null); - __decorate([ - __param(0, Decorators_1.NotNull) - ], LexerATNSimulator2.prototype, "accept", null); - __decorate([ - Decorators_1.NotNull, - __param(0, Decorators_1.NotNull), - __param(1, Decorators_1.NotNull) - ], LexerATNSimulator2.prototype, "computeStartState", null); - __decorate([ - __param(0, Decorators_1.NotNull), - __param(1, Decorators_1.NotNull), - __param(2, Decorators_1.NotNull) - ], LexerATNSimulator2.prototype, "closure", null); - __decorate([ - __param(0, Decorators_1.NotNull), - __param(1, Decorators_1.NotNull), - __param(2, Decorators_1.NotNull), - __param(3, Decorators_1.NotNull) - ], LexerATNSimulator2.prototype, "getEpsilonTarget", null); - __decorate([ - __param(0, Decorators_1.NotNull) - ], LexerATNSimulator2.prototype, "evaluatePredicate", null); - __decorate([ - __param(0, Decorators_1.NotNull), - __param(1, Decorators_1.NotNull), - __param(2, Decorators_1.NotNull) - ], LexerATNSimulator2.prototype, "captureSimState", null); - __decorate([ - Decorators_1.NotNull, - __param(0, Decorators_1.NotNull) - ], LexerATNSimulator2.prototype, "addDFAState", null); - __decorate([ - Decorators_1.NotNull - ], 
LexerATNSimulator2.prototype, "getDFA", null); - __decorate([ - Decorators_1.NotNull, - __param(0, Decorators_1.NotNull) - ], LexerATNSimulator2.prototype, "getText", null); - __decorate([ - __param(0, Decorators_1.NotNull) - ], LexerATNSimulator2.prototype, "consume", null); - __decorate([ - Decorators_1.NotNull - ], LexerATNSimulator2.prototype, "getTokenName", null); - LexerATNSimulator2 = __decorate([ - __param(0, Decorators_1.NotNull) - ], LexerATNSimulator2); - exports.LexerATNSimulator = LexerATNSimulator2; - (function(LexerATNSimulator3) { - LexerATNSimulator3.debug = false; - LexerATNSimulator3.dfa_debug = false; - class SimState { - constructor() { - this.index = -1; - this.line = 0; - this.charPos = -1; - } - reset() { - this.index = -1; - this.line = 0; - this.charPos = -1; - this.dfaState = void 0; - } - } - LexerATNSimulator3.SimState = SimState; - })(LexerATNSimulator2 = exports.LexerATNSimulator || (exports.LexerATNSimulator = {})); - exports.LexerATNSimulator = LexerATNSimulator2; -}); - -// node_modules/antlr4ts/Lexer.js -var require_Lexer = __commonJS((exports) => { - "use strict"; - var __decorate = exports && exports.__decorate || function(decorators, target, key, desc) { - var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; - if (typeof Reflect === "object" && typeof Reflect.decorate === "function") - r = Reflect.decorate(decorators, target, key, desc); - else - for (var i = decorators.length - 1; i >= 0; i--) - if (d = decorators[i]) - r = (c < 3 ? d(r) : c > 3 ? 
d(target, key, r) : d(target, key)) || r; - return c > 3 && r && Object.defineProperty(target, key, r), r; - }; - Object.defineProperty(exports, "__esModule", {value: true}); - exports.Lexer = void 0; - var CommonTokenFactory_1 = require_CommonTokenFactory(); - var IntegerStack_1 = require_IntegerStack(); - var Interval_1 = require_Interval(); - var IntStream_1 = require_IntStream(); - var LexerATNSimulator_1 = require_LexerATNSimulator(); - var LexerNoViableAltException_1 = require_LexerNoViableAltException(); - var Decorators_1 = require_Decorators(); - var Recognizer_1 = require_Recognizer(); - var Token_1 = require_Token(); - var Lexer2 = class extends Recognizer_1.Recognizer { - constructor(input) { - super(); - this._factory = CommonTokenFactory_1.CommonTokenFactory.DEFAULT; - this._tokenStartCharIndex = -1; - this._tokenStartLine = 0; - this._tokenStartCharPositionInLine = 0; - this._hitEOF = false; - this._channel = 0; - this._type = 0; - this._modeStack = new IntegerStack_1.IntegerStack(); - this._mode = Lexer2.DEFAULT_MODE; - this._input = input; - this._tokenFactorySourcePair = {source: this, stream: input}; - } - static get DEFAULT_TOKEN_CHANNEL() { - return Token_1.Token.DEFAULT_CHANNEL; - } - static get HIDDEN() { - return Token_1.Token.HIDDEN_CHANNEL; - } - reset(resetInput) { - if (resetInput === void 0 || resetInput) { - this._input.seek(0); - } - this._token = void 0; - this._type = Token_1.Token.INVALID_TYPE; - this._channel = Token_1.Token.DEFAULT_CHANNEL; - this._tokenStartCharIndex = -1; - this._tokenStartCharPositionInLine = -1; - this._tokenStartLine = -1; - this._text = void 0; - this._hitEOF = false; - this._mode = Lexer2.DEFAULT_MODE; - this._modeStack.clear(); - this.interpreter.reset(); - } - nextToken() { - if (this._input == null) { - throw new Error("nextToken requires a non-null input stream."); - } - let tokenStartMarker = this._input.mark(); - try { - outer: - while (true) { - if (this._hitEOF) { - return this.emitEOF(); - } - 
this._token = void 0; - this._channel = Token_1.Token.DEFAULT_CHANNEL; - this._tokenStartCharIndex = this._input.index; - this._tokenStartCharPositionInLine = this.interpreter.charPositionInLine; - this._tokenStartLine = this.interpreter.line; - this._text = void 0; - do { - this._type = Token_1.Token.INVALID_TYPE; - let ttype; - try { - ttype = this.interpreter.match(this._input, this._mode); - } catch (e) { - if (e instanceof LexerNoViableAltException_1.LexerNoViableAltException) { - this.notifyListeners(e); - this.recover(e); - ttype = Lexer2.SKIP; - } else { - throw e; - } - } - if (this._input.LA(1) === IntStream_1.IntStream.EOF) { - this._hitEOF = true; - } - if (this._type === Token_1.Token.INVALID_TYPE) { - this._type = ttype; - } - if (this._type === Lexer2.SKIP) { - continue outer; - } - } while (this._type === Lexer2.MORE); - if (this._token == null) { - return this.emit(); - } - return this._token; - } - } finally { - this._input.release(tokenStartMarker); - } - } - skip() { - this._type = Lexer2.SKIP; - } - more() { - this._type = Lexer2.MORE; - } - mode(m) { - this._mode = m; - } - pushMode(m) { - if (LexerATNSimulator_1.LexerATNSimulator.debug) { - console.log("pushMode " + m); - } - this._modeStack.push(this._mode); - this.mode(m); - } - popMode() { - if (this._modeStack.isEmpty) { - throw new Error("EmptyStackException"); - } - if (LexerATNSimulator_1.LexerATNSimulator.debug) { - console.log("popMode back to " + this._modeStack.peek()); - } - this.mode(this._modeStack.pop()); - return this._mode; - } - get tokenFactory() { - return this._factory; - } - set tokenFactory(factory) { - this._factory = factory; - } - get inputStream() { - return this._input; - } - set inputStream(input) { - this.reset(false); - this._input = input; - this._tokenFactorySourcePair = {source: this, stream: this._input}; - } - get sourceName() { - return this._input.sourceName; - } - emit(token) { - if (!token) { - token = this._factory.create(this._tokenFactorySourcePair, 
this._type, this._text, this._channel, this._tokenStartCharIndex, this.charIndex - 1, this._tokenStartLine, this._tokenStartCharPositionInLine); - } - this._token = token; - return token; - } - emitEOF() { - let cpos = this.charPositionInLine; - let line = this.line; - let eof = this._factory.create(this._tokenFactorySourcePair, Token_1.Token.EOF, void 0, Token_1.Token.DEFAULT_CHANNEL, this._input.index, this._input.index - 1, line, cpos); - this.emit(eof); - return eof; - } - get line() { - return this.interpreter.line; - } - set line(line) { - this.interpreter.line = line; - } - get charPositionInLine() { - return this.interpreter.charPositionInLine; - } - set charPositionInLine(charPositionInLine) { - this.interpreter.charPositionInLine = charPositionInLine; - } - get charIndex() { - return this._input.index; - } - get text() { - if (this._text != null) { - return this._text; - } - return this.interpreter.getText(this._input); - } - set text(text) { - this._text = text; - } - get token() { - return this._token; - } - set token(_token) { - this._token = _token; - } - set type(ttype) { - this._type = ttype; - } - get type() { - return this._type; - } - set channel(channel) { - this._channel = channel; - } - get channel() { - return this._channel; - } - getAllTokens() { - let tokens2 = []; - let t = this.nextToken(); - while (t.type !== Token_1.Token.EOF) { - tokens2.push(t); - t = this.nextToken(); - } - return tokens2; - } - notifyListeners(e) { - let text = this._input.getText(Interval_1.Interval.of(this._tokenStartCharIndex, this._input.index)); - let msg = "token recognition error at: '" + this.getErrorDisplay(text) + "'"; - let listener = this.getErrorListenerDispatch(); - if (listener.syntaxError) { - listener.syntaxError(this, void 0, this._tokenStartLine, this._tokenStartCharPositionInLine, msg, e); - } - } - getErrorDisplay(s) { - if (typeof s === "number") { - switch (s) { - case Token_1.Token.EOF: - return ""; - case 10: - return "\\n"; - case 9: - 
return "\\t"; - case 13: - return "\\r"; - } - return String.fromCharCode(s); - } - return s.replace(/\n/g, "\\n").replace(/\t/g, "\\t").replace(/\r/g, "\\r"); - } - getCharErrorDisplay(c) { - let s = this.getErrorDisplay(c); - return "'" + s + "'"; - } - recover(re) { - if (re instanceof LexerNoViableAltException_1.LexerNoViableAltException) { - if (this._input.LA(1) !== IntStream_1.IntStream.EOF) { - this.interpreter.consume(this._input); - } - } else { - this._input.consume(); - } - } - }; - Lexer2.DEFAULT_MODE = 0; - Lexer2.MORE = -2; - Lexer2.SKIP = -3; - Lexer2.MIN_CHAR_VALUE = 0; - Lexer2.MAX_CHAR_VALUE = 1114111; - __decorate([ - Decorators_1.Override - ], Lexer2.prototype, "nextToken", null); - __decorate([ - Decorators_1.Override - ], Lexer2.prototype, "tokenFactory", null); - __decorate([ - Decorators_1.Override - ], Lexer2.prototype, "inputStream", null); - __decorate([ - Decorators_1.Override - ], Lexer2.prototype, "sourceName", null); - __decorate([ - Decorators_1.Override - ], Lexer2.prototype, "line", null); - __decorate([ - Decorators_1.Override - ], Lexer2.prototype, "charPositionInLine", null); - exports.Lexer = Lexer2; -}); - -// node_modules/antlr4ts/misc/IntervalSet.js -var require_IntervalSet = __commonJS((exports) => { - "use strict"; - var __decorate = exports && exports.__decorate || function(decorators, target, key, desc) { - var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; - if (typeof Reflect === "object" && typeof Reflect.decorate === "function") - r = Reflect.decorate(decorators, target, key, desc); - else - for (var i = decorators.length - 1; i >= 0; i--) - if (d = decorators[i]) - r = (c < 3 ? d(r) : c > 3 ? 
d(target, key, r) : d(target, key)) || r; - return c > 3 && r && Object.defineProperty(target, key, r), r; - }; - var __param = exports && exports.__param || function(paramIndex, decorator) { - return function(target, key) { - decorator(target, key, paramIndex); - }; - }; - Object.defineProperty(exports, "__esModule", {value: true}); - exports.IntervalSet = void 0; - var ArrayEqualityComparator_1 = require_ArrayEqualityComparator(); - var IntegerList_1 = require_IntegerList(); - var Interval_1 = require_Interval(); - var Lexer_1 = require_Lexer(); - var MurmurHash_1 = require_MurmurHash(); - var Decorators_1 = require_Decorators(); - var Token_1 = require_Token(); - var IntervalSet = class { - constructor(intervals) { - this.readonly = false; - if (intervals != null) { - this._intervals = intervals.slice(0); - } else { - this._intervals = []; - } - } - static get COMPLETE_CHAR_SET() { - if (IntervalSet._COMPLETE_CHAR_SET === void 0) { - IntervalSet._COMPLETE_CHAR_SET = IntervalSet.of(Lexer_1.Lexer.MIN_CHAR_VALUE, Lexer_1.Lexer.MAX_CHAR_VALUE); - IntervalSet._COMPLETE_CHAR_SET.setReadonly(true); - } - return IntervalSet._COMPLETE_CHAR_SET; - } - static get EMPTY_SET() { - if (IntervalSet._EMPTY_SET == null) { - IntervalSet._EMPTY_SET = new IntervalSet(); - IntervalSet._EMPTY_SET.setReadonly(true); - } - return IntervalSet._EMPTY_SET; - } - static of(a, b = a) { - let s = new IntervalSet(); - s.add(a, b); - return s; - } - clear() { - if (this.readonly) { - throw new Error("can't alter readonly IntervalSet"); - } - this._intervals.length = 0; - } - add(a, b = a) { - this.addRange(Interval_1.Interval.of(a, b)); - } - addRange(addition) { - if (this.readonly) { - throw new Error("can't alter readonly IntervalSet"); - } - if (addition.b < addition.a) { - return; - } - for (let i = 0; i < this._intervals.length; i++) { - let r = this._intervals[i]; - if (addition.equals(r)) { - return; - } - if (addition.adjacent(r) || !addition.disjoint(r)) { - let bigger = 
addition.union(r); - this._intervals[i] = bigger; - while (i < this._intervals.length - 1) { - i++; - let next = this._intervals[i]; - if (!bigger.adjacent(next) && bigger.disjoint(next)) { - break; - } - this._intervals.splice(i, 1); - i--; - this._intervals[i] = bigger.union(next); - } - return; - } - if (addition.startsBeforeDisjoint(r)) { - this._intervals.splice(i, 0, addition); - return; - } - } - this._intervals.push(addition); - } - static or(sets) { - let r = new IntervalSet(); - for (let s of sets) { - r.addAll(s); - } - return r; - } - addAll(set) { - if (set == null) { - return this; - } - if (set instanceof IntervalSet) { - let other = set; - let n = other._intervals.length; - for (let i = 0; i < n; i++) { - let I = other._intervals[i]; - this.add(I.a, I.b); - } - } else { - for (let value of set.toArray()) { - this.add(value); - } - } - return this; - } - complementRange(minElement, maxElement) { - return this.complement(IntervalSet.of(minElement, maxElement)); - } - complement(vocabulary) { - if (vocabulary.isNil) { - return IntervalSet.EMPTY_SET; - } - let vocabularyIS; - if (vocabulary instanceof IntervalSet) { - vocabularyIS = vocabulary; - } else { - vocabularyIS = new IntervalSet(); - vocabularyIS.addAll(vocabulary); - } - return vocabularyIS.subtract(this); - } - subtract(a) { - if (a == null || a.isNil) { - return new IntervalSet(this._intervals); - } - if (a instanceof IntervalSet) { - return IntervalSet.subtract(this, a); - } - let other = new IntervalSet(); - other.addAll(a); - return IntervalSet.subtract(this, other); - } - static subtract(left, right) { - if (left.isNil) { - return new IntervalSet(); - } - let result = new IntervalSet(left._intervals); - if (right.isNil) { - return result; - } - let resultI = 0; - let rightI = 0; - while (resultI < result._intervals.length && rightI < right._intervals.length) { - let resultInterval = result._intervals[resultI]; - let rightInterval = right._intervals[rightI]; - if (rightInterval.b < 
resultInterval.a) { - rightI++; - continue; - } - if (rightInterval.a > resultInterval.b) { - resultI++; - continue; - } - let beforeCurrent; - let afterCurrent; - if (rightInterval.a > resultInterval.a) { - beforeCurrent = new Interval_1.Interval(resultInterval.a, rightInterval.a - 1); - } - if (rightInterval.b < resultInterval.b) { - afterCurrent = new Interval_1.Interval(rightInterval.b + 1, resultInterval.b); - } - if (beforeCurrent) { - if (afterCurrent) { - result._intervals[resultI] = beforeCurrent; - result._intervals.splice(resultI + 1, 0, afterCurrent); - resultI++; - rightI++; - continue; - } else { - result._intervals[resultI] = beforeCurrent; - resultI++; - continue; - } - } else { - if (afterCurrent) { - result._intervals[resultI] = afterCurrent; - rightI++; - continue; - } else { - result._intervals.splice(resultI, 1); - continue; - } - } - } - return result; - } - or(a) { - let o = new IntervalSet(); - o.addAll(this); - o.addAll(a); - return o; - } - and(other) { - if (other.isNil) { - return new IntervalSet(); - } - let myIntervals = this._intervals; - let theirIntervals = other._intervals; - let intersection; - let mySize = myIntervals.length; - let theirSize = theirIntervals.length; - let i = 0; - let j = 0; - while (i < mySize && j < theirSize) { - let mine = myIntervals[i]; - let theirs = theirIntervals[j]; - if (mine.startsBeforeDisjoint(theirs)) { - i++; - } else if (theirs.startsBeforeDisjoint(mine)) { - j++; - } else if (mine.properlyContains(theirs)) { - if (!intersection) { - intersection = new IntervalSet(); - } - intersection.addRange(mine.intersection(theirs)); - j++; - } else if (theirs.properlyContains(mine)) { - if (!intersection) { - intersection = new IntervalSet(); - } - intersection.addRange(mine.intersection(theirs)); - i++; - } else if (!mine.disjoint(theirs)) { - if (!intersection) { - intersection = new IntervalSet(); - } - intersection.addRange(mine.intersection(theirs)); - if (mine.startsAfterNonDisjoint(theirs)) { - j++; 
- } else if (theirs.startsAfterNonDisjoint(mine)) { - i++; - } - } - } - if (!intersection) { - return new IntervalSet(); - } - return intersection; - } - contains(el) { - let n = this._intervals.length; - let l = 0; - let r = n - 1; - while (l <= r) { - let m = l + r >> 1; - let I = this._intervals[m]; - let a = I.a; - let b = I.b; - if (b < el) { - l = m + 1; - } else if (a > el) { - r = m - 1; - } else { - return true; - } - } - return false; - } - get isNil() { - return this._intervals == null || this._intervals.length === 0; - } - get maxElement() { - if (this.isNil) { - throw new RangeError("set is empty"); - } - let last = this._intervals[this._intervals.length - 1]; - return last.b; - } - get minElement() { - if (this.isNil) { - throw new RangeError("set is empty"); - } - return this._intervals[0].a; - } - get intervals() { - return this._intervals; - } - hashCode() { - let hash = MurmurHash_1.MurmurHash.initialize(); - for (let I of this._intervals) { - hash = MurmurHash_1.MurmurHash.update(hash, I.a); - hash = MurmurHash_1.MurmurHash.update(hash, I.b); - } - hash = MurmurHash_1.MurmurHash.finish(hash, this._intervals.length * 2); - return hash; - } - equals(o) { - if (o == null || !(o instanceof IntervalSet)) { - return false; - } - return ArrayEqualityComparator_1.ArrayEqualityComparator.INSTANCE.equals(this._intervals, o._intervals); - } - toString(elemAreChar = false) { - let buf = ""; - if (this._intervals == null || this._intervals.length === 0) { - return "{}"; - } - if (this.size > 1) { - buf += "{"; - } - let first = true; - for (let I of this._intervals) { - if (first) { - first = false; - } else { - buf += ", "; - } - let a = I.a; - let b = I.b; - if (a === b) { - if (a === Token_1.Token.EOF) { - buf += ""; - } else if (elemAreChar) { - buf += "'" + String.fromCodePoint(a) + "'"; - } else { - buf += a; - } - } else { - if (elemAreChar) { - buf += "'" + String.fromCodePoint(a) + "'..'" + String.fromCodePoint(b) + "'"; - } else { - buf += a + ".." 
+ b; - } - } - } - if (this.size > 1) { - buf += "}"; - } - return buf; - } - toStringVocabulary(vocabulary) { - if (this._intervals == null || this._intervals.length === 0) { - return "{}"; - } - let buf = ""; - if (this.size > 1) { - buf += "{"; - } - let first = true; - for (let I of this._intervals) { - if (first) { - first = false; - } else { - buf += ", "; - } - let a = I.a; - let b = I.b; - if (a === b) { - buf += this.elementName(vocabulary, a); - } else { - for (let i = a; i <= b; i++) { - if (i > a) { - buf += ", "; - } - buf += this.elementName(vocabulary, i); - } - } - } - if (this.size > 1) { - buf += "}"; - } - return buf; - } - elementName(vocabulary, a) { - if (a === Token_1.Token.EOF) { - return ""; - } else if (a === Token_1.Token.EPSILON) { - return ""; - } else { - return vocabulary.getDisplayName(a); - } - } - get size() { - let n = 0; - let numIntervals = this._intervals.length; - if (numIntervals === 1) { - let firstInterval = this._intervals[0]; - return firstInterval.b - firstInterval.a + 1; - } - for (let i = 0; i < numIntervals; i++) { - let I = this._intervals[i]; - n += I.b - I.a + 1; - } - return n; - } - toIntegerList() { - let values = new IntegerList_1.IntegerList(this.size); - let n = this._intervals.length; - for (let i = 0; i < n; i++) { - let I = this._intervals[i]; - let a = I.a; - let b = I.b; - for (let v = a; v <= b; v++) { - values.add(v); - } - } - return values; - } - toSet() { - let s = new Set(); - for (let I of this._intervals) { - let a = I.a; - let b = I.b; - for (let v = a; v <= b; v++) { - s.add(v); - } - } - return s; - } - toArray() { - let values = new Array(); - let n = this._intervals.length; - for (let i = 0; i < n; i++) { - let I = this._intervals[i]; - let a = I.a; - let b = I.b; - for (let v = a; v <= b; v++) { - values.push(v); - } - } - return values; - } - remove(el) { - if (this.readonly) { - throw new Error("can't alter readonly IntervalSet"); - } - let n = this._intervals.length; - for (let i = 0; i 
< n; i++) { - let I = this._intervals[i]; - let a = I.a; - let b = I.b; - if (el < a) { - break; - } - if (el === a && el === b) { - this._intervals.splice(i, 1); - break; - } - if (el === a) { - this._intervals[i] = Interval_1.Interval.of(I.a + 1, I.b); - break; - } - if (el === b) { - this._intervals[i] = Interval_1.Interval.of(I.a, I.b - 1); - break; - } - if (el > a && el < b) { - let oldb = I.b; - this._intervals[i] = Interval_1.Interval.of(I.a, el - 1); - this.add(el + 1, oldb); - } - } - } - get isReadonly() { - return this.readonly; - } - setReadonly(readonly) { - if (this.readonly && !readonly) { - throw new Error("can't alter readonly IntervalSet"); - } - this.readonly = readonly; - } - }; - __decorate([ - Decorators_1.Override - ], IntervalSet.prototype, "addAll", null); - __decorate([ - Decorators_1.Override - ], IntervalSet.prototype, "complement", null); - __decorate([ - Decorators_1.Override - ], IntervalSet.prototype, "subtract", null); - __decorate([ - Decorators_1.Override - ], IntervalSet.prototype, "or", null); - __decorate([ - Decorators_1.Override - ], IntervalSet.prototype, "and", null); - __decorate([ - Decorators_1.Override - ], IntervalSet.prototype, "contains", null); - __decorate([ - Decorators_1.Override - ], IntervalSet.prototype, "isNil", null); - __decorate([ - Decorators_1.Override - ], IntervalSet.prototype, "hashCode", null); - __decorate([ - Decorators_1.Override - ], IntervalSet.prototype, "equals", null); - __decorate([ - __param(0, Decorators_1.NotNull) - ], IntervalSet.prototype, "toStringVocabulary", null); - __decorate([ - Decorators_1.NotNull, - __param(0, Decorators_1.NotNull) - ], IntervalSet.prototype, "elementName", null); - __decorate([ - Decorators_1.Override - ], IntervalSet.prototype, "size", null); - __decorate([ - Decorators_1.Override - ], IntervalSet.prototype, "remove", null); - __decorate([ - Decorators_1.NotNull - ], IntervalSet, "of", null); - __decorate([ - Decorators_1.NotNull - ], IntervalSet, 
"subtract", null); - exports.IntervalSet = IntervalSet; -}); - -// node_modules/antlr4ts/atn/ATNDeserializationOptions.js -var require_ATNDeserializationOptions = __commonJS((exports) => { - "use strict"; - var __decorate = exports && exports.__decorate || function(decorators, target, key, desc) { - var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; - if (typeof Reflect === "object" && typeof Reflect.decorate === "function") - r = Reflect.decorate(decorators, target, key, desc); - else - for (var i = decorators.length - 1; i >= 0; i--) - if (d = decorators[i]) - r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r; - return c > 3 && r && Object.defineProperty(target, key, r), r; - }; - Object.defineProperty(exports, "__esModule", {value: true}); - exports.ATNDeserializationOptions = void 0; - var Decorators_1 = require_Decorators(); - var ATNDeserializationOptions = class { - constructor(options) { - this.readOnly = false; - if (options) { - this.verifyATN = options.verifyATN; - this.generateRuleBypassTransitions = options.generateRuleBypassTransitions; - this.optimize = options.optimize; - } else { - this.verifyATN = true; - this.generateRuleBypassTransitions = false; - this.optimize = true; - } - } - static get defaultOptions() { - if (ATNDeserializationOptions._defaultOptions == null) { - ATNDeserializationOptions._defaultOptions = new ATNDeserializationOptions(); - ATNDeserializationOptions._defaultOptions.makeReadOnly(); - } - return ATNDeserializationOptions._defaultOptions; - } - get isReadOnly() { - return this.readOnly; - } - makeReadOnly() { - this.readOnly = true; - } - get isVerifyATN() { - return this.verifyATN; - } - set isVerifyATN(verifyATN) { - this.throwIfReadOnly(); - this.verifyATN = verifyATN; - } - get isGenerateRuleBypassTransitions() { - return this.generateRuleBypassTransitions; - } - set isGenerateRuleBypassTransitions(generateRuleBypassTransitions) 
{ - this.throwIfReadOnly(); - this.generateRuleBypassTransitions = generateRuleBypassTransitions; - } - get isOptimize() { - return this.optimize; - } - set isOptimize(optimize) { - this.throwIfReadOnly(); - this.optimize = optimize; - } - throwIfReadOnly() { - if (this.isReadOnly) { - throw new Error("The object is read only."); - } - } - }; - __decorate([ - Decorators_1.NotNull - ], ATNDeserializationOptions, "defaultOptions", null); - exports.ATNDeserializationOptions = ATNDeserializationOptions; -}); - -// node_modules/antlr4ts/atn/ActionTransition.js -var require_ActionTransition = __commonJS((exports) => { - "use strict"; - var __decorate = exports && exports.__decorate || function(decorators, target, key, desc) { - var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; - if (typeof Reflect === "object" && typeof Reflect.decorate === "function") - r = Reflect.decorate(decorators, target, key, desc); - else - for (var i = decorators.length - 1; i >= 0; i--) - if (d = decorators[i]) - r = (c < 3 ? d(r) : c > 3 ? 
d(target, key, r) : d(target, key)) || r; - return c > 3 && r && Object.defineProperty(target, key, r), r; - }; - var __param = exports && exports.__param || function(paramIndex, decorator) { - return function(target, key) { - decorator(target, key, paramIndex); - }; - }; - Object.defineProperty(exports, "__esModule", {value: true}); - exports.ActionTransition = void 0; - var Decorators_1 = require_Decorators(); - var Transition_1 = require_Transition(); - var ActionTransition = class ActionTransition extends Transition_1.Transition { - constructor(target, ruleIndex, actionIndex = -1, isCtxDependent = false) { - super(target); - this.ruleIndex = ruleIndex; - this.actionIndex = actionIndex; - this.isCtxDependent = isCtxDependent; - } - get serializationType() { - return 6; - } - get isEpsilon() { - return true; - } - matches(symbol, minVocabSymbol, maxVocabSymbol) { - return false; - } - toString() { - return "action_" + this.ruleIndex + ":" + this.actionIndex; - } - }; - __decorate([ - Decorators_1.Override - ], ActionTransition.prototype, "serializationType", null); - __decorate([ - Decorators_1.Override - ], ActionTransition.prototype, "isEpsilon", null); - __decorate([ - Decorators_1.Override - ], ActionTransition.prototype, "matches", null); - __decorate([ - Decorators_1.Override - ], ActionTransition.prototype, "toString", null); - ActionTransition = __decorate([ - __param(0, Decorators_1.NotNull) - ], ActionTransition); - exports.ActionTransition = ActionTransition; -}); - -// node_modules/antlr4ts/atn/AtomTransition.js -var require_AtomTransition = __commonJS((exports) => { - "use strict"; - var __decorate = exports && exports.__decorate || function(decorators, target, key, desc) { - var c = arguments.length, r = c < 3 ? target : desc === null ? 
desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; - if (typeof Reflect === "object" && typeof Reflect.decorate === "function") - r = Reflect.decorate(decorators, target, key, desc); - else - for (var i = decorators.length - 1; i >= 0; i--) - if (d = decorators[i]) - r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r; - return c > 3 && r && Object.defineProperty(target, key, r), r; - }; - var __param = exports && exports.__param || function(paramIndex, decorator) { - return function(target, key) { - decorator(target, key, paramIndex); - }; - }; - Object.defineProperty(exports, "__esModule", {value: true}); - exports.AtomTransition = void 0; - var IntervalSet_1 = require_IntervalSet(); - var Decorators_1 = require_Decorators(); - var Transition_1 = require_Transition(); - var AtomTransition2 = class AtomTransition extends Transition_1.Transition { - constructor(target, label) { - super(target); - this._label = label; - } - get serializationType() { - return 5; - } - get label() { - return IntervalSet_1.IntervalSet.of(this._label); - } - matches(symbol, minVocabSymbol, maxVocabSymbol) { - return this._label === symbol; - } - toString() { - return String(this.label); - } - }; - __decorate([ - Decorators_1.Override - ], AtomTransition2.prototype, "serializationType", null); - __decorate([ - Decorators_1.Override, - Decorators_1.NotNull - ], AtomTransition2.prototype, "label", null); - __decorate([ - Decorators_1.Override - ], AtomTransition2.prototype, "matches", null); - __decorate([ - Decorators_1.Override, - Decorators_1.NotNull - ], AtomTransition2.prototype, "toString", null); - AtomTransition2 = __decorate([ - __param(0, Decorators_1.NotNull) - ], AtomTransition2); - exports.AtomTransition = AtomTransition2; -}); - -// node_modules/antlr4ts/atn/BlockStartState.js -var require_BlockStartState = __commonJS((exports) => { - "use strict"; - Object.defineProperty(exports, "__esModule", {value: true}); - exports.BlockStartState = void 0; - 
var DecisionState_1 = require_DecisionState(); - var BlockStartState = class extends DecisionState_1.DecisionState { - }; - exports.BlockStartState = BlockStartState; -}); - -// node_modules/antlr4ts/atn/BasicBlockStartState.js -var require_BasicBlockStartState = __commonJS((exports) => { - "use strict"; - var __decorate = exports && exports.__decorate || function(decorators, target, key, desc) { - var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; - if (typeof Reflect === "object" && typeof Reflect.decorate === "function") - r = Reflect.decorate(decorators, target, key, desc); - else - for (var i = decorators.length - 1; i >= 0; i--) - if (d = decorators[i]) - r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r; - return c > 3 && r && Object.defineProperty(target, key, r), r; - }; - Object.defineProperty(exports, "__esModule", {value: true}); - exports.BasicBlockStartState = void 0; - var ATNStateType_1 = require_ATNStateType(); - var BlockStartState_1 = require_BlockStartState(); - var Decorators_1 = require_Decorators(); - var BasicBlockStartState = class extends BlockStartState_1.BlockStartState { - get stateType() { - return ATNStateType_1.ATNStateType.BLOCK_START; - } - }; - __decorate([ - Decorators_1.Override - ], BasicBlockStartState.prototype, "stateType", null); - exports.BasicBlockStartState = BasicBlockStartState; -}); - -// node_modules/antlr4ts/atn/BlockEndState.js -var require_BlockEndState = __commonJS((exports) => { - "use strict"; - var __decorate = exports && exports.__decorate || function(decorators, target, key, desc) { - var c = arguments.length, r = c < 3 ? target : desc === null ? 
desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; - if (typeof Reflect === "object" && typeof Reflect.decorate === "function") - r = Reflect.decorate(decorators, target, key, desc); - else - for (var i = decorators.length - 1; i >= 0; i--) - if (d = decorators[i]) - r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r; - return c > 3 && r && Object.defineProperty(target, key, r), r; - }; - Object.defineProperty(exports, "__esModule", {value: true}); - exports.BlockEndState = void 0; - var ATNState_1 = require_ATNState(); - var ATNStateType_1 = require_ATNStateType(); - var Decorators_1 = require_Decorators(); - var BlockEndState = class extends ATNState_1.ATNState { - get stateType() { - return ATNStateType_1.ATNStateType.BLOCK_END; - } - }; - __decorate([ - Decorators_1.Override - ], BlockEndState.prototype, "stateType", null); - exports.BlockEndState = BlockEndState; -}); - -// node_modules/antlr4ts/atn/EpsilonTransition.js -var require_EpsilonTransition = __commonJS((exports) => { - "use strict"; - var __decorate = exports && exports.__decorate || function(decorators, target, key, desc) { - var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; - if (typeof Reflect === "object" && typeof Reflect.decorate === "function") - r = Reflect.decorate(decorators, target, key, desc); - else - for (var i = decorators.length - 1; i >= 0; i--) - if (d = decorators[i]) - r = (c < 3 ? d(r) : c > 3 ? 
d(target, key, r) : d(target, key)) || r; - return c > 3 && r && Object.defineProperty(target, key, r), r; - }; - var __param = exports && exports.__param || function(paramIndex, decorator) { - return function(target, key) { - decorator(target, key, paramIndex); - }; - }; - Object.defineProperty(exports, "__esModule", {value: true}); - exports.EpsilonTransition = void 0; - var Decorators_1 = require_Decorators(); - var Transition_1 = require_Transition(); - var EpsilonTransition = class EpsilonTransition extends Transition_1.Transition { - constructor(target, outermostPrecedenceReturn = -1) { - super(target); - this._outermostPrecedenceReturn = outermostPrecedenceReturn; - } - get outermostPrecedenceReturn() { - return this._outermostPrecedenceReturn; - } - get serializationType() { - return 1; - } - get isEpsilon() { - return true; - } - matches(symbol, minVocabSymbol, maxVocabSymbol) { - return false; - } - toString() { - return "epsilon"; - } - }; - __decorate([ - Decorators_1.Override - ], EpsilonTransition.prototype, "serializationType", null); - __decorate([ - Decorators_1.Override - ], EpsilonTransition.prototype, "isEpsilon", null); - __decorate([ - Decorators_1.Override - ], EpsilonTransition.prototype, "matches", null); - __decorate([ - Decorators_1.Override, - Decorators_1.NotNull - ], EpsilonTransition.prototype, "toString", null); - EpsilonTransition = __decorate([ - __param(0, Decorators_1.NotNull) - ], EpsilonTransition); - exports.EpsilonTransition = EpsilonTransition; -}); - -// node_modules/antlr4ts/atn/LexerChannelAction.js -var require_LexerChannelAction = __commonJS((exports) => { - "use strict"; - var __decorate = exports && exports.__decorate || function(decorators, target, key, desc) { - var c = arguments.length, r = c < 3 ? target : desc === null ? 
desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; - if (typeof Reflect === "object" && typeof Reflect.decorate === "function") - r = Reflect.decorate(decorators, target, key, desc); - else - for (var i = decorators.length - 1; i >= 0; i--) - if (d = decorators[i]) - r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r; - return c > 3 && r && Object.defineProperty(target, key, r), r; - }; - var __param = exports && exports.__param || function(paramIndex, decorator) { - return function(target, key) { - decorator(target, key, paramIndex); - }; - }; - Object.defineProperty(exports, "__esModule", {value: true}); - exports.LexerChannelAction = void 0; - var MurmurHash_1 = require_MurmurHash(); - var Decorators_1 = require_Decorators(); - var LexerChannelAction = class { - constructor(channel) { - this._channel = channel; - } - get channel() { - return this._channel; - } - get actionType() { - return 0; - } - get isPositionDependent() { - return false; - } - execute(lexer) { - lexer.channel = this._channel; - } - hashCode() { - let hash = MurmurHash_1.MurmurHash.initialize(); - hash = MurmurHash_1.MurmurHash.update(hash, this.actionType); - hash = MurmurHash_1.MurmurHash.update(hash, this._channel); - return MurmurHash_1.MurmurHash.finish(hash, 2); - } - equals(obj) { - if (obj === this) { - return true; - } else if (!(obj instanceof LexerChannelAction)) { - return false; - } - return this._channel === obj._channel; - } - toString() { - return `channel(${this._channel})`; - } - }; - __decorate([ - Decorators_1.Override - ], LexerChannelAction.prototype, "actionType", null); - __decorate([ - Decorators_1.Override - ], LexerChannelAction.prototype, "isPositionDependent", null); - __decorate([ - Decorators_1.Override, - __param(0, Decorators_1.NotNull) - ], LexerChannelAction.prototype, "execute", null); - __decorate([ - Decorators_1.Override - ], LexerChannelAction.prototype, "hashCode", null); - __decorate([ - Decorators_1.Override - ], 
LexerChannelAction.prototype, "equals", null); - __decorate([ - Decorators_1.Override - ], LexerChannelAction.prototype, "toString", null); - exports.LexerChannelAction = LexerChannelAction; -}); - -// node_modules/antlr4ts/atn/LexerCustomAction.js -var require_LexerCustomAction = __commonJS((exports) => { - "use strict"; - var __decorate = exports && exports.__decorate || function(decorators, target, key, desc) { - var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; - if (typeof Reflect === "object" && typeof Reflect.decorate === "function") - r = Reflect.decorate(decorators, target, key, desc); - else - for (var i = decorators.length - 1; i >= 0; i--) - if (d = decorators[i]) - r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r; - return c > 3 && r && Object.defineProperty(target, key, r), r; - }; - var __param = exports && exports.__param || function(paramIndex, decorator) { - return function(target, key) { - decorator(target, key, paramIndex); - }; - }; - Object.defineProperty(exports, "__esModule", {value: true}); - exports.LexerCustomAction = void 0; - var MurmurHash_1 = require_MurmurHash(); - var Decorators_1 = require_Decorators(); - var LexerCustomAction = class { - constructor(ruleIndex, actionIndex) { - this._ruleIndex = ruleIndex; - this._actionIndex = actionIndex; - } - get ruleIndex() { - return this._ruleIndex; - } - get actionIndex() { - return this._actionIndex; - } - get actionType() { - return 1; - } - get isPositionDependent() { - return true; - } - execute(lexer) { - lexer.action(void 0, this._ruleIndex, this._actionIndex); - } - hashCode() { - let hash = MurmurHash_1.MurmurHash.initialize(); - hash = MurmurHash_1.MurmurHash.update(hash, this.actionType); - hash = MurmurHash_1.MurmurHash.update(hash, this._ruleIndex); - hash = MurmurHash_1.MurmurHash.update(hash, this._actionIndex); - return MurmurHash_1.MurmurHash.finish(hash, 3); - } - equals(obj) { - 
if (obj === this) { - return true; - } else if (!(obj instanceof LexerCustomAction)) { - return false; - } - return this._ruleIndex === obj._ruleIndex && this._actionIndex === obj._actionIndex; - } - }; - __decorate([ - Decorators_1.Override - ], LexerCustomAction.prototype, "actionType", null); - __decorate([ - Decorators_1.Override - ], LexerCustomAction.prototype, "isPositionDependent", null); - __decorate([ - Decorators_1.Override, - __param(0, Decorators_1.NotNull) - ], LexerCustomAction.prototype, "execute", null); - __decorate([ - Decorators_1.Override - ], LexerCustomAction.prototype, "hashCode", null); - __decorate([ - Decorators_1.Override - ], LexerCustomAction.prototype, "equals", null); - exports.LexerCustomAction = LexerCustomAction; -}); - -// node_modules/antlr4ts/atn/LexerModeAction.js -var require_LexerModeAction = __commonJS((exports) => { - "use strict"; - var __decorate = exports && exports.__decorate || function(decorators, target, key, desc) { - var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; - if (typeof Reflect === "object" && typeof Reflect.decorate === "function") - r = Reflect.decorate(decorators, target, key, desc); - else - for (var i = decorators.length - 1; i >= 0; i--) - if (d = decorators[i]) - r = (c < 3 ? d(r) : c > 3 ? 
d(target, key, r) : d(target, key)) || r; - return c > 3 && r && Object.defineProperty(target, key, r), r; - }; - var __param = exports && exports.__param || function(paramIndex, decorator) { - return function(target, key) { - decorator(target, key, paramIndex); - }; - }; - Object.defineProperty(exports, "__esModule", {value: true}); - exports.LexerModeAction = void 0; - var MurmurHash_1 = require_MurmurHash(); - var Decorators_1 = require_Decorators(); - var LexerModeAction = class { - constructor(mode) { - this._mode = mode; - } - get mode() { - return this._mode; - } - get actionType() { - return 2; - } - get isPositionDependent() { - return false; - } - execute(lexer) { - lexer.mode(this._mode); - } - hashCode() { - let hash = MurmurHash_1.MurmurHash.initialize(); - hash = MurmurHash_1.MurmurHash.update(hash, this.actionType); - hash = MurmurHash_1.MurmurHash.update(hash, this._mode); - return MurmurHash_1.MurmurHash.finish(hash, 2); - } - equals(obj) { - if (obj === this) { - return true; - } else if (!(obj instanceof LexerModeAction)) { - return false; - } - return this._mode === obj._mode; - } - toString() { - return `mode(${this._mode})`; - } - }; - __decorate([ - Decorators_1.Override - ], LexerModeAction.prototype, "actionType", null); - __decorate([ - Decorators_1.Override - ], LexerModeAction.prototype, "isPositionDependent", null); - __decorate([ - Decorators_1.Override, - __param(0, Decorators_1.NotNull) - ], LexerModeAction.prototype, "execute", null); - __decorate([ - Decorators_1.Override - ], LexerModeAction.prototype, "hashCode", null); - __decorate([ - Decorators_1.Override - ], LexerModeAction.prototype, "equals", null); - __decorate([ - Decorators_1.Override - ], LexerModeAction.prototype, "toString", null); - exports.LexerModeAction = LexerModeAction; -}); - -// node_modules/antlr4ts/atn/LexerMoreAction.js -var require_LexerMoreAction = __commonJS((exports) => { - "use strict"; - var __decorate = exports && exports.__decorate || 
function(decorators, target, key, desc) { - var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; - if (typeof Reflect === "object" && typeof Reflect.decorate === "function") - r = Reflect.decorate(decorators, target, key, desc); - else - for (var i = decorators.length - 1; i >= 0; i--) - if (d = decorators[i]) - r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r; - return c > 3 && r && Object.defineProperty(target, key, r), r; - }; - var __param = exports && exports.__param || function(paramIndex, decorator) { - return function(target, key) { - decorator(target, key, paramIndex); - }; - }; - Object.defineProperty(exports, "__esModule", {value: true}); - exports.LexerMoreAction = void 0; - var MurmurHash_1 = require_MurmurHash(); - var Decorators_1 = require_Decorators(); - var LexerMoreAction = class { - constructor() { - } - get actionType() { - return 3; - } - get isPositionDependent() { - return false; - } - execute(lexer) { - lexer.more(); - } - hashCode() { - let hash = MurmurHash_1.MurmurHash.initialize(); - hash = MurmurHash_1.MurmurHash.update(hash, this.actionType); - return MurmurHash_1.MurmurHash.finish(hash, 1); - } - equals(obj) { - return obj === this; - } - toString() { - return "more"; - } - }; - __decorate([ - Decorators_1.Override - ], LexerMoreAction.prototype, "actionType", null); - __decorate([ - Decorators_1.Override - ], LexerMoreAction.prototype, "isPositionDependent", null); - __decorate([ - Decorators_1.Override, - __param(0, Decorators_1.NotNull) - ], LexerMoreAction.prototype, "execute", null); - __decorate([ - Decorators_1.Override - ], LexerMoreAction.prototype, "hashCode", null); - __decorate([ - Decorators_1.Override - ], LexerMoreAction.prototype, "equals", null); - __decorate([ - Decorators_1.Override - ], LexerMoreAction.prototype, "toString", null); - exports.LexerMoreAction = LexerMoreAction; - (function(LexerMoreAction2) { - 
LexerMoreAction2.INSTANCE = new LexerMoreAction2(); - })(LexerMoreAction = exports.LexerMoreAction || (exports.LexerMoreAction = {})); -}); - -// node_modules/antlr4ts/atn/LexerPopModeAction.js -var require_LexerPopModeAction = __commonJS((exports) => { - "use strict"; - var __decorate = exports && exports.__decorate || function(decorators, target, key, desc) { - var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; - if (typeof Reflect === "object" && typeof Reflect.decorate === "function") - r = Reflect.decorate(decorators, target, key, desc); - else - for (var i = decorators.length - 1; i >= 0; i--) - if (d = decorators[i]) - r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r; - return c > 3 && r && Object.defineProperty(target, key, r), r; - }; - var __param = exports && exports.__param || function(paramIndex, decorator) { - return function(target, key) { - decorator(target, key, paramIndex); - }; - }; - Object.defineProperty(exports, "__esModule", {value: true}); - exports.LexerPopModeAction = void 0; - var MurmurHash_1 = require_MurmurHash(); - var Decorators_1 = require_Decorators(); - var LexerPopModeAction = class { - constructor() { - } - get actionType() { - return 4; - } - get isPositionDependent() { - return false; - } - execute(lexer) { - lexer.popMode(); - } - hashCode() { - let hash = MurmurHash_1.MurmurHash.initialize(); - hash = MurmurHash_1.MurmurHash.update(hash, this.actionType); - return MurmurHash_1.MurmurHash.finish(hash, 1); - } - equals(obj) { - return obj === this; - } - toString() { - return "popMode"; - } - }; - __decorate([ - Decorators_1.Override - ], LexerPopModeAction.prototype, "actionType", null); - __decorate([ - Decorators_1.Override - ], LexerPopModeAction.prototype, "isPositionDependent", null); - __decorate([ - Decorators_1.Override, - __param(0, Decorators_1.NotNull) - ], LexerPopModeAction.prototype, "execute", null); - __decorate([ - 
Decorators_1.Override - ], LexerPopModeAction.prototype, "hashCode", null); - __decorate([ - Decorators_1.Override - ], LexerPopModeAction.prototype, "equals", null); - __decorate([ - Decorators_1.Override - ], LexerPopModeAction.prototype, "toString", null); - exports.LexerPopModeAction = LexerPopModeAction; - (function(LexerPopModeAction2) { - LexerPopModeAction2.INSTANCE = new LexerPopModeAction2(); - })(LexerPopModeAction = exports.LexerPopModeAction || (exports.LexerPopModeAction = {})); -}); - -// node_modules/antlr4ts/atn/LexerPushModeAction.js -var require_LexerPushModeAction = __commonJS((exports) => { - "use strict"; - var __decorate = exports && exports.__decorate || function(decorators, target, key, desc) { - var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; - if (typeof Reflect === "object" && typeof Reflect.decorate === "function") - r = Reflect.decorate(decorators, target, key, desc); - else - for (var i = decorators.length - 1; i >= 0; i--) - if (d = decorators[i]) - r = (c < 3 ? d(r) : c > 3 ? 
d(target, key, r) : d(target, key)) || r; - return c > 3 && r && Object.defineProperty(target, key, r), r; - }; - var __param = exports && exports.__param || function(paramIndex, decorator) { - return function(target, key) { - decorator(target, key, paramIndex); - }; - }; - Object.defineProperty(exports, "__esModule", {value: true}); - exports.LexerPushModeAction = void 0; - var MurmurHash_1 = require_MurmurHash(); - var Decorators_1 = require_Decorators(); - var LexerPushModeAction = class { - constructor(mode) { - this._mode = mode; - } - get mode() { - return this._mode; - } - get actionType() { - return 5; - } - get isPositionDependent() { - return false; - } - execute(lexer) { - lexer.pushMode(this._mode); - } - hashCode() { - let hash = MurmurHash_1.MurmurHash.initialize(); - hash = MurmurHash_1.MurmurHash.update(hash, this.actionType); - hash = MurmurHash_1.MurmurHash.update(hash, this._mode); - return MurmurHash_1.MurmurHash.finish(hash, 2); - } - equals(obj) { - if (obj === this) { - return true; - } else if (!(obj instanceof LexerPushModeAction)) { - return false; - } - return this._mode === obj._mode; - } - toString() { - return `pushMode(${this._mode})`; - } - }; - __decorate([ - Decorators_1.Override - ], LexerPushModeAction.prototype, "actionType", null); - __decorate([ - Decorators_1.Override - ], LexerPushModeAction.prototype, "isPositionDependent", null); - __decorate([ - Decorators_1.Override, - __param(0, Decorators_1.NotNull) - ], LexerPushModeAction.prototype, "execute", null); - __decorate([ - Decorators_1.Override - ], LexerPushModeAction.prototype, "hashCode", null); - __decorate([ - Decorators_1.Override - ], LexerPushModeAction.prototype, "equals", null); - __decorate([ - Decorators_1.Override - ], LexerPushModeAction.prototype, "toString", null); - exports.LexerPushModeAction = LexerPushModeAction; -}); - -// node_modules/antlr4ts/atn/LexerSkipAction.js -var require_LexerSkipAction = __commonJS((exports) => { - "use strict"; - var 
__decorate = exports && exports.__decorate || function(decorators, target, key, desc) { - var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; - if (typeof Reflect === "object" && typeof Reflect.decorate === "function") - r = Reflect.decorate(decorators, target, key, desc); - else - for (var i = decorators.length - 1; i >= 0; i--) - if (d = decorators[i]) - r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r; - return c > 3 && r && Object.defineProperty(target, key, r), r; - }; - var __param = exports && exports.__param || function(paramIndex, decorator) { - return function(target, key) { - decorator(target, key, paramIndex); - }; - }; - Object.defineProperty(exports, "__esModule", {value: true}); - exports.LexerSkipAction = void 0; - var MurmurHash_1 = require_MurmurHash(); - var Decorators_1 = require_Decorators(); - var LexerSkipAction = class { - constructor() { - } - get actionType() { - return 6; - } - get isPositionDependent() { - return false; - } - execute(lexer) { - lexer.skip(); - } - hashCode() { - let hash = MurmurHash_1.MurmurHash.initialize(); - hash = MurmurHash_1.MurmurHash.update(hash, this.actionType); - return MurmurHash_1.MurmurHash.finish(hash, 1); - } - equals(obj) { - return obj === this; - } - toString() { - return "skip"; - } - }; - __decorate([ - Decorators_1.Override - ], LexerSkipAction.prototype, "actionType", null); - __decorate([ - Decorators_1.Override - ], LexerSkipAction.prototype, "isPositionDependent", null); - __decorate([ - Decorators_1.Override, - __param(0, Decorators_1.NotNull) - ], LexerSkipAction.prototype, "execute", null); - __decorate([ - Decorators_1.Override - ], LexerSkipAction.prototype, "hashCode", null); - __decorate([ - Decorators_1.Override - ], LexerSkipAction.prototype, "equals", null); - __decorate([ - Decorators_1.Override - ], LexerSkipAction.prototype, "toString", null); - exports.LexerSkipAction = LexerSkipAction; - 
(function(LexerSkipAction2) { - LexerSkipAction2.INSTANCE = new LexerSkipAction2(); - })(LexerSkipAction = exports.LexerSkipAction || (exports.LexerSkipAction = {})); -}); - -// node_modules/antlr4ts/atn/LexerTypeAction.js -var require_LexerTypeAction = __commonJS((exports) => { - "use strict"; - var __decorate = exports && exports.__decorate || function(decorators, target, key, desc) { - var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; - if (typeof Reflect === "object" && typeof Reflect.decorate === "function") - r = Reflect.decorate(decorators, target, key, desc); - else - for (var i = decorators.length - 1; i >= 0; i--) - if (d = decorators[i]) - r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r; - return c > 3 && r && Object.defineProperty(target, key, r), r; - }; - var __param = exports && exports.__param || function(paramIndex, decorator) { - return function(target, key) { - decorator(target, key, paramIndex); - }; - }; - Object.defineProperty(exports, "__esModule", {value: true}); - exports.LexerTypeAction = void 0; - var MurmurHash_1 = require_MurmurHash(); - var Decorators_1 = require_Decorators(); - var LexerTypeAction = class { - constructor(type) { - this._type = type; - } - get type() { - return this._type; - } - get actionType() { - return 7; - } - get isPositionDependent() { - return false; - } - execute(lexer) { - lexer.type = this._type; - } - hashCode() { - let hash = MurmurHash_1.MurmurHash.initialize(); - hash = MurmurHash_1.MurmurHash.update(hash, this.actionType); - hash = MurmurHash_1.MurmurHash.update(hash, this._type); - return MurmurHash_1.MurmurHash.finish(hash, 2); - } - equals(obj) { - if (obj === this) { - return true; - } else if (!(obj instanceof LexerTypeAction)) { - return false; - } - return this._type === obj._type; - } - toString() { - return `type(${this._type})`; - } - }; - __decorate([ - Decorators_1.Override - ], 
LexerTypeAction.prototype, "actionType", null); - __decorate([ - Decorators_1.Override - ], LexerTypeAction.prototype, "isPositionDependent", null); - __decorate([ - Decorators_1.Override, - __param(0, Decorators_1.NotNull) - ], LexerTypeAction.prototype, "execute", null); - __decorate([ - Decorators_1.Override - ], LexerTypeAction.prototype, "hashCode", null); - __decorate([ - Decorators_1.Override - ], LexerTypeAction.prototype, "equals", null); - __decorate([ - Decorators_1.Override - ], LexerTypeAction.prototype, "toString", null); - exports.LexerTypeAction = LexerTypeAction; -}); - -// node_modules/antlr4ts/atn/LoopEndState.js -var require_LoopEndState = __commonJS((exports) => { - "use strict"; - var __decorate = exports && exports.__decorate || function(decorators, target, key, desc) { - var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; - if (typeof Reflect === "object" && typeof Reflect.decorate === "function") - r = Reflect.decorate(decorators, target, key, desc); - else - for (var i = decorators.length - 1; i >= 0; i--) - if (d = decorators[i]) - r = (c < 3 ? d(r) : c > 3 ? 
d(target, key, r) : d(target, key)) || r; - return c > 3 && r && Object.defineProperty(target, key, r), r; - }; - Object.defineProperty(exports, "__esModule", {value: true}); - exports.LoopEndState = void 0; - var ATNState_1 = require_ATNState(); - var ATNStateType_1 = require_ATNStateType(); - var Decorators_1 = require_Decorators(); - var LoopEndState = class extends ATNState_1.ATNState { - get stateType() { - return ATNStateType_1.ATNStateType.LOOP_END; - } - }; - __decorate([ - Decorators_1.Override - ], LoopEndState.prototype, "stateType", null); - exports.LoopEndState = LoopEndState; -}); - -// node_modules/antlr4ts/atn/ConflictInfo.js -var require_ConflictInfo = __commonJS((exports) => { - "use strict"; - var __decorate = exports && exports.__decorate || function(decorators, target, key, desc) { - var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; - if (typeof Reflect === "object" && typeof Reflect.decorate === "function") - r = Reflect.decorate(decorators, target, key, desc); - else - for (var i = decorators.length - 1; i >= 0; i--) - if (d = decorators[i]) - r = (c < 3 ? d(r) : c > 3 ? 
d(target, key, r) : d(target, key)) || r; - return c > 3 && r && Object.defineProperty(target, key, r), r; - }; - Object.defineProperty(exports, "__esModule", {value: true}); - exports.ConflictInfo = void 0; - var Decorators_1 = require_Decorators(); - var Utils3 = require_Utils(); - var ConflictInfo = class { - constructor(conflictedAlts, exact) { - this._conflictedAlts = conflictedAlts; - this.exact = exact; - } - get conflictedAlts() { - return this._conflictedAlts; - } - get isExact() { - return this.exact; - } - equals(obj) { - if (obj === this) { - return true; - } else if (!(obj instanceof ConflictInfo)) { - return false; - } - return this.isExact === obj.isExact && Utils3.equals(this.conflictedAlts, obj.conflictedAlts); - } - hashCode() { - return this.conflictedAlts.hashCode(); - } - }; - __decorate([ - Decorators_1.Override - ], ConflictInfo.prototype, "equals", null); - __decorate([ - Decorators_1.Override - ], ConflictInfo.prototype, "hashCode", null); - exports.ConflictInfo = ConflictInfo; -}); - -// node_modules/antlr4ts/tree/TerminalNode.js -var require_TerminalNode = __commonJS((exports) => { - "use strict"; - var __decorate = exports && exports.__decorate || function(decorators, target, key, desc) { - var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; - if (typeof Reflect === "object" && typeof Reflect.decorate === "function") - r = Reflect.decorate(decorators, target, key, desc); - else - for (var i = decorators.length - 1; i >= 0; i--) - if (d = decorators[i]) - r = (c < 3 ? d(r) : c > 3 ? 
d(target, key, r) : d(target, key)) || r; - return c > 3 && r && Object.defineProperty(target, key, r), r; - }; - Object.defineProperty(exports, "__esModule", {value: true}); - exports.TerminalNode = void 0; - var Interval_1 = require_Interval(); - var Decorators_1 = require_Decorators(); - var Token_1 = require_Token(); - var TerminalNode = class { - constructor(symbol) { - this._symbol = symbol; - } - getChild(i) { - throw new RangeError("Terminal Node has no children."); - } - get symbol() { - return this._symbol; - } - get parent() { - return this._parent; - } - setParent(parent) { - this._parent = parent; - } - get payload() { - return this._symbol; - } - get sourceInterval() { - let tokenIndex = this._symbol.tokenIndex; - return new Interval_1.Interval(tokenIndex, tokenIndex); - } - get childCount() { - return 0; - } - accept(visitor) { - return visitor.visitTerminal(this); - } - get text() { - return this._symbol.text || ""; - } - toStringTree(parser) { - return this.toString(); - } - toString() { - if (this._symbol.type === Token_1.Token.EOF) { - return ""; - } - return this._symbol.text || ""; - } - }; - __decorate([ - Decorators_1.Override - ], TerminalNode.prototype, "getChild", null); - __decorate([ - Decorators_1.Override - ], TerminalNode.prototype, "parent", null); - __decorate([ - Decorators_1.Override - ], TerminalNode.prototype, "setParent", null); - __decorate([ - Decorators_1.Override - ], TerminalNode.prototype, "payload", null); - __decorate([ - Decorators_1.Override - ], TerminalNode.prototype, "sourceInterval", null); - __decorate([ - Decorators_1.Override - ], TerminalNode.prototype, "childCount", null); - __decorate([ - Decorators_1.Override - ], TerminalNode.prototype, "accept", null); - __decorate([ - Decorators_1.Override - ], TerminalNode.prototype, "text", null); - __decorate([ - Decorators_1.Override - ], TerminalNode.prototype, "toStringTree", null); - __decorate([ - Decorators_1.Override - ], TerminalNode.prototype, "toString", 
null); - exports.TerminalNode = TerminalNode; -}); - -// node_modules/antlr4ts/tree/ErrorNode.js -var require_ErrorNode = __commonJS((exports) => { - "use strict"; - var __decorate = exports && exports.__decorate || function(decorators, target, key, desc) { - var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; - if (typeof Reflect === "object" && typeof Reflect.decorate === "function") - r = Reflect.decorate(decorators, target, key, desc); - else - for (var i = decorators.length - 1; i >= 0; i--) - if (d = decorators[i]) - r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r; - return c > 3 && r && Object.defineProperty(target, key, r), r; - }; - Object.defineProperty(exports, "__esModule", {value: true}); - exports.ErrorNode = void 0; - var Decorators_1 = require_Decorators(); - var TerminalNode_1 = require_TerminalNode(); - var ErrorNode2 = class extends TerminalNode_1.TerminalNode { - constructor(token) { - super(token); - } - accept(visitor) { - return visitor.visitErrorNode(this); - } - }; - __decorate([ - Decorators_1.Override - ], ErrorNode2.prototype, "accept", null); - exports.ErrorNode = ErrorNode2; -}); - -// node_modules/antlr4ts/tree/RuleNode.js -var require_RuleNode = __commonJS((exports) => { - "use strict"; - Object.defineProperty(exports, "__esModule", {value: true}); - exports.RuleNode = void 0; - var RuleNode = class { - }; - exports.RuleNode = RuleNode; -}); - -// node_modules/antlr4ts/tree/Trees.js -var require_Trees = __commonJS((exports) => { - "use strict"; - var __decorate = exports && exports.__decorate || function(decorators, target, key, desc) { - var c = arguments.length, r = c < 3 ? target : desc === null ? 
desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; - if (typeof Reflect === "object" && typeof Reflect.decorate === "function") - r = Reflect.decorate(decorators, target, key, desc); - else - for (var i = decorators.length - 1; i >= 0; i--) - if (d = decorators[i]) - r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r; - return c > 3 && r && Object.defineProperty(target, key, r), r; - }; - var __param = exports && exports.__param || function(paramIndex, decorator) { - return function(target, key) { - decorator(target, key, paramIndex); - }; - }; - Object.defineProperty(exports, "__esModule", {value: true}); - exports.Trees = void 0; - var ATN_1 = require_ATN(); - var CommonToken_1 = require_CommonToken(); - var ErrorNode_1 = require_ErrorNode(); - var Decorators_1 = require_Decorators(); - var Parser_1 = require_Parser(); - var ParserRuleContext_1 = require_ParserRuleContext(); - var RuleNode_1 = require_RuleNode(); - var TerminalNode_1 = require_TerminalNode(); - var Token_1 = require_Token(); - var Utils3 = require_Utils(); - var Trees = class { - static toStringTree(t, arg2) { - let ruleNames; - if (arg2 instanceof Parser_1.Parser) { - ruleNames = arg2.ruleNames; - } else { - ruleNames = arg2; - } - let s = Utils3.escapeWhitespace(this.getNodeText(t, ruleNames), false); - if (t.childCount === 0) { - return s; - } - let buf = ""; - buf += "("; - s = Utils3.escapeWhitespace(this.getNodeText(t, ruleNames), false); - buf += s; - buf += " "; - for (let i = 0; i < t.childCount; i++) { - if (i > 0) { - buf += " "; - } - buf += this.toStringTree(t.getChild(i), ruleNames); - } - buf += ")"; - return buf; - } - static getNodeText(t, arg2) { - let ruleNames; - if (arg2 instanceof Parser_1.Parser) { - ruleNames = arg2.ruleNames; - } else if (arg2) { - ruleNames = arg2; - } else { - let payload = t.payload; - if (typeof payload.text === "string") { - return payload.text; - } - return t.payload.toString(); - } - if (t instanceof 
RuleNode_1.RuleNode) { - let ruleContext = t.ruleContext; - let ruleIndex = ruleContext.ruleIndex; - let ruleName = ruleNames[ruleIndex]; - let altNumber = ruleContext.altNumber; - if (altNumber !== ATN_1.ATN.INVALID_ALT_NUMBER) { - return ruleName + ":" + altNumber; - } - return ruleName; - } else if (t instanceof ErrorNode_1.ErrorNode) { - return t.toString(); - } else if (t instanceof TerminalNode_1.TerminalNode) { - let symbol = t.symbol; - return symbol.text || ""; - } - throw new TypeError("Unexpected node type"); - } - static getChildren(t) { - let kids = []; - for (let i = 0; i < t.childCount; i++) { - kids.push(t.getChild(i)); - } - return kids; - } - static getAncestors(t) { - let ancestors = []; - let p = t.parent; - while (p) { - ancestors.unshift(p); - p = p.parent; - } - return ancestors; - } - static isAncestorOf(t, u) { - if (!t || !u || !t.parent) { - return false; - } - let p = u.parent; - while (p) { - if (t === p) { - return true; - } - p = p.parent; - } - return false; - } - static findAllTokenNodes(t, ttype) { - return Trees.findAllNodes(t, ttype, true); - } - static findAllRuleNodes(t, ruleIndex) { - return Trees.findAllNodes(t, ruleIndex, false); - } - static findAllNodes(t, index, findTokens) { - let nodes = []; - Trees._findAllNodes(t, index, findTokens, nodes); - return nodes; - } - static _findAllNodes(t, index, findTokens, nodes) { - if (findTokens && t instanceof TerminalNode_1.TerminalNode) { - if (t.symbol.type === index) { - nodes.push(t); - } - } else if (!findTokens && t instanceof ParserRuleContext_1.ParserRuleContext) { - if (t.ruleIndex === index) { - nodes.push(t); - } - } - for (let i = 0; i < t.childCount; i++) { - Trees._findAllNodes(t.getChild(i), index, findTokens, nodes); - } - } - static getDescendants(t) { - let nodes = []; - function recurse(e) { - nodes.push(e); - const n = e.childCount; - for (let i = 0; i < n; i++) { - recurse(e.getChild(i)); - } - } - recurse(t); - return nodes; - } - static 
getRootOfSubtreeEnclosingRegion(t, startTokenIndex, stopTokenIndex) { - let n = t.childCount; - for (let i = 0; i < n; i++) { - let child = t.getChild(i); - let r = Trees.getRootOfSubtreeEnclosingRegion(child, startTokenIndex, stopTokenIndex); - if (r) { - return r; - } - } - if (t instanceof ParserRuleContext_1.ParserRuleContext) { - let stopToken = t.stop; - if (startTokenIndex >= t.start.tokenIndex && (stopToken == null || stopTokenIndex <= stopToken.tokenIndex)) { - return t; - } - } - return void 0; - } - static stripChildrenOutOfRange(t, root, startIndex, stopIndex) { - if (!t) { - return; - } - let count = t.childCount; - for (let i = 0; i < count; i++) { - let child = t.getChild(i); - let range = child.sourceInterval; - if (child instanceof ParserRuleContext_1.ParserRuleContext && (range.b < startIndex || range.a > stopIndex)) { - if (Trees.isAncestorOf(child, root)) { - let abbrev = new CommonToken_1.CommonToken(Token_1.Token.INVALID_TYPE, "..."); - t.children[i] = new TerminalNode_1.TerminalNode(abbrev); - } - } - } - } - static findNodeSuchThat(t, pred) { - if (pred(t)) { - return t; - } - let n = t.childCount; - for (let i = 0; i < n; i++) { - let u = Trees.findNodeSuchThat(t.getChild(i), pred); - if (u !== void 0) { - return u; - } - } - return void 0; - } - }; - __decorate([ - __param(0, Decorators_1.NotNull) - ], Trees, "toStringTree", null); - __decorate([ - Decorators_1.NotNull, - __param(0, Decorators_1.NotNull) - ], Trees, "getAncestors", null); - __decorate([ - __param(0, Decorators_1.NotNull) - ], Trees, "getRootOfSubtreeEnclosingRegion", null); - exports.Trees = Trees; -}); - -// node_modules/antlr4ts/RuleContext.js -var require_RuleContext = __commonJS((exports) => { - "use strict"; - var __decorate = exports && exports.__decorate || function(decorators, target, key, desc) { - var c = arguments.length, r = c < 3 ? target : desc === null ? 
desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; - if (typeof Reflect === "object" && typeof Reflect.decorate === "function") - r = Reflect.decorate(decorators, target, key, desc); - else - for (var i = decorators.length - 1; i >= 0; i--) - if (d = decorators[i]) - r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r; - return c > 3 && r && Object.defineProperty(target, key, r), r; - }; - Object.defineProperty(exports, "__esModule", {value: true}); - exports.RuleContext = void 0; - var ATN_1 = require_ATN(); - var Recognizer_1 = require_Recognizer(); - var RuleNode_1 = require_RuleNode(); - var Interval_1 = require_Interval(); - var Decorators_1 = require_Decorators(); - var Trees_1 = require_Trees(); - var ParserRuleContext_1 = require_ParserRuleContext(); - var RuleContext = class extends RuleNode_1.RuleNode { - constructor(parent, invokingState) { - super(); - this._parent = parent; - this.invokingState = invokingState != null ? invokingState : -1; - } - static getChildContext(parent, invokingState) { - return new RuleContext(parent, invokingState); - } - depth() { - let n = 0; - let p = this; - while (p) { - p = p._parent; - n++; - } - return n; - } - get isEmpty() { - return this.invokingState === -1; - } - get sourceInterval() { - return Interval_1.Interval.INVALID; - } - get ruleContext() { - return this; - } - get parent() { - return this._parent; - } - setParent(parent) { - this._parent = parent; - } - get payload() { - return this; - } - get text() { - if (this.childCount === 0) { - return ""; - } - let builder = ""; - for (let i = 0; i < this.childCount; i++) { - builder += this.getChild(i).text; - } - return builder.toString(); - } - get ruleIndex() { - return -1; - } - get altNumber() { - return ATN_1.ATN.INVALID_ALT_NUMBER; - } - set altNumber(altNumber) { - } - getChild(i) { - throw new RangeError("i must be greater than or equal to 0 and less than childCount"); - } - get childCount() { - return 0; - } - accept(visitor) 
{ - return visitor.visitChildren(this); - } - toStringTree(recog) { - return Trees_1.Trees.toStringTree(this, recog); - } - toString(arg1, stop) { - const ruleNames = arg1 instanceof Recognizer_1.Recognizer ? arg1.ruleNames : arg1; - stop = stop || ParserRuleContext_1.ParserRuleContext.emptyContext(); - let buf = ""; - let p = this; - buf += "["; - while (p && p !== stop) { - if (!ruleNames) { - if (!p.isEmpty) { - buf += p.invokingState; - } - } else { - let ruleIndex = p.ruleIndex; - let ruleName = ruleIndex >= 0 && ruleIndex < ruleNames.length ? ruleNames[ruleIndex] : ruleIndex.toString(); - buf += ruleName; - } - if (p._parent && (ruleNames || !p._parent.isEmpty)) { - buf += " "; - } - p = p._parent; - } - buf += "]"; - return buf.toString(); - } - }; - __decorate([ - Decorators_1.Override - ], RuleContext.prototype, "sourceInterval", null); - __decorate([ - Decorators_1.Override - ], RuleContext.prototype, "ruleContext", null); - __decorate([ - Decorators_1.Override - ], RuleContext.prototype, "parent", null); - __decorate([ - Decorators_1.Override - ], RuleContext.prototype, "setParent", null); - __decorate([ - Decorators_1.Override - ], RuleContext.prototype, "payload", null); - __decorate([ - Decorators_1.Override - ], RuleContext.prototype, "text", null); - __decorate([ - Decorators_1.Override - ], RuleContext.prototype, "getChild", null); - __decorate([ - Decorators_1.Override - ], RuleContext.prototype, "childCount", null); - __decorate([ - Decorators_1.Override - ], RuleContext.prototype, "accept", null); - __decorate([ - Decorators_1.Override - ], RuleContext.prototype, "toStringTree", null); - exports.RuleContext = RuleContext; -}); - -// node_modules/antlr4ts/ParserRuleContext.js -var require_ParserRuleContext = __commonJS((exports) => { - "use strict"; - var __decorate = exports && exports.__decorate || function(decorators, target, key, desc) { - var c = arguments.length, r = c < 3 ? target : desc === null ? 
desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; - if (typeof Reflect === "object" && typeof Reflect.decorate === "function") - r = Reflect.decorate(decorators, target, key, desc); - else - for (var i = decorators.length - 1; i >= 0; i--) - if (d = decorators[i]) - r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r; - return c > 3 && r && Object.defineProperty(target, key, r), r; - }; - Object.defineProperty(exports, "__esModule", {value: true}); - exports.ParserRuleContext = void 0; - var ErrorNode_1 = require_ErrorNode(); - var Interval_1 = require_Interval(); - var Decorators_1 = require_Decorators(); - var RuleContext_1 = require_RuleContext(); - var TerminalNode_1 = require_TerminalNode(); - var ParserRuleContext2 = class extends RuleContext_1.RuleContext { - constructor(parent, invokingStateNumber) { - if (invokingStateNumber == null) { - super(); - } else { - super(parent, invokingStateNumber); - } - } - static emptyContext() { - return ParserRuleContext2.EMPTY; - } - copyFrom(ctx) { - this._parent = ctx._parent; - this.invokingState = ctx.invokingState; - this._start = ctx._start; - this._stop = ctx._stop; - if (ctx.children) { - this.children = []; - for (let child of ctx.children) { - if (child instanceof ErrorNode_1.ErrorNode) { - this.addChild(child); - } - } - } - } - enterRule(listener) { - } - exitRule(listener) { - } - addAnyChild(t) { - if (!this.children) { - this.children = [t]; - } else { - this.children.push(t); - } - return t; - } - addChild(t) { - let result; - if (t instanceof TerminalNode_1.TerminalNode) { - t.setParent(this); - this.addAnyChild(t); - return; - } else if (t instanceof RuleContext_1.RuleContext) { - this.addAnyChild(t); - return; - } else { - t = new TerminalNode_1.TerminalNode(t); - this.addAnyChild(t); - t.setParent(this); - return t; - } - } - addErrorNode(node) { - if (node instanceof ErrorNode_1.ErrorNode) { - const errorNode = node; - errorNode.setParent(this); - return 
this.addAnyChild(errorNode); - } else { - const badToken = node; - let t = new ErrorNode_1.ErrorNode(badToken); - this.addAnyChild(t); - t.setParent(this); - return t; - } - } - removeLastChild() { - if (this.children) { - this.children.pop(); - } - } - get parent() { - let parent = super.parent; - if (parent === void 0 || parent instanceof ParserRuleContext2) { - return parent; - } - throw new TypeError("Invalid parent type for ParserRuleContext"); - } - getChild(i, ctxType) { - if (!this.children || i < 0 || i >= this.children.length) { - throw new RangeError("index parameter must be between >= 0 and <= number of children."); - } - if (ctxType == null) { - return this.children[i]; - } - let result = this.tryGetChild(i, ctxType); - if (result === void 0) { - throw new Error("The specified node does not exist"); - } - return result; - } - tryGetChild(i, ctxType) { - if (!this.children || i < 0 || i >= this.children.length) { - return void 0; - } - let j = -1; - for (let o of this.children) { - if (o instanceof ctxType) { - j++; - if (j === i) { - return o; - } - } - } - return void 0; - } - getToken(ttype, i) { - let result = this.tryGetToken(ttype, i); - if (result === void 0) { - throw new Error("The specified token does not exist"); - } - return result; - } - tryGetToken(ttype, i) { - if (!this.children || i < 0 || i >= this.children.length) { - return void 0; - } - let j = -1; - for (let o of this.children) { - if (o instanceof TerminalNode_1.TerminalNode) { - let symbol = o.symbol; - if (symbol.type === ttype) { - j++; - if (j === i) { - return o; - } - } - } - } - return void 0; - } - getTokens(ttype) { - let tokens2 = []; - if (!this.children) { - return tokens2; - } - for (let o of this.children) { - if (o instanceof TerminalNode_1.TerminalNode) { - let symbol = o.symbol; - if (symbol.type === ttype) { - tokens2.push(o); - } - } - } - return tokens2; - } - get ruleContext() { - return this; - } - getRuleContext(i, ctxType) { - return this.getChild(i, 
ctxType); - } - tryGetRuleContext(i, ctxType) { - return this.tryGetChild(i, ctxType); - } - getRuleContexts(ctxType) { - let contexts = []; - if (!this.children) { - return contexts; - } - for (let o of this.children) { - if (o instanceof ctxType) { - contexts.push(o); - } - } - return contexts; - } - get childCount() { - return this.children ? this.children.length : 0; - } - get sourceInterval() { - if (!this._start) { - return Interval_1.Interval.INVALID; - } - if (!this._stop || this._stop.tokenIndex < this._start.tokenIndex) { - return Interval_1.Interval.of(this._start.tokenIndex, this._start.tokenIndex - 1); - } - return Interval_1.Interval.of(this._start.tokenIndex, this._stop.tokenIndex); - } - get start() { - return this._start; - } - get stop() { - return this._stop; - } - toInfoString(recognizer) { - let rules = recognizer.getRuleInvocationStack(this).reverse(); - return "ParserRuleContext" + rules + "{start=" + this._start + ", stop=" + this._stop + "}"; - } - }; - ParserRuleContext2.EMPTY = new ParserRuleContext2(); - __decorate([ - Decorators_1.Override - ], ParserRuleContext2.prototype, "parent", null); - __decorate([ - Decorators_1.Override - ], ParserRuleContext2.prototype, "childCount", null); - __decorate([ - Decorators_1.Override - ], ParserRuleContext2.prototype, "sourceInterval", null); - exports.ParserRuleContext = ParserRuleContext2; -}); - -// node_modules/antlr4ts/atn/PredictionMode.js -var require_PredictionMode = __commonJS((exports) => { - "use strict"; - var __decorate = exports && exports.__decorate || function(decorators, target, key, desc) { - var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; - if (typeof Reflect === "object" && typeof Reflect.decorate === "function") - r = Reflect.decorate(decorators, target, key, desc); - else - for (var i = decorators.length - 1; i >= 0; i--) - if (d = decorators[i]) - r = (c < 3 ? d(r) : c > 3 ? 
d(target, key, r) : d(target, key)) || r; - return c > 3 && r && Object.defineProperty(target, key, r), r; - }; - Object.defineProperty(exports, "__esModule", {value: true}); - exports.PredictionMode = void 0; - var Array2DHashMap_1 = require_Array2DHashMap(); - var MurmurHash_1 = require_MurmurHash(); - var Decorators_1 = require_Decorators(); - var RuleStopState_1 = require_RuleStopState(); - var PredictionMode; - (function(PredictionMode2) { - PredictionMode2[PredictionMode2["SLL"] = 0] = "SLL"; - PredictionMode2[PredictionMode2["LL"] = 1] = "LL"; - PredictionMode2[PredictionMode2["LL_EXACT_AMBIG_DETECTION"] = 2] = "LL_EXACT_AMBIG_DETECTION"; - })(PredictionMode = exports.PredictionMode || (exports.PredictionMode = {})); - (function(PredictionMode2) { - class AltAndContextMap extends Array2DHashMap_1.Array2DHashMap { - constructor() { - super(AltAndContextConfigEqualityComparator.INSTANCE); - } - } - class AltAndContextConfigEqualityComparator { - AltAndContextConfigEqualityComparator() { - } - hashCode(o) { - let hashCode = MurmurHash_1.MurmurHash.initialize(7); - hashCode = MurmurHash_1.MurmurHash.update(hashCode, o.state.stateNumber); - hashCode = MurmurHash_1.MurmurHash.update(hashCode, o.context); - hashCode = MurmurHash_1.MurmurHash.finish(hashCode, 2); - return hashCode; - } - equals(a, b) { - if (a === b) { - return true; - } - if (a == null || b == null) { - return false; - } - return a.state.stateNumber === b.state.stateNumber && a.context.equals(b.context); - } - } - AltAndContextConfigEqualityComparator.INSTANCE = new AltAndContextConfigEqualityComparator(); - __decorate([ - Decorators_1.Override - ], AltAndContextConfigEqualityComparator.prototype, "hashCode", null); - __decorate([ - Decorators_1.Override - ], AltAndContextConfigEqualityComparator.prototype, "equals", null); - function hasConfigInRuleStopState(configs) { - for (let c of configs) { - if (c.state instanceof RuleStopState_1.RuleStopState) { - return true; - } - } - return false; - } - 
PredictionMode2.hasConfigInRuleStopState = hasConfigInRuleStopState; - function allConfigsInRuleStopStates(configs) { - for (let config of configs) { - if (!(config.state instanceof RuleStopState_1.RuleStopState)) { - return false; - } - } - return true; - } - PredictionMode2.allConfigsInRuleStopStates = allConfigsInRuleStopStates; - })(PredictionMode = exports.PredictionMode || (exports.PredictionMode = {})); -}); - -// node_modules/antlr4ts/atn/SimulatorState.js -var require_SimulatorState = __commonJS((exports) => { - "use strict"; - var __decorate = exports && exports.__decorate || function(decorators, target, key, desc) { - var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; - if (typeof Reflect === "object" && typeof Reflect.decorate === "function") - r = Reflect.decorate(decorators, target, key, desc); - else - for (var i = decorators.length - 1; i >= 0; i--) - if (d = decorators[i]) - r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r; - return c > 3 && r && Object.defineProperty(target, key, r), r; - }; - var __param = exports && exports.__param || function(paramIndex, decorator) { - return function(target, key) { - decorator(target, key, paramIndex); - }; - }; - Object.defineProperty(exports, "__esModule", {value: true}); - exports.SimulatorState = void 0; - var Decorators_1 = require_Decorators(); - var ParserRuleContext_1 = require_ParserRuleContext(); - var SimulatorState = class SimulatorState { - constructor(outerContext, s0, useContext, remainingOuterContext) { - this.outerContext = outerContext != null ? 
outerContext : ParserRuleContext_1.ParserRuleContext.emptyContext(); - this.s0 = s0; - this.useContext = useContext; - this.remainingOuterContext = remainingOuterContext; - } - }; - SimulatorState = __decorate([ - __param(1, Decorators_1.NotNull) - ], SimulatorState); - exports.SimulatorState = SimulatorState; -}); - -// node_modules/antlr4ts/atn/ParserATNSimulator.js -var require_ParserATNSimulator = __commonJS((exports) => { - "use strict"; - var __decorate = exports && exports.__decorate || function(decorators, target, key, desc) { - var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; - if (typeof Reflect === "object" && typeof Reflect.decorate === "function") - r = Reflect.decorate(decorators, target, key, desc); - else - for (var i = decorators.length - 1; i >= 0; i--) - if (d = decorators[i]) - r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r; - return c > 3 && r && Object.defineProperty(target, key, r), r; - }; - var __param = exports && exports.__param || function(paramIndex, decorator) { - return function(target, key) { - decorator(target, key, paramIndex); - }; - }; - Object.defineProperty(exports, "__esModule", {value: true}); - exports.ParserATNSimulator = void 0; - var AcceptStateInfo_1 = require_AcceptStateInfo(); - var ActionTransition_1 = require_ActionTransition(); - var Array2DHashSet_1 = require_Array2DHashSet(); - var Arrays_1 = require_Arrays(); - var ATN_1 = require_ATN(); - var ATNConfig_1 = require_ATNConfig(); - var ATNConfigSet_1 = require_ATNConfigSet(); - var ATNSimulator_1 = require_ATNSimulator(); - var ATNStateType_1 = require_ATNStateType(); - var AtomTransition_1 = require_AtomTransition(); - var BitSet_1 = require_BitSet(); - var ConflictInfo_1 = require_ConflictInfo(); - var DecisionState_1 = require_DecisionState(); - var DFAState_1 = require_DFAState(); - var IntegerList_1 = require_IntegerList(); - var Interval_1 = require_Interval(); 
- var IntStream_1 = require_IntStream(); - var Decorators_1 = require_Decorators(); - var NotSetTransition_1 = require_NotSetTransition(); - var NoViableAltException_1 = require_NoViableAltException(); - var ObjectEqualityComparator_1 = require_ObjectEqualityComparator(); - var ParserRuleContext_1 = require_ParserRuleContext(); - var PredictionContext_1 = require_PredictionContext(); - var PredictionContextCache_1 = require_PredictionContextCache(); - var PredictionMode_1 = require_PredictionMode(); - var RuleStopState_1 = require_RuleStopState(); - var RuleTransition_1 = require_RuleTransition(); - var SemanticContext_1 = require_SemanticContext(); - var SetTransition_1 = require_SetTransition(); - var SimulatorState_1 = require_SimulatorState(); - var Token_1 = require_Token(); - var VocabularyImpl_1 = require_VocabularyImpl(); - var assert = require("assert"); - var MAX_SHORT_VALUE = 65535; - var MIN_INTEGER_VALUE = -(1 << 31 >>> 0); - var ParserATNSimulator2 = class ParserATNSimulator3 extends ATNSimulator_1.ATNSimulator { - constructor(atn, parser) { - super(atn); - this.predictionMode = PredictionMode_1.PredictionMode.LL; - this.force_global_context = false; - this.always_try_local_context = true; - this.enable_global_context_dfa = false; - this.optimize_unique_closure = true; - this.optimize_ll1 = true; - this.optimize_tail_calls = true; - this.tail_call_preserves_sll = true; - this.treat_sllk1_conflict_as_ambiguity = false; - this.reportAmbiguities = false; - this.userWantsCtxSensitive = true; - this._parser = parser; - } - getPredictionMode() { - return this.predictionMode; - } - setPredictionMode(predictionMode) { - this.predictionMode = predictionMode; - } - reset() { - } - adaptivePredict(input, decision, outerContext, useContext) { - if (useContext === void 0) { - useContext = false; - } - let dfa = this.atn.decisionToDFA[decision]; - assert(dfa != null); - if (this.optimize_ll1 && !dfa.isPrecedenceDfa && !dfa.isEmpty) { - let ll_1 = input.LA(1); - if 
(ll_1 >= 0 && ll_1 <= 65535) { - let key = (decision << 16 >>> 0) + ll_1; - let alt = this.atn.LL1Table.get(key); - if (alt != null) { - return alt; - } - } - } - this.dfa = dfa; - if (this.force_global_context) { - useContext = true; - } else if (!this.always_try_local_context) { - useContext = useContext || dfa.isContextSensitive; - } - this.userWantsCtxSensitive = useContext || this.predictionMode !== PredictionMode_1.PredictionMode.SLL && outerContext != null && !this.atn.decisionToState[decision].sll; - if (outerContext == null) { - outerContext = ParserRuleContext_1.ParserRuleContext.emptyContext(); - } - let state; - if (!dfa.isEmpty) { - state = this.getStartState(dfa, input, outerContext, useContext); - } - if (state == null) { - if (outerContext == null) { - outerContext = ParserRuleContext_1.ParserRuleContext.emptyContext(); - } - if (ParserATNSimulator3.debug) { - console.log("ATN decision " + dfa.decision + " exec LA(1)==" + this.getLookaheadName(input) + ", outerContext=" + outerContext.toString(this._parser)); - } - state = this.computeStartState(dfa, outerContext, useContext); - } - let m = input.mark(); - let index = input.index; - try { - let alt = this.execDFA(dfa, input, index, state); - if (ParserATNSimulator3.debug) { - console.log("DFA after predictATN: " + dfa.toString(this._parser.vocabulary, this._parser.ruleNames)); - } - return alt; - } finally { - this.dfa = void 0; - input.seek(index); - input.release(m); - } - } - getStartState(dfa, input, outerContext, useContext) { - if (!useContext) { - if (dfa.isPrecedenceDfa) { - let state = dfa.getPrecedenceStartState(this._parser.precedence, false); - if (state == null) { - return void 0; - } - return new SimulatorState_1.SimulatorState(outerContext, state, false, outerContext); - } else { - if (dfa.s0 == null) { - return void 0; - } - return new SimulatorState_1.SimulatorState(outerContext, dfa.s0, false, outerContext); - } - } - if (!this.enable_global_context_dfa) { - return void 0; - } - 
let remainingContext = outerContext; - assert(outerContext != null); - let s0; - if (dfa.isPrecedenceDfa) { - s0 = dfa.getPrecedenceStartState(this._parser.precedence, true); - } else { - s0 = dfa.s0full; - } - while (remainingContext != null && s0 != null && s0.isContextSensitive) { - remainingContext = this.skipTailCalls(remainingContext); - s0 = s0.getContextTarget(this.getReturnState(remainingContext)); - if (remainingContext.isEmpty) { - assert(s0 == null || !s0.isContextSensitive); - } else { - remainingContext = remainingContext.parent; - } - } - if (s0 == null) { - return void 0; - } - return new SimulatorState_1.SimulatorState(outerContext, s0, useContext, remainingContext); - } - execDFA(dfa, input, startIndex, state) { - let outerContext = state.outerContext; - if (ParserATNSimulator3.dfa_debug) { - console.log("DFA decision " + dfa.decision + " exec LA(1)==" + this.getLookaheadName(input) + ", outerContext=" + outerContext.toString(this._parser)); - } - if (ParserATNSimulator3.dfa_debug) { - console.log(dfa.toString(this._parser.vocabulary, this._parser.ruleNames)); - } - let s = state.s0; - let t = input.LA(1); - let remainingOuterContext = state.remainingOuterContext; - while (true) { - if (ParserATNSimulator3.dfa_debug) { - console.log("DFA state " + s.stateNumber + " LA(1)==" + this.getLookaheadName(input)); - } - if (state.useContext) { - while (s.isContextSymbol(t)) { - let next; - if (remainingOuterContext != null) { - remainingOuterContext = this.skipTailCalls(remainingOuterContext); - next = s.getContextTarget(this.getReturnState(remainingOuterContext)); - } - if (next == null) { - let initialState = new SimulatorState_1.SimulatorState(state.outerContext, s, state.useContext, remainingOuterContext); - return this.execATN(dfa, input, startIndex, initialState); - } - assert(remainingOuterContext != null); - remainingOuterContext = remainingOuterContext.parent; - s = next; - } - } - if (this.isAcceptState(s, state.useContext)) { - if (s.predicates 
!= null) { - if (ParserATNSimulator3.dfa_debug) { - console.log("accept " + s); - } - } else { - if (ParserATNSimulator3.dfa_debug) { - console.log("accept; predict " + s.prediction + " in state " + s.stateNumber); - } - } - break; - } - assert(!this.isAcceptState(s, state.useContext)); - let target = this.getExistingTargetState(s, t); - if (target == null) { - if (ParserATNSimulator3.dfa_debug && t >= 0) { - console.log("no edge for " + this._parser.vocabulary.getDisplayName(t)); - } - let alt; - if (ParserATNSimulator3.dfa_debug) { - let interval = Interval_1.Interval.of(startIndex, this._parser.inputStream.index); - console.log("ATN exec upon " + this._parser.inputStream.getText(interval) + " at DFA state " + s.stateNumber); - } - let initialState = new SimulatorState_1.SimulatorState(outerContext, s, state.useContext, remainingOuterContext); - alt = this.execATN(dfa, input, startIndex, initialState); - if (ParserATNSimulator3.dfa_debug) { - console.log("back from DFA update, alt=" + alt + ", dfa=\n" + dfa.toString(this._parser.vocabulary, this._parser.ruleNames)); - } - if (ParserATNSimulator3.dfa_debug) { - console.log("DFA decision " + dfa.decision + " predicts " + alt); - } - return alt; - } else if (target === ATNSimulator_1.ATNSimulator.ERROR) { - let errorState = new SimulatorState_1.SimulatorState(outerContext, s, state.useContext, remainingOuterContext); - return this.handleNoViableAlt(input, startIndex, errorState); - } - s = target; - if (!this.isAcceptState(s, state.useContext) && t !== IntStream_1.IntStream.EOF) { - input.consume(); - t = input.LA(1); - } - } - if (!state.useContext && s.configs.conflictInfo != null) { - if (dfa.atnStartState instanceof DecisionState_1.DecisionState) { - if (!this.userWantsCtxSensitive || !s.configs.dipsIntoOuterContext && s.configs.isExactConflict || this.treat_sllk1_conflict_as_ambiguity && input.index === startIndex) { - } else { - assert(!state.useContext); - let conflictingAlts; - let predicates2 = 
s.predicates; - if (predicates2 != null) { - let conflictIndex = input.index; - if (conflictIndex !== startIndex) { - input.seek(startIndex); - } - conflictingAlts = this.evalSemanticContext(predicates2, outerContext, true); - if (conflictingAlts.cardinality() === 1) { - return conflictingAlts.nextSetBit(0); - } - if (conflictIndex !== startIndex) { - input.seek(conflictIndex); - } - } - if (this.reportAmbiguities) { - let conflictState = new SimulatorState_1.SimulatorState(outerContext, s, state.useContext, remainingOuterContext); - this.reportAttemptingFullContext(dfa, conflictingAlts, conflictState, startIndex, input.index); - } - input.seek(startIndex); - return this.adaptivePredict(input, dfa.decision, outerContext, true); - } - } - } - let predicates = s.predicates; - if (predicates != null) { - let stopIndex = input.index; - if (startIndex !== stopIndex) { - input.seek(startIndex); - } - let alts = this.evalSemanticContext(predicates, outerContext, this.reportAmbiguities && this.predictionMode === PredictionMode_1.PredictionMode.LL_EXACT_AMBIG_DETECTION); - switch (alts.cardinality()) { - case 0: - throw this.noViableAlt(input, outerContext, s.configs, startIndex); - case 1: - return alts.nextSetBit(0); - default: - if (startIndex !== stopIndex) { - input.seek(stopIndex); - } - this.reportAmbiguity(dfa, s, startIndex, stopIndex, s.configs.isExactConflict, alts, s.configs); - return alts.nextSetBit(0); - } - } - if (ParserATNSimulator3.dfa_debug) { - console.log("DFA decision " + dfa.decision + " predicts " + s.prediction); - } - return s.prediction; - } - isAcceptState(state, useContext) { - if (!state.isAcceptState) { - return false; - } - if (state.configs.conflictingAlts == null) { - return true; - } - if (useContext && this.predictionMode === PredictionMode_1.PredictionMode.LL_EXACT_AMBIG_DETECTION) { - return state.configs.isExactConflict; - } - return true; - } - execATN(dfa, input, startIndex, initialState) { - if (ParserATNSimulator3.debug) { - 
console.log("execATN decision " + dfa.decision + " exec LA(1)==" + this.getLookaheadName(input)); - } - let outerContext = initialState.outerContext; - let useContext = initialState.useContext; - let t = input.LA(1); - let previous = initialState; - let contextCache = new PredictionContextCache_1.PredictionContextCache(); - while (true) { - let nextState = this.computeReachSet(dfa, previous, t, contextCache); - if (nextState == null) { - this.setDFAEdge(previous.s0, input.LA(1), ATNSimulator_1.ATNSimulator.ERROR); - return this.handleNoViableAlt(input, startIndex, previous); - } - let D = nextState.s0; - assert(D.isAcceptState || D.prediction === ATN_1.ATN.INVALID_ALT_NUMBER); - assert(D.isAcceptState || D.configs.conflictInfo == null); - if (this.isAcceptState(D, useContext)) { - let conflictingAlts = D.configs.conflictingAlts; - let predictedAlt = conflictingAlts == null ? D.prediction : ATN_1.ATN.INVALID_ALT_NUMBER; - if (predictedAlt !== ATN_1.ATN.INVALID_ALT_NUMBER) { - if (this.optimize_ll1 && input.index === startIndex && !dfa.isPrecedenceDfa && nextState.outerContext === nextState.remainingOuterContext && dfa.decision >= 0 && !D.configs.hasSemanticContext) { - if (t >= 0 && t <= MAX_SHORT_VALUE) { - let key = (dfa.decision << 16 >>> 0) + t; - this.atn.LL1Table.set(key, predictedAlt); - } - } - if (useContext && this.always_try_local_context) { - this.reportContextSensitivity(dfa, predictedAlt, nextState, startIndex, input.index); - } - } - predictedAlt = D.prediction; - let attemptFullContext = conflictingAlts != null && this.userWantsCtxSensitive; - if (attemptFullContext) { - attemptFullContext = !useContext && (D.configs.dipsIntoOuterContext || !D.configs.isExactConflict) && (!this.treat_sllk1_conflict_as_ambiguity || input.index !== startIndex); - } - if (D.configs.hasSemanticContext) { - let predPredictions = D.predicates; - if (predPredictions != null) { - let conflictIndex = input.index; - if (conflictIndex !== startIndex) { - input.seek(startIndex); 
- } - conflictingAlts = this.evalSemanticContext(predPredictions, outerContext, attemptFullContext || this.reportAmbiguities); - switch (conflictingAlts.cardinality()) { - case 0: - throw this.noViableAlt(input, outerContext, D.configs, startIndex); - case 1: - return conflictingAlts.nextSetBit(0); - default: - break; - } - if (conflictIndex !== startIndex) { - input.seek(conflictIndex); - } - } - } - if (!attemptFullContext) { - if (conflictingAlts != null) { - if (this.reportAmbiguities && conflictingAlts.cardinality() > 1) { - this.reportAmbiguity(dfa, D, startIndex, input.index, D.configs.isExactConflict, conflictingAlts, D.configs); - } - predictedAlt = conflictingAlts.nextSetBit(0); - } - return predictedAlt; - } else { - assert(!useContext); - assert(this.isAcceptState(D, false)); - if (ParserATNSimulator3.debug) { - console.log("RETRY with outerContext=" + outerContext); - } - let fullContextState = this.computeStartState(dfa, outerContext, true); - if (this.reportAmbiguities) { - this.reportAttemptingFullContext(dfa, conflictingAlts, nextState, startIndex, input.index); - } - input.seek(startIndex); - return this.execATN(dfa, input, startIndex, fullContextState); - } - } - previous = nextState; - if (t !== IntStream_1.IntStream.EOF) { - input.consume(); - t = input.LA(1); - } - } - } - handleNoViableAlt(input, startIndex, previous) { - if (previous.s0 != null) { - let alts = new BitSet_1.BitSet(); - let maxAlt = 0; - for (let config of previous.s0.configs) { - if (config.reachesIntoOuterContext || config.state instanceof RuleStopState_1.RuleStopState) { - alts.set(config.alt); - maxAlt = Math.max(maxAlt, config.alt); - } - } - switch (alts.cardinality()) { - case 0: - break; - case 1: - return alts.nextSetBit(0); - default: - if (!previous.s0.configs.hasSemanticContext) { - return alts.nextSetBit(0); - } - let filteredConfigs = new ATNConfigSet_1.ATNConfigSet(); - for (let config of previous.s0.configs) { - if (config.reachesIntoOuterContext || 
config.state instanceof RuleStopState_1.RuleStopState) { - filteredConfigs.add(config); - } - } - let altToPred = this.getPredsForAmbigAlts(alts, filteredConfigs, maxAlt); - if (altToPred != null) { - let predicates = this.getPredicatePredictions(alts, altToPred); - if (predicates != null) { - let stopIndex = input.index; - try { - input.seek(startIndex); - let filteredAlts = this.evalSemanticContext(predicates, previous.outerContext, false); - if (!filteredAlts.isEmpty) { - return filteredAlts.nextSetBit(0); - } - } finally { - input.seek(stopIndex); - } - } - } - return alts.nextSetBit(0); - } - } - throw this.noViableAlt(input, previous.outerContext, previous.s0.configs, startIndex); - } - computeReachSet(dfa, previous, t, contextCache) { - let useContext = previous.useContext; - let remainingGlobalContext = previous.remainingOuterContext; - let s = previous.s0; - if (useContext) { - while (s.isContextSymbol(t)) { - let next; - if (remainingGlobalContext != null) { - remainingGlobalContext = this.skipTailCalls(remainingGlobalContext); - next = s.getContextTarget(this.getReturnState(remainingGlobalContext)); - } - if (next == null) { - break; - } - assert(remainingGlobalContext != null); - remainingGlobalContext = remainingGlobalContext.parent; - s = next; - } - } - assert(!this.isAcceptState(s, useContext)); - if (this.isAcceptState(s, useContext)) { - return new SimulatorState_1.SimulatorState(previous.outerContext, s, useContext, remainingGlobalContext); - } - let s0 = s; - let target = this.getExistingTargetState(s0, t); - if (target == null) { - let result = this.computeTargetState(dfa, s0, remainingGlobalContext, t, useContext, contextCache); - target = result[0]; - remainingGlobalContext = result[1]; - } - if (target === ATNSimulator_1.ATNSimulator.ERROR) { - return void 0; - } - assert(!useContext || !target.configs.dipsIntoOuterContext); - return new SimulatorState_1.SimulatorState(previous.outerContext, target, useContext, remainingGlobalContext); - } - 
getExistingTargetState(s, t) { - return s.getTarget(t); - } - computeTargetState(dfa, s, remainingGlobalContext, t, useContext, contextCache) { - let closureConfigs = s.configs.toArray(); - let contextElements; - let reach = new ATNConfigSet_1.ATNConfigSet(); - let stepIntoGlobal; - do { - let hasMoreContext = !useContext || remainingGlobalContext != null; - if (!hasMoreContext) { - reach.isOutermostConfigSet = true; - } - let reachIntermediate = new ATNConfigSet_1.ATNConfigSet(); - let skippedStopStates; - for (let c of closureConfigs) { - if (ParserATNSimulator3.debug) { - console.log("testing " + this.getTokenName(t) + " at " + c.toString()); - } - if (c.state instanceof RuleStopState_1.RuleStopState) { - assert(c.context.isEmpty); - if (useContext && !c.reachesIntoOuterContext || t === IntStream_1.IntStream.EOF) { - if (skippedStopStates == null) { - skippedStopStates = []; - } - skippedStopStates.push(c); - } - continue; - } - let n = c.state.numberOfOptimizedTransitions; - for (let ti = 0; ti < n; ti++) { - let trans = c.state.getOptimizedTransition(ti); - let target = this.getReachableTarget(c, trans, t); - if (target != null) { - reachIntermediate.add(c.transform(target, false), contextCache); - } - } - } - if (this.optimize_unique_closure && skippedStopStates == null && t !== Token_1.Token.EOF && reachIntermediate.uniqueAlt !== ATN_1.ATN.INVALID_ALT_NUMBER) { - reachIntermediate.isOutermostConfigSet = reach.isOutermostConfigSet; - reach = reachIntermediate; - break; - } - let collectPredicates = false; - let treatEofAsEpsilon = t === Token_1.Token.EOF; - this.closure(reachIntermediate, reach, collectPredicates, hasMoreContext, contextCache, treatEofAsEpsilon); - stepIntoGlobal = reach.dipsIntoOuterContext; - if (t === IntStream_1.IntStream.EOF) { - reach = this.removeAllConfigsNotInRuleStopState(reach, contextCache); - } - if (skippedStopStates != null && (!useContext || !PredictionMode_1.PredictionMode.hasConfigInRuleStopState(reach))) { - 
assert(skippedStopStates.length > 0); - for (let c of skippedStopStates) { - reach.add(c, contextCache); - } - } - if (useContext && stepIntoGlobal) { - reach.clear(); - remainingGlobalContext = remainingGlobalContext; - remainingGlobalContext = this.skipTailCalls(remainingGlobalContext); - let nextContextElement = this.getReturnState(remainingGlobalContext); - if (contextElements == null) { - contextElements = new IntegerList_1.IntegerList(); - } - if (remainingGlobalContext.isEmpty) { - remainingGlobalContext = void 0; - } else { - remainingGlobalContext = remainingGlobalContext.parent; - } - contextElements.add(nextContextElement); - if (nextContextElement !== PredictionContext_1.PredictionContext.EMPTY_FULL_STATE_KEY) { - for (let i = 0; i < closureConfigs.length; i++) { - closureConfigs[i] = closureConfigs[i].appendContext(nextContextElement, contextCache); - } - } - } - } while (useContext && stepIntoGlobal); - if (reach.isEmpty) { - this.setDFAEdge(s, t, ATNSimulator_1.ATNSimulator.ERROR); - return [ATNSimulator_1.ATNSimulator.ERROR, remainingGlobalContext]; - } - let result = this.addDFAEdge(dfa, s, t, contextElements, reach, contextCache); - return [result, remainingGlobalContext]; - } - removeAllConfigsNotInRuleStopState(configs, contextCache) { - if (PredictionMode_1.PredictionMode.allConfigsInRuleStopStates(configs)) { - return configs; - } - let result = new ATNConfigSet_1.ATNConfigSet(); - for (let config of configs) { - if (!(config.state instanceof RuleStopState_1.RuleStopState)) { - continue; - } - result.add(config, contextCache); - } - return result; - } - computeStartState(dfa, globalContext, useContext) { - let s0 = dfa.isPrecedenceDfa ? dfa.getPrecedenceStartState(this._parser.precedence, useContext) : useContext ? 
dfa.s0full : dfa.s0; - if (s0 != null) { - if (!useContext) { - return new SimulatorState_1.SimulatorState(globalContext, s0, useContext, globalContext); - } - s0.setContextSensitive(this.atn); - } - let decision = dfa.decision; - let p = dfa.atnStartState; - let previousContext = 0; - let remainingGlobalContext = globalContext; - let initialContext = useContext ? PredictionContext_1.PredictionContext.EMPTY_FULL : PredictionContext_1.PredictionContext.EMPTY_LOCAL; - let contextCache = new PredictionContextCache_1.PredictionContextCache(); - if (useContext) { - if (!this.enable_global_context_dfa) { - while (remainingGlobalContext != null) { - if (remainingGlobalContext.isEmpty) { - previousContext = PredictionContext_1.PredictionContext.EMPTY_FULL_STATE_KEY; - remainingGlobalContext = void 0; - } else { - previousContext = this.getReturnState(remainingGlobalContext); - initialContext = initialContext.appendSingleContext(previousContext, contextCache); - remainingGlobalContext = remainingGlobalContext.parent; - } - } - } - while (s0 != null && s0.isContextSensitive && remainingGlobalContext != null) { - let next; - remainingGlobalContext = this.skipTailCalls(remainingGlobalContext); - if (remainingGlobalContext.isEmpty) { - next = s0.getContextTarget(PredictionContext_1.PredictionContext.EMPTY_FULL_STATE_KEY); - previousContext = PredictionContext_1.PredictionContext.EMPTY_FULL_STATE_KEY; - remainingGlobalContext = void 0; - } else { - previousContext = this.getReturnState(remainingGlobalContext); - next = s0.getContextTarget(previousContext); - initialContext = initialContext.appendSingleContext(previousContext, contextCache); - remainingGlobalContext = remainingGlobalContext.parent; - } - if (next == null) { - break; - } - s0 = next; - } - } - if (s0 != null && !s0.isContextSensitive) { - return new SimulatorState_1.SimulatorState(globalContext, s0, useContext, remainingGlobalContext); - } - let configs = new ATNConfigSet_1.ATNConfigSet(); - while (true) { - let 
reachIntermediate = new ATNConfigSet_1.ATNConfigSet(); - let n = p.numberOfTransitions; - for (let ti = 0; ti < n; ti++) { - let target = p.transition(ti).target; - reachIntermediate.add(ATNConfig_1.ATNConfig.create(target, ti + 1, initialContext)); - } - let hasMoreContext = remainingGlobalContext != null; - if (!hasMoreContext) { - configs.isOutermostConfigSet = true; - } - let collectPredicates = true; - this.closure(reachIntermediate, configs, collectPredicates, hasMoreContext, contextCache, false); - let stepIntoGlobal = configs.dipsIntoOuterContext; - let next; - if (useContext && !this.enable_global_context_dfa) { - s0 = this.addDFAState(dfa, configs, contextCache); - break; - } else if (s0 == null) { - if (!dfa.isPrecedenceDfa) { - next = this.addDFAState(dfa, configs, contextCache); - if (useContext) { - if (!dfa.s0full) { - dfa.s0full = next; - } else { - next = dfa.s0full; - } - } else { - if (!dfa.s0) { - dfa.s0 = next; - } else { - next = dfa.s0; - } - } - } else { - configs = this.applyPrecedenceFilter(configs, globalContext, contextCache); - next = this.addDFAState(dfa, configs, contextCache); - dfa.setPrecedenceStartState(this._parser.precedence, useContext, next); - } - } else { - if (dfa.isPrecedenceDfa) { - configs = this.applyPrecedenceFilter(configs, globalContext, contextCache); - } - next = this.addDFAState(dfa, configs, contextCache); - s0.setContextTarget(previousContext, next); - } - s0 = next; - if (!useContext || !stepIntoGlobal) { - break; - } - next.setContextSensitive(this.atn); - remainingGlobalContext = remainingGlobalContext; - configs.clear(); - remainingGlobalContext = this.skipTailCalls(remainingGlobalContext); - let nextContextElement = this.getReturnState(remainingGlobalContext); - if (remainingGlobalContext.isEmpty) { - remainingGlobalContext = void 0; - } else { - remainingGlobalContext = remainingGlobalContext.parent; - } - if (nextContextElement !== PredictionContext_1.PredictionContext.EMPTY_FULL_STATE_KEY) { - 
initialContext = initialContext.appendSingleContext(nextContextElement, contextCache); - } - previousContext = nextContextElement; - } - return new SimulatorState_1.SimulatorState(globalContext, s0, useContext, remainingGlobalContext); - } - applyPrecedenceFilter(configs, globalContext, contextCache) { - let statesFromAlt1 = new Map(); - let configSet = new ATNConfigSet_1.ATNConfigSet(); - for (let config of configs) { - if (config.alt !== 1) { - continue; - } - let updatedContext = config.semanticContext.evalPrecedence(this._parser, globalContext); - if (updatedContext == null) { - continue; - } - statesFromAlt1.set(config.state.stateNumber, config.context); - if (updatedContext !== config.semanticContext) { - configSet.add(config.transform(config.state, false, updatedContext), contextCache); - } else { - configSet.add(config, contextCache); - } - } - for (let config of configs) { - if (config.alt === 1) { - continue; - } - if (!config.isPrecedenceFilterSuppressed) { - let context = statesFromAlt1.get(config.state.stateNumber); - if (context != null && context.equals(config.context)) { - continue; - } - } - configSet.add(config, contextCache); - } - return configSet; - } - getReachableTarget(source, trans, ttype) { - if (trans.matches(ttype, 0, this.atn.maxTokenType)) { - return trans.target; - } - return void 0; - } - predicateDFAState(D, configs, nalts) { - let conflictingAlts = this.getConflictingAltsFromConfigSet(configs); - if (!conflictingAlts) { - throw new Error("This unhandled scenario is intended to be unreachable, but I'm currently not sure of why we know that's the case."); - } - if (ParserATNSimulator3.debug) { - console.log("predicateDFAState " + D); - } - let altToPred = this.getPredsForAmbigAlts(conflictingAlts, configs, nalts); - let predPredictions; - if (altToPred != null) { - predPredictions = this.getPredicatePredictions(conflictingAlts, altToPred); - D.predicates = predPredictions; - } - return predPredictions; - } - 
getPredsForAmbigAlts(ambigAlts, configs, nalts) { - let altToPred = new Array(nalts + 1); - let n = altToPred.length; - for (let c of configs) { - if (ambigAlts.get(c.alt)) { - altToPred[c.alt] = SemanticContext_1.SemanticContext.or(altToPred[c.alt], c.semanticContext); - } - } - let nPredAlts = 0; - for (let i = 0; i < n; i++) { - if (altToPred[i] == null) { - altToPred[i] = SemanticContext_1.SemanticContext.NONE; - } else if (altToPred[i] !== SemanticContext_1.SemanticContext.NONE) { - nPredAlts++; - } - } - let result = altToPred; - if (nPredAlts === 0) { - result = void 0; - } - if (ParserATNSimulator3.debug) { - console.log("getPredsForAmbigAlts result " + (result ? Arrays_1.Arrays.toString(result) : "undefined")); - } - return result; - } - getPredicatePredictions(ambigAlts, altToPred) { - let pairs = []; - let containsPredicate = false; - for (let i = 1; i < altToPred.length; i++) { - let pred = altToPred[i]; - assert(pred != null); - if (ambigAlts != null && ambigAlts.get(i) && pred === SemanticContext_1.SemanticContext.NONE) { - pairs.push(new DFAState_1.DFAState.PredPrediction(pred, i)); - } else if (pred !== SemanticContext_1.SemanticContext.NONE) { - containsPredicate = true; - pairs.push(new DFAState_1.DFAState.PredPrediction(pred, i)); - } - } - if (!containsPredicate) { - return void 0; - } - return pairs; - } - evalSemanticContext(predPredictions, outerContext, complete) { - let predictions = new BitSet_1.BitSet(); - for (let pair of predPredictions) { - if (pair.pred === SemanticContext_1.SemanticContext.NONE) { - predictions.set(pair.alt); - if (!complete) { - break; - } - continue; - } - let evaluatedResult = this.evalSemanticContextImpl(pair.pred, outerContext, pair.alt); - if (ParserATNSimulator3.debug || ParserATNSimulator3.dfa_debug) { - console.log("eval pred " + pair + "=" + evaluatedResult); - } - if (evaluatedResult) { - if (ParserATNSimulator3.debug || ParserATNSimulator3.dfa_debug) { - console.log("PREDICT " + pair.alt); - } - 
predictions.set(pair.alt); - if (!complete) { - break; - } - } - } - return predictions; - } - evalSemanticContextImpl(pred, parserCallStack, alt) { - return pred.eval(this._parser, parserCallStack); - } - closure(sourceConfigs, configs, collectPredicates, hasMoreContext, contextCache, treatEofAsEpsilon) { - if (contextCache == null) { - contextCache = PredictionContextCache_1.PredictionContextCache.UNCACHED; - } - let currentConfigs = sourceConfigs; - let closureBusy = new Array2DHashSet_1.Array2DHashSet(ObjectEqualityComparator_1.ObjectEqualityComparator.INSTANCE); - while (currentConfigs.size > 0) { - let intermediate = new ATNConfigSet_1.ATNConfigSet(); - for (let config of currentConfigs) { - this.closureImpl(config, configs, intermediate, closureBusy, collectPredicates, hasMoreContext, contextCache, 0, treatEofAsEpsilon); - } - currentConfigs = intermediate; - } - } - closureImpl(config, configs, intermediate, closureBusy, collectPredicates, hasMoreContexts, contextCache, depth, treatEofAsEpsilon) { - if (ParserATNSimulator3.debug) { - console.log("closure(" + config.toString(this._parser, true) + ")"); - } - if (config.state instanceof RuleStopState_1.RuleStopState) { - if (!config.context.isEmpty) { - let hasEmpty = config.context.hasEmpty; - let nonEmptySize = config.context.size - (hasEmpty ? 
1 : 0); - for (let i = 0; i < nonEmptySize; i++) { - let newContext = config.context.getParent(i); - let returnState = this.atn.states[config.context.getReturnState(i)]; - let c = ATNConfig_1.ATNConfig.create(returnState, config.alt, newContext, config.semanticContext); - c.outerContextDepth = config.outerContextDepth; - c.isPrecedenceFilterSuppressed = config.isPrecedenceFilterSuppressed; - assert(depth > MIN_INTEGER_VALUE); - this.closureImpl(c, configs, intermediate, closureBusy, collectPredicates, hasMoreContexts, contextCache, depth - 1, treatEofAsEpsilon); - } - if (!hasEmpty || !hasMoreContexts) { - return; - } - config = config.transform(config.state, false, PredictionContext_1.PredictionContext.EMPTY_LOCAL); - } else if (!hasMoreContexts) { - configs.add(config, contextCache); - return; - } else { - if (ParserATNSimulator3.debug) { - console.log("FALLING off rule " + this.getRuleName(config.state.ruleIndex)); - } - if (config.context === PredictionContext_1.PredictionContext.EMPTY_FULL) { - config = config.transform(config.state, false, PredictionContext_1.PredictionContext.EMPTY_LOCAL); - } else if (!config.reachesIntoOuterContext && PredictionContext_1.PredictionContext.isEmptyLocal(config.context)) { - configs.add(config, contextCache); - } - } - } - let p = config.state; - if (!p.onlyHasEpsilonTransitions) { - configs.add(config, contextCache); - if (ParserATNSimulator3.debug) { - console.log("added config " + configs); - } - } - for (let i = 0; i < p.numberOfOptimizedTransitions; i++) { - if (i === 0 && p.stateType === ATNStateType_1.ATNStateType.STAR_LOOP_ENTRY && p.precedenceRuleDecision && !config.context.hasEmpty) { - let precedenceDecision = p; - let suppress = true; - for (let j = 0; j < config.context.size; j++) { - if (!precedenceDecision.precedenceLoopbackStates.get(config.context.getReturnState(j))) { - suppress = false; - break; - } - } - if (suppress) { - continue; - } - } - let t = p.getOptimizedTransition(i); - let continueCollecting = 
!(t instanceof ActionTransition_1.ActionTransition) && collectPredicates; - let c = this.getEpsilonTarget(config, t, continueCollecting, depth === 0, contextCache, treatEofAsEpsilon); - if (c != null) { - if (t instanceof RuleTransition_1.RuleTransition) { - if (intermediate != null && !collectPredicates) { - intermediate.add(c, contextCache); - continue; - } - } - let newDepth = depth; - if (config.state instanceof RuleStopState_1.RuleStopState) { - if (this.dfa != null && this.dfa.isPrecedenceDfa) { - let outermostPrecedenceReturn = t.outermostPrecedenceReturn; - if (outermostPrecedenceReturn === this.dfa.atnStartState.ruleIndex) { - c.isPrecedenceFilterSuppressed = true; - } - } - c.outerContextDepth = c.outerContextDepth + 1; - if (!closureBusy.add(c)) { - continue; - } - assert(newDepth > MIN_INTEGER_VALUE); - newDepth--; - if (ParserATNSimulator3.debug) { - console.log("dips into outer ctx: " + c); - } - } else if (t instanceof RuleTransition_1.RuleTransition) { - if (this.optimize_tail_calls && t.optimizedTailCall && (!this.tail_call_preserves_sll || !PredictionContext_1.PredictionContext.isEmptyLocal(config.context))) { - assert(c.context === config.context); - if (newDepth === 0) { - newDepth--; - if (!this.tail_call_preserves_sll && PredictionContext_1.PredictionContext.isEmptyLocal(config.context)) { - c.outerContextDepth = c.outerContextDepth + 1; - } - } - } else { - if (newDepth >= 0) { - newDepth++; - } - } - } else { - if (!t.isEpsilon && !closureBusy.add(c)) { - continue; - } - } - this.closureImpl(c, configs, intermediate, closureBusy, continueCollecting, hasMoreContexts, contextCache, newDepth, treatEofAsEpsilon); - } - } - } - getRuleName(index) { - if (this._parser != null && index >= 0) { - return this._parser.ruleNames[index]; - } - return ""; - } - getEpsilonTarget(config, t, collectPredicates, inContext, contextCache, treatEofAsEpsilon) { - switch (t.serializationType) { - case 3: - return this.ruleTransition(config, t, contextCache); - 
case 10: - return this.precedenceTransition(config, t, collectPredicates, inContext); - case 4: - return this.predTransition(config, t, collectPredicates, inContext); - case 6: - return this.actionTransition(config, t); - case 1: - return config.transform(t.target, false); - case 5: - case 2: - case 7: - if (treatEofAsEpsilon) { - if (t.matches(Token_1.Token.EOF, 0, 1)) { - return config.transform(t.target, false); - } - } - return void 0; - default: - return void 0; - } - } - actionTransition(config, t) { - if (ParserATNSimulator3.debug) { - console.log("ACTION edge " + t.ruleIndex + ":" + t.actionIndex); - } - return config.transform(t.target, false); - } - precedenceTransition(config, pt, collectPredicates, inContext) { - if (ParserATNSimulator3.debug) { - console.log("PRED (collectPredicates=" + collectPredicates + ") " + pt.precedence + ">=_p, ctx dependent=true"); - if (this._parser != null) { - console.log("context surrounding pred is " + this._parser.getRuleInvocationStack()); - } - } - let c; - if (collectPredicates && inContext) { - let newSemCtx = SemanticContext_1.SemanticContext.and(config.semanticContext, pt.predicate); - c = config.transform(pt.target, false, newSemCtx); - } else { - c = config.transform(pt.target, false); - } - if (ParserATNSimulator3.debug) { - console.log("config from pred transition=" + c); - } - return c; - } - predTransition(config, pt, collectPredicates, inContext) { - if (ParserATNSimulator3.debug) { - console.log("PRED (collectPredicates=" + collectPredicates + ") " + pt.ruleIndex + ":" + pt.predIndex + ", ctx dependent=" + pt.isCtxDependent); - if (this._parser != null) { - console.log("context surrounding pred is " + this._parser.getRuleInvocationStack()); - } - } - let c; - if (collectPredicates && (!pt.isCtxDependent || pt.isCtxDependent && inContext)) { - let newSemCtx = SemanticContext_1.SemanticContext.and(config.semanticContext, pt.predicate); - c = config.transform(pt.target, false, newSemCtx); - } else { - c = 
config.transform(pt.target, false); - } - if (ParserATNSimulator3.debug) { - console.log("config from pred transition=" + c); - } - return c; - } - ruleTransition(config, t, contextCache) { - if (ParserATNSimulator3.debug) { - console.log("CALL rule " + this.getRuleName(t.target.ruleIndex) + ", ctx=" + config.context); - } - let returnState = t.followState; - let newContext; - if (this.optimize_tail_calls && t.optimizedTailCall && (!this.tail_call_preserves_sll || !PredictionContext_1.PredictionContext.isEmptyLocal(config.context))) { - newContext = config.context; - } else if (contextCache != null) { - newContext = contextCache.getChild(config.context, returnState.stateNumber); - } else { - newContext = config.context.getChild(returnState.stateNumber); - } - return config.transform(t.target, false, newContext); - } - isConflicted(configset, contextCache) { - if (configset.uniqueAlt !== ATN_1.ATN.INVALID_ALT_NUMBER || configset.size <= 1) { - return void 0; - } - let configs = configset.toArray(); - configs.sort(ParserATNSimulator3.STATE_ALT_SORT_COMPARATOR); - let exact = !configset.dipsIntoOuterContext; - let alts = new BitSet_1.BitSet(); - let minAlt = configs[0].alt; - alts.set(minAlt); - let currentState = configs[0].state.nonStopStateNumber; - for (let config of configs) { - let stateNumber = config.state.nonStopStateNumber; - if (stateNumber !== currentState) { - if (config.alt !== minAlt) { - return void 0; - } - currentState = stateNumber; - } - } - let representedAlts; - if (exact) { - currentState = configs[0].state.nonStopStateNumber; - representedAlts = new BitSet_1.BitSet(); - let maxAlt = minAlt; - for (let config of configs) { - if (config.state.nonStopStateNumber !== currentState) { - break; - } - let alt = config.alt; - representedAlts.set(alt); - maxAlt = alt; - } - currentState = configs[0].state.nonStopStateNumber; - let currentAlt = minAlt; - for (let config of configs) { - let stateNumber = config.state.nonStopStateNumber; - let alt = 
config.alt; - if (stateNumber !== currentState) { - if (currentAlt !== maxAlt) { - exact = false; - break; - } - currentState = stateNumber; - currentAlt = minAlt; - } else if (alt !== currentAlt) { - if (alt !== representedAlts.nextSetBit(currentAlt + 1)) { - exact = false; - break; - } - currentAlt = alt; - } - } - } - currentState = configs[0].state.nonStopStateNumber; - let firstIndexCurrentState = 0; - let lastIndexCurrentStateMinAlt = 0; - let joinedCheckContext = configs[0].context; - for (let i = 1; i < configs.length; i++) { - let config = configs[i]; - if (config.alt !== minAlt) { - break; - } - if (config.state.nonStopStateNumber !== currentState) { - break; - } - lastIndexCurrentStateMinAlt = i; - joinedCheckContext = contextCache.join(joinedCheckContext, configs[i].context); - } - for (let i = lastIndexCurrentStateMinAlt + 1; i < configs.length; i++) { - let config = configs[i]; - let state = config.state; - alts.set(config.alt); - if (state.nonStopStateNumber !== currentState) { - currentState = state.nonStopStateNumber; - firstIndexCurrentState = i; - lastIndexCurrentStateMinAlt = i; - joinedCheckContext = config.context; - for (let j = firstIndexCurrentState + 1; j < configs.length; j++) { - let config2 = configs[j]; - if (config2.alt !== minAlt) { - break; - } - if (config2.state.nonStopStateNumber !== currentState) { - break; - } - lastIndexCurrentStateMinAlt = j; - joinedCheckContext = contextCache.join(joinedCheckContext, config2.context); - } - i = lastIndexCurrentStateMinAlt; - continue; - } - let joinedCheckContext2 = config.context; - let currentAlt = config.alt; - let lastIndexCurrentStateCurrentAlt = i; - for (let j = lastIndexCurrentStateCurrentAlt + 1; j < configs.length; j++) { - let config2 = configs[j]; - if (config2.alt !== currentAlt) { - break; - } - if (config2.state.nonStopStateNumber !== currentState) { - break; - } - lastIndexCurrentStateCurrentAlt = j; - joinedCheckContext2 = contextCache.join(joinedCheckContext2, 
config2.context); - } - i = lastIndexCurrentStateCurrentAlt; - let check = contextCache.join(joinedCheckContext, joinedCheckContext2); - if (!joinedCheckContext.equals(check)) { - return void 0; - } - exact = exact && joinedCheckContext.equals(joinedCheckContext2); - } - return new ConflictInfo_1.ConflictInfo(alts, exact); - } - getConflictingAltsFromConfigSet(configs) { - let conflictingAlts = configs.conflictingAlts; - if (conflictingAlts == null && configs.uniqueAlt !== ATN_1.ATN.INVALID_ALT_NUMBER) { - conflictingAlts = new BitSet_1.BitSet(); - conflictingAlts.set(configs.uniqueAlt); - } - return conflictingAlts; - } - getTokenName(t) { - if (t === Token_1.Token.EOF) { - return "EOF"; - } - let vocabulary = this._parser != null ? this._parser.vocabulary : VocabularyImpl_1.VocabularyImpl.EMPTY_VOCABULARY; - let displayName = vocabulary.getDisplayName(t); - if (displayName === String(t)) { - return displayName; - } - return displayName + "<" + t + ">"; - } - getLookaheadName(input) { - return this.getTokenName(input.LA(1)); - } - dumpDeadEndConfigs(nvae) { - console.log("dead end configs: "); - let deadEndConfigs = nvae.deadEndConfigs; - if (!deadEndConfigs) { - return; - } - for (let c of deadEndConfigs) { - let trans = "no edges"; - if (c.state.numberOfOptimizedTransitions > 0) { - let t = c.state.getOptimizedTransition(0); - if (t instanceof AtomTransition_1.AtomTransition) { - trans = "Atom " + this.getTokenName(t._label); - } else if (t instanceof SetTransition_1.SetTransition) { - let not = t instanceof NotSetTransition_1.NotSetTransition; - trans = (not ? 
"~" : "") + "Set " + t.set.toString(); - } - } - console.log(c.toString(this._parser, true) + ":" + trans); - } - } - noViableAlt(input, outerContext, configs, startIndex) { - return new NoViableAltException_1.NoViableAltException(this._parser, input, input.get(startIndex), input.LT(1), configs, outerContext); - } - getUniqueAlt(configs) { - let alt = ATN_1.ATN.INVALID_ALT_NUMBER; - for (let c of configs) { - if (alt === ATN_1.ATN.INVALID_ALT_NUMBER) { - alt = c.alt; - } else if (c.alt !== alt) { - return ATN_1.ATN.INVALID_ALT_NUMBER; - } - } - return alt; - } - configWithAltAtStopState(configs, alt) { - for (let c of configs) { - if (c.alt === alt) { - if (c.state instanceof RuleStopState_1.RuleStopState) { - return true; - } - } - } - return false; - } - addDFAEdge(dfa, fromState, t, contextTransitions, toConfigs, contextCache) { - assert(contextTransitions == null || contextTransitions.isEmpty || dfa.isContextSensitive); - let from = fromState; - let to = this.addDFAState(dfa, toConfigs, contextCache); - if (contextTransitions != null) { - for (let context of contextTransitions.toArray()) { - if (context === PredictionContext_1.PredictionContext.EMPTY_FULL_STATE_KEY) { - if (from.configs.isOutermostConfigSet) { - continue; - } - } - from.setContextSensitive(this.atn); - from.setContextSymbol(t); - let next = from.getContextTarget(context); - if (next != null) { - from = next; - continue; - } - next = this.addDFAContextState(dfa, from.configs, context, contextCache); - assert(context !== PredictionContext_1.PredictionContext.EMPTY_FULL_STATE_KEY || next.configs.isOutermostConfigSet); - from.setContextTarget(context, next); - from = next; - } - } - if (ParserATNSimulator3.debug) { - console.log("EDGE " + from + " -> " + to + " upon " + this.getTokenName(t)); - } - this.setDFAEdge(from, t, to); - if (ParserATNSimulator3.debug) { - console.log("DFA=\n" + dfa.toString(this._parser != null ? 
this._parser.vocabulary : VocabularyImpl_1.VocabularyImpl.EMPTY_VOCABULARY, this._parser != null ? this._parser.ruleNames : void 0)); - } - return to; - } - setDFAEdge(p, t, q) { - if (p != null) { - p.setTarget(t, q); - } - } - addDFAContextState(dfa, configs, returnContext, contextCache) { - if (returnContext !== PredictionContext_1.PredictionContext.EMPTY_FULL_STATE_KEY) { - let contextConfigs = new ATNConfigSet_1.ATNConfigSet(); - for (let config of configs) { - contextConfigs.add(config.appendContext(returnContext, contextCache)); - } - return this.addDFAState(dfa, contextConfigs, contextCache); - } else { - assert(!configs.isOutermostConfigSet, "Shouldn't be adding a duplicate edge."); - configs = configs.clone(true); - configs.isOutermostConfigSet = true; - return this.addDFAState(dfa, configs, contextCache); - } - } - addDFAState(dfa, configs, contextCache) { - let enableDfa = this.enable_global_context_dfa || !configs.isOutermostConfigSet; - if (enableDfa) { - if (!configs.isReadOnly) { - configs.optimizeConfigs(this); - } - let proposed = this.createDFAState(dfa, configs); - let existing = dfa.states.get(proposed); - if (existing != null) { - return existing; - } - } - if (!configs.isReadOnly) { - if (configs.conflictInfo == null) { - configs.conflictInfo = this.isConflicted(configs, contextCache); - } - } - let newState = this.createDFAState(dfa, configs.clone(true)); - let decisionState = this.atn.getDecisionState(dfa.decision); - let predictedAlt = this.getUniqueAlt(configs); - if (predictedAlt !== ATN_1.ATN.INVALID_ALT_NUMBER) { - newState.acceptStateInfo = new AcceptStateInfo_1.AcceptStateInfo(predictedAlt); - } else if (configs.conflictingAlts != null) { - let conflictingAlts = configs.conflictingAlts; - if (conflictingAlts) { - newState.acceptStateInfo = new AcceptStateInfo_1.AcceptStateInfo(conflictingAlts.nextSetBit(0)); - } - } - if (newState.isAcceptState && configs.hasSemanticContext) { - this.predicateDFAState(newState, configs, 
decisionState.numberOfTransitions); - } - if (!enableDfa) { - return newState; - } - let added = dfa.addState(newState); - if (ParserATNSimulator3.debug && added === newState) { - console.log("adding new DFA state: " + newState); - } - return added; - } - createDFAState(dfa, configs) { - return new DFAState_1.DFAState(configs); - } - reportAttemptingFullContext(dfa, conflictingAlts, conflictState, startIndex, stopIndex) { - if (ParserATNSimulator3.debug || ParserATNSimulator3.retry_debug) { - let interval = Interval_1.Interval.of(startIndex, stopIndex); - console.log("reportAttemptingFullContext decision=" + dfa.decision + ":" + conflictState.s0.configs + ", input=" + this._parser.inputStream.getText(interval)); - } - if (this._parser != null) { - let listener = this._parser.getErrorListenerDispatch(); - if (listener.reportAttemptingFullContext) { - listener.reportAttemptingFullContext(this._parser, dfa, startIndex, stopIndex, conflictingAlts, conflictState); - } - } - } - reportContextSensitivity(dfa, prediction, acceptState, startIndex, stopIndex) { - if (ParserATNSimulator3.debug || ParserATNSimulator3.retry_debug) { - let interval = Interval_1.Interval.of(startIndex, stopIndex); - console.log("reportContextSensitivity decision=" + dfa.decision + ":" + acceptState.s0.configs + ", input=" + this._parser.inputStream.getText(interval)); - } - if (this._parser != null) { - let listener = this._parser.getErrorListenerDispatch(); - if (listener.reportContextSensitivity) { - listener.reportContextSensitivity(this._parser, dfa, startIndex, stopIndex, prediction, acceptState); - } - } - } - reportAmbiguity(dfa, D, startIndex, stopIndex, exact, ambigAlts, configs) { - if (ParserATNSimulator3.debug || ParserATNSimulator3.retry_debug) { - let interval = Interval_1.Interval.of(startIndex, stopIndex); - console.log("reportAmbiguity " + ambigAlts + ":" + configs + ", input=" + this._parser.inputStream.getText(interval)); - } - if (this._parser != null) { - let listener = 
this._parser.getErrorListenerDispatch(); - if (listener.reportAmbiguity) { - listener.reportAmbiguity(this._parser, dfa, startIndex, stopIndex, exact, ambigAlts, configs); - } - } - } - getReturnState(context) { - if (context.isEmpty) { - return PredictionContext_1.PredictionContext.EMPTY_FULL_STATE_KEY; - } - let state = this.atn.states[context.invokingState]; - let transition = state.transition(0); - return transition.followState.stateNumber; - } - skipTailCalls(context) { - if (!this.optimize_tail_calls) { - return context; - } - while (!context.isEmpty) { - let state = this.atn.states[context.invokingState]; - assert(state.numberOfTransitions === 1 && state.transition(0).serializationType === 3); - let transition = state.transition(0); - if (!transition.tailCall) { - break; - } - context = context.parent; - } - return context; - } - get parser() { - return this._parser; - } - }; - ParserATNSimulator2.debug = false; - ParserATNSimulator2.dfa_debug = false; - ParserATNSimulator2.retry_debug = false; - ParserATNSimulator2.STATE_ALT_SORT_COMPARATOR = (o1, o2) => { - let diff = o1.state.nonStopStateNumber - o2.state.nonStopStateNumber; - if (diff !== 0) { - return diff; - } - diff = o1.alt - o2.alt; - if (diff !== 0) { - return diff; - } - return 0; - }; - __decorate([ - Decorators_1.NotNull - ], ParserATNSimulator2.prototype, "predictionMode", void 0); - __decorate([ - Decorators_1.NotNull - ], ParserATNSimulator2.prototype, "getPredictionMode", null); - __decorate([ - __param(0, Decorators_1.NotNull) - ], ParserATNSimulator2.prototype, "setPredictionMode", null); - __decorate([ - Decorators_1.Override - ], ParserATNSimulator2.prototype, "reset", null); - __decorate([ - __param(0, Decorators_1.NotNull) - ], ParserATNSimulator2.prototype, "adaptivePredict", null); - __decorate([ - __param(0, Decorators_1.NotNull), - __param(1, Decorators_1.NotNull), - __param(2, Decorators_1.NotNull) - ], ParserATNSimulator2.prototype, "getStartState", null); - __decorate([ - 
__param(0, Decorators_1.NotNull), - __param(1, Decorators_1.NotNull), - __param(3, Decorators_1.NotNull) - ], ParserATNSimulator2.prototype, "execDFA", null); - __decorate([ - __param(0, Decorators_1.NotNull), - __param(1, Decorators_1.NotNull), - __param(3, Decorators_1.NotNull) - ], ParserATNSimulator2.prototype, "execATN", null); - __decorate([ - __param(0, Decorators_1.NotNull), - __param(2, Decorators_1.NotNull) - ], ParserATNSimulator2.prototype, "handleNoViableAlt", null); - __decorate([ - __param(0, Decorators_1.NotNull) - ], ParserATNSimulator2.prototype, "getExistingTargetState", null); - __decorate([ - Decorators_1.NotNull, - __param(0, Decorators_1.NotNull), - __param(1, Decorators_1.NotNull) - ], ParserATNSimulator2.prototype, "computeTargetState", null); - __decorate([ - Decorators_1.NotNull, - __param(0, Decorators_1.NotNull) - ], ParserATNSimulator2.prototype, "removeAllConfigsNotInRuleStopState", null); - __decorate([ - Decorators_1.NotNull - ], ParserATNSimulator2.prototype, "computeStartState", null); - __decorate([ - Decorators_1.NotNull, - __param(0, Decorators_1.NotNull) - ], ParserATNSimulator2.prototype, "applyPrecedenceFilter", null); - __decorate([ - __param(0, Decorators_1.NotNull), - __param(1, Decorators_1.NotNull) - ], ParserATNSimulator2.prototype, "getReachableTarget", null); - __decorate([ - __param(0, Decorators_1.NotNull), - __param(1, Decorators_1.NotNull) - ], ParserATNSimulator2.prototype, "getPredsForAmbigAlts", null); - __decorate([ - __param(0, Decorators_1.NotNull) - ], ParserATNSimulator2.prototype, "evalSemanticContext", null); - __decorate([ - __param(0, Decorators_1.NotNull) - ], ParserATNSimulator2.prototype, "evalSemanticContextImpl", null); - __decorate([ - __param(1, Decorators_1.NotNull), - __param(4, Decorators_1.Nullable) - ], ParserATNSimulator2.prototype, "closure", null); - __decorate([ - __param(0, Decorators_1.NotNull), - __param(1, Decorators_1.NotNull), - __param(2, Decorators_1.Nullable), - __param(3, 
Decorators_1.NotNull), - __param(6, Decorators_1.NotNull) - ], ParserATNSimulator2.prototype, "closureImpl", null); - __decorate([ - Decorators_1.NotNull - ], ParserATNSimulator2.prototype, "getRuleName", null); - __decorate([ - __param(0, Decorators_1.NotNull), - __param(1, Decorators_1.NotNull) - ], ParserATNSimulator2.prototype, "getEpsilonTarget", null); - __decorate([ - Decorators_1.NotNull, - __param(0, Decorators_1.NotNull), - __param(1, Decorators_1.NotNull) - ], ParserATNSimulator2.prototype, "actionTransition", null); - __decorate([ - Decorators_1.Nullable, - __param(0, Decorators_1.NotNull), - __param(1, Decorators_1.NotNull) - ], ParserATNSimulator2.prototype, "precedenceTransition", null); - __decorate([ - Decorators_1.Nullable, - __param(0, Decorators_1.NotNull), - __param(1, Decorators_1.NotNull) - ], ParserATNSimulator2.prototype, "predTransition", null); - __decorate([ - Decorators_1.NotNull, - __param(0, Decorators_1.NotNull), - __param(1, Decorators_1.NotNull), - __param(2, Decorators_1.Nullable) - ], ParserATNSimulator2.prototype, "ruleTransition", null); - __decorate([ - __param(0, Decorators_1.NotNull) - ], ParserATNSimulator2.prototype, "isConflicted", null); - __decorate([ - Decorators_1.NotNull - ], ParserATNSimulator2.prototype, "getTokenName", null); - __decorate([ - __param(0, Decorators_1.NotNull) - ], ParserATNSimulator2.prototype, "dumpDeadEndConfigs", null); - __decorate([ - Decorators_1.NotNull, - __param(0, Decorators_1.NotNull), - __param(1, Decorators_1.NotNull), - __param(2, Decorators_1.NotNull) - ], ParserATNSimulator2.prototype, "noViableAlt", null); - __decorate([ - __param(0, Decorators_1.NotNull) - ], ParserATNSimulator2.prototype, "getUniqueAlt", null); - __decorate([ - __param(0, Decorators_1.NotNull) - ], ParserATNSimulator2.prototype, "configWithAltAtStopState", null); - __decorate([ - Decorators_1.NotNull, - __param(0, Decorators_1.NotNull), - __param(1, Decorators_1.NotNull), - __param(4, Decorators_1.NotNull) - ], 
ParserATNSimulator2.prototype, "addDFAEdge", null); - __decorate([ - __param(0, Decorators_1.Nullable), - __param(2, Decorators_1.Nullable) - ], ParserATNSimulator2.prototype, "setDFAEdge", null); - __decorate([ - Decorators_1.NotNull, - __param(0, Decorators_1.NotNull), - __param(1, Decorators_1.NotNull) - ], ParserATNSimulator2.prototype, "addDFAContextState", null); - __decorate([ - Decorators_1.NotNull, - __param(0, Decorators_1.NotNull), - __param(1, Decorators_1.NotNull) - ], ParserATNSimulator2.prototype, "addDFAState", null); - __decorate([ - Decorators_1.NotNull, - __param(0, Decorators_1.NotNull), - __param(1, Decorators_1.NotNull) - ], ParserATNSimulator2.prototype, "createDFAState", null); - __decorate([ - __param(0, Decorators_1.NotNull), - __param(2, Decorators_1.NotNull) - ], ParserATNSimulator2.prototype, "reportAttemptingFullContext", null); - __decorate([ - __param(0, Decorators_1.NotNull), - __param(2, Decorators_1.NotNull) - ], ParserATNSimulator2.prototype, "reportContextSensitivity", null); - __decorate([ - __param(0, Decorators_1.NotNull), - __param(5, Decorators_1.NotNull), - __param(6, Decorators_1.NotNull) - ], ParserATNSimulator2.prototype, "reportAmbiguity", null); - ParserATNSimulator2 = __decorate([ - __param(0, Decorators_1.NotNull) - ], ParserATNSimulator2); - exports.ParserATNSimulator = ParserATNSimulator2; -}); - -// node_modules/antlr4ts/atn/PlusBlockStartState.js -var require_PlusBlockStartState = __commonJS((exports) => { - "use strict"; - var __decorate = exports && exports.__decorate || function(decorators, target, key, desc) { - var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; - if (typeof Reflect === "object" && typeof Reflect.decorate === "function") - r = Reflect.decorate(decorators, target, key, desc); - else - for (var i = decorators.length - 1; i >= 0; i--) - if (d = decorators[i]) - r = (c < 3 ? d(r) : c > 3 ? 
d(target, key, r) : d(target, key)) || r; - return c > 3 && r && Object.defineProperty(target, key, r), r; - }; - Object.defineProperty(exports, "__esModule", {value: true}); - exports.PlusBlockStartState = void 0; - var ATNStateType_1 = require_ATNStateType(); - var BlockStartState_1 = require_BlockStartState(); - var Decorators_1 = require_Decorators(); - var PlusBlockStartState = class extends BlockStartState_1.BlockStartState { - get stateType() { - return ATNStateType_1.ATNStateType.PLUS_BLOCK_START; - } - }; - __decorate([ - Decorators_1.Override - ], PlusBlockStartState.prototype, "stateType", null); - exports.PlusBlockStartState = PlusBlockStartState; -}); - -// node_modules/antlr4ts/atn/PlusLoopbackState.js -var require_PlusLoopbackState = __commonJS((exports) => { - "use strict"; - var __decorate = exports && exports.__decorate || function(decorators, target, key, desc) { - var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; - if (typeof Reflect === "object" && typeof Reflect.decorate === "function") - r = Reflect.decorate(decorators, target, key, desc); - else - for (var i = decorators.length - 1; i >= 0; i--) - if (d = decorators[i]) - r = (c < 3 ? d(r) : c > 3 ? 
d(target, key, r) : d(target, key)) || r; - return c > 3 && r && Object.defineProperty(target, key, r), r; - }; - Object.defineProperty(exports, "__esModule", {value: true}); - exports.PlusLoopbackState = void 0; - var ATNStateType_1 = require_ATNStateType(); - var DecisionState_1 = require_DecisionState(); - var Decorators_1 = require_Decorators(); - var PlusLoopbackState = class extends DecisionState_1.DecisionState { - get stateType() { - return ATNStateType_1.ATNStateType.PLUS_LOOP_BACK; - } - }; - __decorate([ - Decorators_1.Override - ], PlusLoopbackState.prototype, "stateType", null); - exports.PlusLoopbackState = PlusLoopbackState; -}); - -// node_modules/antlr4ts/atn/PrecedencePredicateTransition.js -var require_PrecedencePredicateTransition = __commonJS((exports) => { - "use strict"; - var __decorate = exports && exports.__decorate || function(decorators, target, key, desc) { - var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; - if (typeof Reflect === "object" && typeof Reflect.decorate === "function") - r = Reflect.decorate(decorators, target, key, desc); - else - for (var i = decorators.length - 1; i >= 0; i--) - if (d = decorators[i]) - r = (c < 3 ? d(r) : c > 3 ? 
d(target, key, r) : d(target, key)) || r; - return c > 3 && r && Object.defineProperty(target, key, r), r; - }; - var __param = exports && exports.__param || function(paramIndex, decorator) { - return function(target, key) { - decorator(target, key, paramIndex); - }; - }; - Object.defineProperty(exports, "__esModule", {value: true}); - exports.PrecedencePredicateTransition = void 0; - var AbstractPredicateTransition_1 = require_AbstractPredicateTransition(); - var Decorators_1 = require_Decorators(); - var SemanticContext_1 = require_SemanticContext(); - var PrecedencePredicateTransition = class PrecedencePredicateTransition extends AbstractPredicateTransition_1.AbstractPredicateTransition { - constructor(target, precedence) { - super(target); - this.precedence = precedence; - } - get serializationType() { - return 10; - } - get isEpsilon() { - return true; - } - matches(symbol, minVocabSymbol, maxVocabSymbol) { - return false; - } - get predicate() { - return new SemanticContext_1.SemanticContext.PrecedencePredicate(this.precedence); - } - toString() { - return this.precedence + " >= _p"; - } - }; - __decorate([ - Decorators_1.Override - ], PrecedencePredicateTransition.prototype, "serializationType", null); - __decorate([ - Decorators_1.Override - ], PrecedencePredicateTransition.prototype, "isEpsilon", null); - __decorate([ - Decorators_1.Override - ], PrecedencePredicateTransition.prototype, "matches", null); - __decorate([ - Decorators_1.Override - ], PrecedencePredicateTransition.prototype, "toString", null); - PrecedencePredicateTransition = __decorate([ - __param(0, Decorators_1.NotNull) - ], PrecedencePredicateTransition); - exports.PrecedencePredicateTransition = PrecedencePredicateTransition; -}); - -// node_modules/antlr4ts/atn/RangeTransition.js -var require_RangeTransition = __commonJS((exports) => { - "use strict"; - var __decorate = exports && exports.__decorate || function(decorators, target, key, desc) { - var c = arguments.length, r = c < 3 ? 
target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; - if (typeof Reflect === "object" && typeof Reflect.decorate === "function") - r = Reflect.decorate(decorators, target, key, desc); - else - for (var i = decorators.length - 1; i >= 0; i--) - if (d = decorators[i]) - r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r; - return c > 3 && r && Object.defineProperty(target, key, r), r; - }; - var __param = exports && exports.__param || function(paramIndex, decorator) { - return function(target, key) { - decorator(target, key, paramIndex); - }; - }; - Object.defineProperty(exports, "__esModule", {value: true}); - exports.RangeTransition = void 0; - var IntervalSet_1 = require_IntervalSet(); - var Decorators_1 = require_Decorators(); - var Transition_1 = require_Transition(); - var RangeTransition = class RangeTransition extends Transition_1.Transition { - constructor(target, from, to) { - super(target); - this.from = from; - this.to = to; - } - get serializationType() { - return 2; - } - get label() { - return IntervalSet_1.IntervalSet.of(this.from, this.to); - } - matches(symbol, minVocabSymbol, maxVocabSymbol) { - return symbol >= this.from && symbol <= this.to; - } - toString() { - return "'" + String.fromCodePoint(this.from) + "'..'" + String.fromCodePoint(this.to) + "'"; - } - }; - __decorate([ - Decorators_1.Override - ], RangeTransition.prototype, "serializationType", null); - __decorate([ - Decorators_1.Override, - Decorators_1.NotNull - ], RangeTransition.prototype, "label", null); - __decorate([ - Decorators_1.Override - ], RangeTransition.prototype, "matches", null); - __decorate([ - Decorators_1.Override, - Decorators_1.NotNull - ], RangeTransition.prototype, "toString", null); - RangeTransition = __decorate([ - __param(0, Decorators_1.NotNull) - ], RangeTransition); - exports.RangeTransition = RangeTransition; -}); - -// node_modules/antlr4ts/atn/RuleStartState.js -var require_RuleStartState = 
__commonJS((exports) => { - "use strict"; - var __decorate = exports && exports.__decorate || function(decorators, target, key, desc) { - var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; - if (typeof Reflect === "object" && typeof Reflect.decorate === "function") - r = Reflect.decorate(decorators, target, key, desc); - else - for (var i = decorators.length - 1; i >= 0; i--) - if (d = decorators[i]) - r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r; - return c > 3 && r && Object.defineProperty(target, key, r), r; - }; - Object.defineProperty(exports, "__esModule", {value: true}); - exports.RuleStartState = void 0; - var ATNState_1 = require_ATNState(); - var ATNStateType_1 = require_ATNStateType(); - var Decorators_1 = require_Decorators(); - var RuleStartState = class extends ATNState_1.ATNState { - constructor() { - super(...arguments); - this.isPrecedenceRule = false; - this.leftFactored = false; - } - get stateType() { - return ATNStateType_1.ATNStateType.RULE_START; - } - }; - __decorate([ - Decorators_1.Override - ], RuleStartState.prototype, "stateType", null); - exports.RuleStartState = RuleStartState; -}); - -// node_modules/antlr4ts/atn/StarBlockStartState.js -var require_StarBlockStartState = __commonJS((exports) => { - "use strict"; - var __decorate = exports && exports.__decorate || function(decorators, target, key, desc) { - var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; - if (typeof Reflect === "object" && typeof Reflect.decorate === "function") - r = Reflect.decorate(decorators, target, key, desc); - else - for (var i = decorators.length - 1; i >= 0; i--) - if (d = decorators[i]) - r = (c < 3 ? d(r) : c > 3 ? 
d(target, key, r) : d(target, key)) || r; - return c > 3 && r && Object.defineProperty(target, key, r), r; - }; - Object.defineProperty(exports, "__esModule", {value: true}); - exports.StarBlockStartState = void 0; - var ATNStateType_1 = require_ATNStateType(); - var BlockStartState_1 = require_BlockStartState(); - var Decorators_1 = require_Decorators(); - var StarBlockStartState = class extends BlockStartState_1.BlockStartState { - get stateType() { - return ATNStateType_1.ATNStateType.STAR_BLOCK_START; - } - }; - __decorate([ - Decorators_1.Override - ], StarBlockStartState.prototype, "stateType", null); - exports.StarBlockStartState = StarBlockStartState; -}); - -// node_modules/antlr4ts/atn/StarLoopbackState.js -var require_StarLoopbackState = __commonJS((exports) => { - "use strict"; - var __decorate = exports && exports.__decorate || function(decorators, target, key, desc) { - var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; - if (typeof Reflect === "object" && typeof Reflect.decorate === "function") - r = Reflect.decorate(decorators, target, key, desc); - else - for (var i = decorators.length - 1; i >= 0; i--) - if (d = decorators[i]) - r = (c < 3 ? d(r) : c > 3 ? 
d(target, key, r) : d(target, key)) || r; - return c > 3 && r && Object.defineProperty(target, key, r), r; - }; - Object.defineProperty(exports, "__esModule", {value: true}); - exports.StarLoopbackState = void 0; - var ATNState_1 = require_ATNState(); - var ATNStateType_1 = require_ATNStateType(); - var Decorators_1 = require_Decorators(); - var StarLoopbackState = class extends ATNState_1.ATNState { - get loopEntryState() { - return this.transition(0).target; - } - get stateType() { - return ATNStateType_1.ATNStateType.STAR_LOOP_BACK; - } - }; - __decorate([ - Decorators_1.Override - ], StarLoopbackState.prototype, "stateType", null); - exports.StarLoopbackState = StarLoopbackState; -}); - -// node_modules/antlr4ts/atn/TokensStartState.js -var require_TokensStartState = __commonJS((exports) => { - "use strict"; - var __decorate = exports && exports.__decorate || function(decorators, target, key, desc) { - var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; - if (typeof Reflect === "object" && typeof Reflect.decorate === "function") - r = Reflect.decorate(decorators, target, key, desc); - else - for (var i = decorators.length - 1; i >= 0; i--) - if (d = decorators[i]) - r = (c < 3 ? d(r) : c > 3 ? 
d(target, key, r) : d(target, key)) || r; - return c > 3 && r && Object.defineProperty(target, key, r), r; - }; - Object.defineProperty(exports, "__esModule", {value: true}); - exports.TokensStartState = void 0; - var ATNStateType_1 = require_ATNStateType(); - var DecisionState_1 = require_DecisionState(); - var Decorators_1 = require_Decorators(); - var TokensStartState = class extends DecisionState_1.DecisionState { - get stateType() { - return ATNStateType_1.ATNStateType.TOKEN_START; - } - }; - __decorate([ - Decorators_1.Override - ], TokensStartState.prototype, "stateType", null); - exports.TokensStartState = TokensStartState; -}); - -// node_modules/antlr4ts/misc/UUID.js -var require_UUID = __commonJS((exports) => { - "use strict"; - Object.defineProperty(exports, "__esModule", {value: true}); - exports.UUID = void 0; - var MurmurHash_1 = require_MurmurHash(); - var UUID = class { - constructor(mostSigBits, moreSigBits, lessSigBits, leastSigBits) { - this.data = new Uint32Array(4); - this.data[0] = mostSigBits; - this.data[1] = moreSigBits; - this.data[2] = lessSigBits; - this.data[3] = leastSigBits; - } - static fromString(data) { - if (!/^[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}$/.test(data)) { - throw new Error("Incorrectly formatted UUID"); - } - let segments = data.split("-"); - let mostSigBits = parseInt(segments[0], 16); - let moreSigBits = (parseInt(segments[1], 16) << 16 >>> 0) + parseInt(segments[2], 16); - let lessSigBits = (parseInt(segments[3], 16) << 16 >>> 0) + parseInt(segments[4].substr(0, 4), 16); - let leastSigBits = parseInt(segments[4].substr(-8), 16); - return new UUID(mostSigBits, moreSigBits, lessSigBits, leastSigBits); - } - hashCode() { - return MurmurHash_1.MurmurHash.hashCode([this.data[0], this.data[1], this.data[2], this.data[3]]); - } - equals(obj) { - if (obj === this) { - return true; - } else if (!(obj instanceof UUID)) { - return false; - } - return this.data[0] === obj.data[0] && 
this.data[1] === obj.data[1] && this.data[2] === obj.data[2] && this.data[3] === obj.data[3]; - } - toString() { - return ("00000000" + this.data[0].toString(16)).substr(-8) + "-" + ("0000" + (this.data[1] >>> 16).toString(16)).substr(-4) + "-" + ("0000" + this.data[1].toString(16)).substr(-4) + "-" + ("0000" + (this.data[2] >>> 16).toString(16)).substr(-4) + "-" + ("0000" + this.data[2].toString(16)).substr(-4) + ("00000000" + this.data[3].toString(16)).substr(-8); - } - }; - exports.UUID = UUID; -}); - -// node_modules/antlr4ts/atn/ATNDeserializer.js -var require_ATNDeserializer = __commonJS((exports) => { - "use strict"; - var __decorate = exports && exports.__decorate || function(decorators, target, key, desc) { - var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; - if (typeof Reflect === "object" && typeof Reflect.decorate === "function") - r = Reflect.decorate(decorators, target, key, desc); - else - for (var i = decorators.length - 1; i >= 0; i--) - if (d = decorators[i]) - r = (c < 3 ? d(r) : c > 3 ? 
d(target, key, r) : d(target, key)) || r; - return c > 3 && r && Object.defineProperty(target, key, r), r; - }; - var __param = exports && exports.__param || function(paramIndex, decorator) { - return function(target, key) { - decorator(target, key, paramIndex); - }; - }; - Object.defineProperty(exports, "__esModule", {value: true}); - exports.ATNDeserializer = void 0; - var ActionTransition_1 = require_ActionTransition(); - var Array2DHashSet_1 = require_Array2DHashSet(); - var ATN_1 = require_ATN(); - var ATNDeserializationOptions_1 = require_ATNDeserializationOptions(); - var ATNStateType_1 = require_ATNStateType(); - var AtomTransition_1 = require_AtomTransition(); - var BasicBlockStartState_1 = require_BasicBlockStartState(); - var BasicState_1 = require_BasicState(); - var BitSet_1 = require_BitSet(); - var BlockEndState_1 = require_BlockEndState(); - var BlockStartState_1 = require_BlockStartState(); - var DecisionState_1 = require_DecisionState(); - var DFA_1 = require_DFA(); - var EpsilonTransition_1 = require_EpsilonTransition(); - var IntervalSet_1 = require_IntervalSet(); - var InvalidState_1 = require_InvalidState(); - var LexerChannelAction_1 = require_LexerChannelAction(); - var LexerCustomAction_1 = require_LexerCustomAction(); - var LexerModeAction_1 = require_LexerModeAction(); - var LexerMoreAction_1 = require_LexerMoreAction(); - var LexerPopModeAction_1 = require_LexerPopModeAction(); - var LexerPushModeAction_1 = require_LexerPushModeAction(); - var LexerSkipAction_1 = require_LexerSkipAction(); - var LexerTypeAction_1 = require_LexerTypeAction(); - var LoopEndState_1 = require_LoopEndState(); - var Decorators_1 = require_Decorators(); - var NotSetTransition_1 = require_NotSetTransition(); - var ParserATNSimulator_1 = require_ParserATNSimulator(); - var PlusBlockStartState_1 = require_PlusBlockStartState(); - var PlusLoopbackState_1 = require_PlusLoopbackState(); - var PrecedencePredicateTransition_1 = require_PrecedencePredicateTransition(); 
- var PredicateTransition_1 = require_PredicateTransition(); - var RangeTransition_1 = require_RangeTransition(); - var RuleStartState_1 = require_RuleStartState(); - var RuleStopState_1 = require_RuleStopState(); - var RuleTransition_1 = require_RuleTransition(); - var SetTransition_1 = require_SetTransition(); - var StarBlockStartState_1 = require_StarBlockStartState(); - var StarLoopbackState_1 = require_StarLoopbackState(); - var StarLoopEntryState_1 = require_StarLoopEntryState(); - var Token_1 = require_Token(); - var TokensStartState_1 = require_TokensStartState(); - var UUID_1 = require_UUID(); - var WildcardTransition_1 = require_WildcardTransition(); - var UnicodeDeserializingMode; - (function(UnicodeDeserializingMode2) { - UnicodeDeserializingMode2[UnicodeDeserializingMode2["UNICODE_BMP"] = 0] = "UNICODE_BMP"; - UnicodeDeserializingMode2[UnicodeDeserializingMode2["UNICODE_SMP"] = 1] = "UNICODE_SMP"; - })(UnicodeDeserializingMode || (UnicodeDeserializingMode = {})); - var ATNDeserializer3 = class { - constructor(deserializationOptions) { - if (deserializationOptions === void 0) { - deserializationOptions = ATNDeserializationOptions_1.ATNDeserializationOptions.defaultOptions; - } - this.deserializationOptions = deserializationOptions; - } - static get SERIALIZED_VERSION() { - return 3; - } - static isFeatureSupported(feature, actualUuid) { - let featureIndex = ATNDeserializer3.SUPPORTED_UUIDS.findIndex((e) => e.equals(feature)); - if (featureIndex < 0) { - return false; - } - return ATNDeserializer3.SUPPORTED_UUIDS.findIndex((e) => e.equals(actualUuid)) >= featureIndex; - } - static getUnicodeDeserializer(mode) { - if (mode === 0) { - return { - readUnicode: (data, p) => { - return ATNDeserializer3.toInt(data[p]); - }, - size: 1 - }; - } else { - return { - readUnicode: (data, p) => { - return ATNDeserializer3.toInt32(data, p); - }, - size: 2 - }; - } - } - deserialize(data) { - data = data.slice(0); - for (let i = 1; i < data.length; i++) { - data[i] = 
data[i] - 2 & 65535; - } - let p = 0; - let version = ATNDeserializer3.toInt(data[p++]); - if (version !== ATNDeserializer3.SERIALIZED_VERSION) { - let reason = `Could not deserialize ATN with version ${version} (expected ${ATNDeserializer3.SERIALIZED_VERSION}).`; - throw new Error(reason); - } - let uuid = ATNDeserializer3.toUUID(data, p); - p += 8; - if (ATNDeserializer3.SUPPORTED_UUIDS.findIndex((e) => e.equals(uuid)) < 0) { - let reason = `Could not deserialize ATN with UUID ${uuid} (expected ${ATNDeserializer3.SERIALIZED_UUID} or a legacy UUID).`; - throw new Error(reason); - } - let supportsLexerActions = ATNDeserializer3.isFeatureSupported(ATNDeserializer3.ADDED_LEXER_ACTIONS, uuid); - let grammarType = ATNDeserializer3.toInt(data[p++]); - let maxTokenType = ATNDeserializer3.toInt(data[p++]); - let atn = new ATN_1.ATN(grammarType, maxTokenType); - let loopBackStateNumbers = []; - let endStateNumbers = []; - let nstates = ATNDeserializer3.toInt(data[p++]); - for (let i = 0; i < nstates; i++) { - let stype = ATNDeserializer3.toInt(data[p++]); - if (stype === ATNStateType_1.ATNStateType.INVALID_TYPE) { - atn.addState(new InvalidState_1.InvalidState()); - continue; - } - let ruleIndex = ATNDeserializer3.toInt(data[p++]); - if (ruleIndex === 65535) { - ruleIndex = -1; - } - let s = this.stateFactory(stype, ruleIndex); - if (stype === ATNStateType_1.ATNStateType.LOOP_END) { - let loopBackStateNumber = ATNDeserializer3.toInt(data[p++]); - loopBackStateNumbers.push([s, loopBackStateNumber]); - } else if (s instanceof BlockStartState_1.BlockStartState) { - let endStateNumber = ATNDeserializer3.toInt(data[p++]); - endStateNumbers.push([s, endStateNumber]); - } - atn.addState(s); - } - for (let pair of loopBackStateNumbers) { - pair[0].loopBackState = atn.states[pair[1]]; - } - for (let pair of endStateNumbers) { - pair[0].endState = atn.states[pair[1]]; - } - let numNonGreedyStates = ATNDeserializer3.toInt(data[p++]); - for (let i = 0; i < numNonGreedyStates; i++) { - 
let stateNumber = ATNDeserializer3.toInt(data[p++]); - atn.states[stateNumber].nonGreedy = true; - } - let numSllDecisions = ATNDeserializer3.toInt(data[p++]); - for (let i = 0; i < numSllDecisions; i++) { - let stateNumber = ATNDeserializer3.toInt(data[p++]); - atn.states[stateNumber].sll = true; - } - let numPrecedenceStates = ATNDeserializer3.toInt(data[p++]); - for (let i = 0; i < numPrecedenceStates; i++) { - let stateNumber = ATNDeserializer3.toInt(data[p++]); - atn.states[stateNumber].isPrecedenceRule = true; - } - let nrules = ATNDeserializer3.toInt(data[p++]); - if (atn.grammarType === 0) { - atn.ruleToTokenType = new Int32Array(nrules); - } - atn.ruleToStartState = new Array(nrules); - for (let i = 0; i < nrules; i++) { - let s = ATNDeserializer3.toInt(data[p++]); - let startState = atn.states[s]; - startState.leftFactored = ATNDeserializer3.toInt(data[p++]) !== 0; - atn.ruleToStartState[i] = startState; - if (atn.grammarType === 0) { - let tokenType = ATNDeserializer3.toInt(data[p++]); - if (tokenType === 65535) { - tokenType = Token_1.Token.EOF; - } - atn.ruleToTokenType[i] = tokenType; - if (!ATNDeserializer3.isFeatureSupported(ATNDeserializer3.ADDED_LEXER_ACTIONS, uuid)) { - let actionIndexIgnored = ATNDeserializer3.toInt(data[p++]); - if (actionIndexIgnored === 65535) { - actionIndexIgnored = -1; - } - } - } - } - atn.ruleToStopState = new Array(nrules); - for (let state of atn.states) { - if (!(state instanceof RuleStopState_1.RuleStopState)) { - continue; - } - atn.ruleToStopState[state.ruleIndex] = state; - atn.ruleToStartState[state.ruleIndex].stopState = state; - } - let nmodes = ATNDeserializer3.toInt(data[p++]); - for (let i = 0; i < nmodes; i++) { - let s = ATNDeserializer3.toInt(data[p++]); - atn.modeToStartState.push(atn.states[s]); - } - atn.modeToDFA = new Array(nmodes); - for (let i = 0; i < nmodes; i++) { - atn.modeToDFA[i] = new DFA_1.DFA(atn.modeToStartState[i]); - } - let sets = []; - p = this.deserializeSets(data, p, sets, 
ATNDeserializer3.getUnicodeDeserializer(0)); - if (ATNDeserializer3.isFeatureSupported(ATNDeserializer3.ADDED_UNICODE_SMP, uuid)) { - p = this.deserializeSets(data, p, sets, ATNDeserializer3.getUnicodeDeserializer(1)); - } - let nedges = ATNDeserializer3.toInt(data[p++]); - for (let i = 0; i < nedges; i++) { - let src = ATNDeserializer3.toInt(data[p]); - let trg = ATNDeserializer3.toInt(data[p + 1]); - let ttype = ATNDeserializer3.toInt(data[p + 2]); - let arg1 = ATNDeserializer3.toInt(data[p + 3]); - let arg2 = ATNDeserializer3.toInt(data[p + 4]); - let arg3 = ATNDeserializer3.toInt(data[p + 5]); - let trans = this.edgeFactory(atn, ttype, src, trg, arg1, arg2, arg3, sets); - let srcState = atn.states[src]; - srcState.addTransition(trans); - p += 6; - } - let returnTransitionsSet = new Array2DHashSet_1.Array2DHashSet({ - hashCode: (o) => o.stopState ^ o.returnState ^ o.outermostPrecedenceReturn, - equals: (a, b) => { - return a.stopState === b.stopState && a.returnState === b.returnState && a.outermostPrecedenceReturn === b.outermostPrecedenceReturn; - } - }); - let returnTransitions = []; - for (let state of atn.states) { - let returningToLeftFactored = state.ruleIndex >= 0 && atn.ruleToStartState[state.ruleIndex].leftFactored; - for (let i = 0; i < state.numberOfTransitions; i++) { - let t = state.transition(i); - if (!(t instanceof RuleTransition_1.RuleTransition)) { - continue; - } - let ruleTransition = t; - let returningFromLeftFactored = atn.ruleToStartState[ruleTransition.target.ruleIndex].leftFactored; - if (!returningFromLeftFactored && returningToLeftFactored) { - continue; - } - let outermostPrecedenceReturn = -1; - if (atn.ruleToStartState[ruleTransition.target.ruleIndex].isPrecedenceRule) { - if (ruleTransition.precedence === 0) { - outermostPrecedenceReturn = ruleTransition.target.ruleIndex; - } - } - let current = {stopState: ruleTransition.target.ruleIndex, returnState: ruleTransition.followState.stateNumber, outermostPrecedenceReturn}; - if 
(returnTransitionsSet.add(current)) { - returnTransitions.push(current); - } - } - } - for (let returnTransition of returnTransitions) { - let transition = new EpsilonTransition_1.EpsilonTransition(atn.states[returnTransition.returnState], returnTransition.outermostPrecedenceReturn); - atn.ruleToStopState[returnTransition.stopState].addTransition(transition); - } - for (let state of atn.states) { - if (state instanceof BlockStartState_1.BlockStartState) { - if (state.endState === void 0) { - throw new Error("IllegalStateException"); - } - if (state.endState.startState !== void 0) { - throw new Error("IllegalStateException"); - } - state.endState.startState = state; - } - if (state instanceof PlusLoopbackState_1.PlusLoopbackState) { - let loopbackState = state; - for (let i = 0; i < loopbackState.numberOfTransitions; i++) { - let target = loopbackState.transition(i).target; - if (target instanceof PlusBlockStartState_1.PlusBlockStartState) { - target.loopBackState = loopbackState; - } - } - } else if (state instanceof StarLoopbackState_1.StarLoopbackState) { - let loopbackState = state; - for (let i = 0; i < loopbackState.numberOfTransitions; i++) { - let target = loopbackState.transition(i).target; - if (target instanceof StarLoopEntryState_1.StarLoopEntryState) { - target.loopBackState = loopbackState; - } - } - } - } - let ndecisions = ATNDeserializer3.toInt(data[p++]); - for (let i = 1; i <= ndecisions; i++) { - let s = ATNDeserializer3.toInt(data[p++]); - let decState = atn.states[s]; - atn.decisionToState.push(decState); - decState.decision = i - 1; - } - if (atn.grammarType === 0) { - if (supportsLexerActions) { - atn.lexerActions = new Array(ATNDeserializer3.toInt(data[p++])); - for (let i = 0; i < atn.lexerActions.length; i++) { - let actionType = ATNDeserializer3.toInt(data[p++]); - let data1 = ATNDeserializer3.toInt(data[p++]); - if (data1 === 65535) { - data1 = -1; - } - let data2 = ATNDeserializer3.toInt(data[p++]); - if (data2 === 65535) { - data2 = 
-1; - } - let lexerAction = this.lexerActionFactory(actionType, data1, data2); - atn.lexerActions[i] = lexerAction; - } - } else { - let legacyLexerActions = []; - for (let state of atn.states) { - for (let i = 0; i < state.numberOfTransitions; i++) { - let transition = state.transition(i); - if (!(transition instanceof ActionTransition_1.ActionTransition)) { - continue; - } - let ruleIndex = transition.ruleIndex; - let actionIndex = transition.actionIndex; - let lexerAction = new LexerCustomAction_1.LexerCustomAction(ruleIndex, actionIndex); - state.setTransition(i, new ActionTransition_1.ActionTransition(transition.target, ruleIndex, legacyLexerActions.length, false)); - legacyLexerActions.push(lexerAction); - } - } - atn.lexerActions = legacyLexerActions; - } - } - this.markPrecedenceDecisions(atn); - atn.decisionToDFA = new Array(ndecisions); - for (let i = 0; i < ndecisions; i++) { - atn.decisionToDFA[i] = new DFA_1.DFA(atn.decisionToState[i], i); - } - if (this.deserializationOptions.isVerifyATN) { - this.verifyATN(atn); - } - if (this.deserializationOptions.isGenerateRuleBypassTransitions && atn.grammarType === 1) { - atn.ruleToTokenType = new Int32Array(atn.ruleToStartState.length); - for (let i = 0; i < atn.ruleToStartState.length; i++) { - atn.ruleToTokenType[i] = atn.maxTokenType + i + 1; - } - for (let i = 0; i < atn.ruleToStartState.length; i++) { - let bypassStart = new BasicBlockStartState_1.BasicBlockStartState(); - bypassStart.ruleIndex = i; - atn.addState(bypassStart); - let bypassStop = new BlockEndState_1.BlockEndState(); - bypassStop.ruleIndex = i; - atn.addState(bypassStop); - bypassStart.endState = bypassStop; - atn.defineDecisionState(bypassStart); - bypassStop.startState = bypassStart; - let endState; - let excludeTransition; - if (atn.ruleToStartState[i].isPrecedenceRule) { - endState = void 0; - for (let state of atn.states) { - if (state.ruleIndex !== i) { - continue; - } - if (!(state instanceof StarLoopEntryState_1.StarLoopEntryState)) 
{ - continue; - } - let maybeLoopEndState = state.transition(state.numberOfTransitions - 1).target; - if (!(maybeLoopEndState instanceof LoopEndState_1.LoopEndState)) { - continue; - } - if (maybeLoopEndState.epsilonOnlyTransitions && maybeLoopEndState.transition(0).target instanceof RuleStopState_1.RuleStopState) { - endState = state; - break; - } - } - if (!endState) { - throw new Error("Couldn't identify final state of the precedence rule prefix section."); - } - excludeTransition = endState.loopBackState.transition(0); - } else { - endState = atn.ruleToStopState[i]; - } - for (let state of atn.states) { - for (let i2 = 0; i2 < state.numberOfTransitions; i2++) { - let transition = state.transition(i2); - if (transition === excludeTransition) { - continue; - } - if (transition.target === endState) { - transition.target = bypassStop; - } - } - } - while (atn.ruleToStartState[i].numberOfTransitions > 0) { - let transition = atn.ruleToStartState[i].removeTransition(atn.ruleToStartState[i].numberOfTransitions - 1); - bypassStart.addTransition(transition); - } - atn.ruleToStartState[i].addTransition(new EpsilonTransition_1.EpsilonTransition(bypassStart)); - bypassStop.addTransition(new EpsilonTransition_1.EpsilonTransition(endState)); - let matchState = new BasicState_1.BasicState(); - atn.addState(matchState); - matchState.addTransition(new AtomTransition_1.AtomTransition(bypassStop, atn.ruleToTokenType[i])); - bypassStart.addTransition(new EpsilonTransition_1.EpsilonTransition(matchState)); - } - if (this.deserializationOptions.isVerifyATN) { - this.verifyATN(atn); - } - } - if (this.deserializationOptions.isOptimize) { - while (true) { - let optimizationCount = 0; - optimizationCount += ATNDeserializer3.inlineSetRules(atn); - optimizationCount += ATNDeserializer3.combineChainedEpsilons(atn); - let preserveOrder = atn.grammarType === 0; - optimizationCount += ATNDeserializer3.optimizeSets(atn, preserveOrder); - if (optimizationCount === 0) { - break; - } - } - if 
(this.deserializationOptions.isVerifyATN) { - this.verifyATN(atn); - } - } - ATNDeserializer3.identifyTailCalls(atn); - return atn; - } - deserializeSets(data, p, sets, unicodeDeserializer) { - let nsets = ATNDeserializer3.toInt(data[p++]); - for (let i = 0; i < nsets; i++) { - let nintervals = ATNDeserializer3.toInt(data[p]); - p++; - let set = new IntervalSet_1.IntervalSet(); - sets.push(set); - let containsEof = ATNDeserializer3.toInt(data[p++]) !== 0; - if (containsEof) { - set.add(-1); - } - for (let j = 0; j < nintervals; j++) { - let a = unicodeDeserializer.readUnicode(data, p); - p += unicodeDeserializer.size; - let b = unicodeDeserializer.readUnicode(data, p); - p += unicodeDeserializer.size; - set.add(a, b); - } - } - return p; - } - markPrecedenceDecisions(atn) { - let rulePrecedenceDecisions = new Map(); - for (let state of atn.states) { - if (!(state instanceof StarLoopEntryState_1.StarLoopEntryState)) { - continue; - } - if (atn.ruleToStartState[state.ruleIndex].isPrecedenceRule) { - let maybeLoopEndState = state.transition(state.numberOfTransitions - 1).target; - if (maybeLoopEndState instanceof LoopEndState_1.LoopEndState) { - if (maybeLoopEndState.epsilonOnlyTransitions && maybeLoopEndState.transition(0).target instanceof RuleStopState_1.RuleStopState) { - rulePrecedenceDecisions.set(state.ruleIndex, state); - state.precedenceRuleDecision = true; - state.precedenceLoopbackStates = new BitSet_1.BitSet(atn.states.length); - } - } - } - } - for (let precedenceDecision of rulePrecedenceDecisions) { - for (let transition of atn.ruleToStopState[precedenceDecision[0]].getTransitions()) { - if (transition.serializationType !== 1) { - continue; - } - let epsilonTransition = transition; - if (epsilonTransition.outermostPrecedenceReturn !== -1) { - continue; - } - precedenceDecision[1].precedenceLoopbackStates.set(transition.target.stateNumber); - } - } - } - verifyATN(atn) { - for (let state of atn.states) { - this.checkCondition(state !== void 0, "ATN 
states should not be undefined."); - if (state.stateType === ATNStateType_1.ATNStateType.INVALID_TYPE) { - continue; - } - this.checkCondition(state.onlyHasEpsilonTransitions || state.numberOfTransitions <= 1); - if (state instanceof PlusBlockStartState_1.PlusBlockStartState) { - this.checkCondition(state.loopBackState !== void 0); - } - if (state instanceof StarLoopEntryState_1.StarLoopEntryState) { - let starLoopEntryState = state; - this.checkCondition(starLoopEntryState.loopBackState !== void 0); - this.checkCondition(starLoopEntryState.numberOfTransitions === 2); - if (starLoopEntryState.transition(0).target instanceof StarBlockStartState_1.StarBlockStartState) { - this.checkCondition(starLoopEntryState.transition(1).target instanceof LoopEndState_1.LoopEndState); - this.checkCondition(!starLoopEntryState.nonGreedy); - } else if (starLoopEntryState.transition(0).target instanceof LoopEndState_1.LoopEndState) { - this.checkCondition(starLoopEntryState.transition(1).target instanceof StarBlockStartState_1.StarBlockStartState); - this.checkCondition(starLoopEntryState.nonGreedy); - } else { - throw new Error("IllegalStateException"); - } - } - if (state instanceof StarLoopbackState_1.StarLoopbackState) { - this.checkCondition(state.numberOfTransitions === 1); - this.checkCondition(state.transition(0).target instanceof StarLoopEntryState_1.StarLoopEntryState); - } - if (state instanceof LoopEndState_1.LoopEndState) { - this.checkCondition(state.loopBackState !== void 0); - } - if (state instanceof RuleStartState_1.RuleStartState) { - this.checkCondition(state.stopState !== void 0); - } - if (state instanceof BlockStartState_1.BlockStartState) { - this.checkCondition(state.endState !== void 0); - } - if (state instanceof BlockEndState_1.BlockEndState) { - this.checkCondition(state.startState !== void 0); - } - if (state instanceof DecisionState_1.DecisionState) { - let decisionState = state; - this.checkCondition(decisionState.numberOfTransitions <= 1 || 
decisionState.decision >= 0); - } else { - this.checkCondition(state.numberOfTransitions <= 1 || state instanceof RuleStopState_1.RuleStopState); - } - } - } - checkCondition(condition, message) { - if (!condition) { - throw new Error("IllegalStateException: " + message); - } - } - static inlineSetRules(atn) { - let inlinedCalls = 0; - let ruleToInlineTransition = new Array(atn.ruleToStartState.length); - for (let i = 0; i < atn.ruleToStartState.length; i++) { - let startState = atn.ruleToStartState[i]; - let middleState = startState; - while (middleState.onlyHasEpsilonTransitions && middleState.numberOfOptimizedTransitions === 1 && middleState.getOptimizedTransition(0).serializationType === 1) { - middleState = middleState.getOptimizedTransition(0).target; - } - if (middleState.numberOfOptimizedTransitions !== 1) { - continue; - } - let matchTransition = middleState.getOptimizedTransition(0); - let matchTarget = matchTransition.target; - if (matchTransition.isEpsilon || !matchTarget.onlyHasEpsilonTransitions || matchTarget.numberOfOptimizedTransitions !== 1 || !(matchTarget.getOptimizedTransition(0).target instanceof RuleStopState_1.RuleStopState)) { - continue; - } - switch (matchTransition.serializationType) { - case 5: - case 2: - case 7: - ruleToInlineTransition[i] = matchTransition; - break; - case 8: - case 9: - continue; - default: - continue; - } - } - for (let state of atn.states) { - if (state.ruleIndex < 0) { - continue; - } - let optimizedTransitions; - for (let i = 0; i < state.numberOfOptimizedTransitions; i++) { - let transition = state.getOptimizedTransition(i); - if (!(transition instanceof RuleTransition_1.RuleTransition)) { - if (optimizedTransitions !== void 0) { - optimizedTransitions.push(transition); - } - continue; - } - let ruleTransition = transition; - let effective = ruleToInlineTransition[ruleTransition.target.ruleIndex]; - if (effective === void 0) { - if (optimizedTransitions !== void 0) { - optimizedTransitions.push(transition); - } 
- continue; - } - if (optimizedTransitions === void 0) { - optimizedTransitions = []; - for (let j = 0; j < i; j++) { - optimizedTransitions.push(state.getOptimizedTransition(i)); - } - } - inlinedCalls++; - let target = ruleTransition.followState; - let intermediateState = new BasicState_1.BasicState(); - intermediateState.setRuleIndex(target.ruleIndex); - atn.addState(intermediateState); - optimizedTransitions.push(new EpsilonTransition_1.EpsilonTransition(intermediateState)); - switch (effective.serializationType) { - case 5: - intermediateState.addTransition(new AtomTransition_1.AtomTransition(target, effective._label)); - break; - case 2: - intermediateState.addTransition(new RangeTransition_1.RangeTransition(target, effective.from, effective.to)); - break; - case 7: - intermediateState.addTransition(new SetTransition_1.SetTransition(target, effective.label)); - break; - default: - throw new Error("UnsupportedOperationException"); - } - } - if (optimizedTransitions !== void 0) { - if (state.isOptimized) { - while (state.numberOfOptimizedTransitions > 0) { - state.removeOptimizedTransition(state.numberOfOptimizedTransitions - 1); - } - } - for (let transition of optimizedTransitions) { - state.addOptimizedTransition(transition); - } - } - } - if (ParserATNSimulator_1.ParserATNSimulator.debug) { - console.log("ATN runtime optimizer removed " + inlinedCalls + " rule invocations by inlining sets."); - } - return inlinedCalls; - } - static combineChainedEpsilons(atn) { - let removedEdges = 0; - for (let state of atn.states) { - if (!state.onlyHasEpsilonTransitions || state instanceof RuleStopState_1.RuleStopState) { - continue; - } - let optimizedTransitions; - nextTransition: - for (let i = 0; i < state.numberOfOptimizedTransitions; i++) { - let transition = state.getOptimizedTransition(i); - let intermediate = transition.target; - if (transition.serializationType !== 1 || transition.outermostPrecedenceReturn !== -1 || intermediate.stateType !== 
ATNStateType_1.ATNStateType.BASIC || !intermediate.onlyHasEpsilonTransitions) { - if (optimizedTransitions !== void 0) { - optimizedTransitions.push(transition); - } - continue nextTransition; - } - for (let j = 0; j < intermediate.numberOfOptimizedTransitions; j++) { - if (intermediate.getOptimizedTransition(j).serializationType !== 1 || intermediate.getOptimizedTransition(j).outermostPrecedenceReturn !== -1) { - if (optimizedTransitions !== void 0) { - optimizedTransitions.push(transition); - } - continue nextTransition; - } - } - removedEdges++; - if (optimizedTransitions === void 0) { - optimizedTransitions = []; - for (let j = 0; j < i; j++) { - optimizedTransitions.push(state.getOptimizedTransition(j)); - } - } - for (let j = 0; j < intermediate.numberOfOptimizedTransitions; j++) { - let target = intermediate.getOptimizedTransition(j).target; - optimizedTransitions.push(new EpsilonTransition_1.EpsilonTransition(target)); - } - } - if (optimizedTransitions !== void 0) { - if (state.isOptimized) { - while (state.numberOfOptimizedTransitions > 0) { - state.removeOptimizedTransition(state.numberOfOptimizedTransitions - 1); - } - } - for (let transition of optimizedTransitions) { - state.addOptimizedTransition(transition); - } - } - } - if (ParserATNSimulator_1.ParserATNSimulator.debug) { - console.log("ATN runtime optimizer removed " + removedEdges + " transitions by combining chained epsilon transitions."); - } - return removedEdges; - } - static optimizeSets(atn, preserveOrder) { - if (preserveOrder) { - return 0; - } - let removedPaths = 0; - let decisions = atn.decisionToState; - for (let decision of decisions) { - let setTransitions = new IntervalSet_1.IntervalSet(); - for (let i = 0; i < decision.numberOfOptimizedTransitions; i++) { - let epsTransition = decision.getOptimizedTransition(i); - if (!(epsTransition instanceof EpsilonTransition_1.EpsilonTransition)) { - continue; - } - if (epsTransition.target.numberOfOptimizedTransitions !== 1) { - continue; - 
} - let transition = epsTransition.target.getOptimizedTransition(0); - if (!(transition.target instanceof BlockEndState_1.BlockEndState)) { - continue; - } - if (transition instanceof NotSetTransition_1.NotSetTransition) { - continue; - } - if (transition instanceof AtomTransition_1.AtomTransition || transition instanceof RangeTransition_1.RangeTransition || transition instanceof SetTransition_1.SetTransition) { - setTransitions.add(i); - } - } - if (setTransitions.size <= 1) { - continue; - } - let optimizedTransitions = []; - for (let i = 0; i < decision.numberOfOptimizedTransitions; i++) { - if (!setTransitions.contains(i)) { - optimizedTransitions.push(decision.getOptimizedTransition(i)); - } - } - let blockEndState = decision.getOptimizedTransition(setTransitions.minElement).target.getOptimizedTransition(0).target; - let matchSet = new IntervalSet_1.IntervalSet(); - for (let interval of setTransitions.intervals) { - for (let j = interval.a; j <= interval.b; j++) { - let matchTransition = decision.getOptimizedTransition(j).target.getOptimizedTransition(0); - if (matchTransition instanceof NotSetTransition_1.NotSetTransition) { - throw new Error("Not yet implemented."); - } else { - matchSet.addAll(matchTransition.label); - } - } - } - let newTransition; - if (matchSet.intervals.length === 1) { - if (matchSet.size === 1) { - newTransition = new AtomTransition_1.AtomTransition(blockEndState, matchSet.minElement); - } else { - let matchInterval = matchSet.intervals[0]; - newTransition = new RangeTransition_1.RangeTransition(blockEndState, matchInterval.a, matchInterval.b); - } - } else { - newTransition = new SetTransition_1.SetTransition(blockEndState, matchSet); - } - let setOptimizedState = new BasicState_1.BasicState(); - setOptimizedState.setRuleIndex(decision.ruleIndex); - atn.addState(setOptimizedState); - setOptimizedState.addTransition(newTransition); - optimizedTransitions.push(new EpsilonTransition_1.EpsilonTransition(setOptimizedState)); - removedPaths 
+= decision.numberOfOptimizedTransitions - optimizedTransitions.length; - if (decision.isOptimized) { - while (decision.numberOfOptimizedTransitions > 0) { - decision.removeOptimizedTransition(decision.numberOfOptimizedTransitions - 1); - } - } - for (let transition of optimizedTransitions) { - decision.addOptimizedTransition(transition); - } - } - if (ParserATNSimulator_1.ParserATNSimulator.debug) { - console.log("ATN runtime optimizer removed " + removedPaths + " paths by collapsing sets."); - } - return removedPaths; - } - static identifyTailCalls(atn) { - for (let state of atn.states) { - for (let i = 0; i < state.numberOfTransitions; i++) { - let transition = state.transition(i); - if (!(transition instanceof RuleTransition_1.RuleTransition)) { - continue; - } - transition.tailCall = this.testTailCall(atn, transition, false); - transition.optimizedTailCall = this.testTailCall(atn, transition, true); - } - if (!state.isOptimized) { - continue; - } - for (let i = 0; i < state.numberOfOptimizedTransitions; i++) { - let transition = state.getOptimizedTransition(i); - if (!(transition instanceof RuleTransition_1.RuleTransition)) { - continue; - } - transition.tailCall = this.testTailCall(atn, transition, false); - transition.optimizedTailCall = this.testTailCall(atn, transition, true); - } - } - } - static testTailCall(atn, transition, optimizedPath) { - if (!optimizedPath && transition.tailCall) { - return true; - } - if (optimizedPath && transition.optimizedTailCall) { - return true; - } - let reachable = new BitSet_1.BitSet(atn.states.length); - let worklist = []; - worklist.push(transition.followState); - while (true) { - let state = worklist.pop(); - if (!state) { - break; - } - if (reachable.get(state.stateNumber)) { - continue; - } - if (state instanceof RuleStopState_1.RuleStopState) { - continue; - } - if (!state.onlyHasEpsilonTransitions) { - return false; - } - let transitionCount = optimizedPath ? 
state.numberOfOptimizedTransitions : state.numberOfTransitions; - for (let i = 0; i < transitionCount; i++) { - let t = optimizedPath ? state.getOptimizedTransition(i) : state.transition(i); - if (t.serializationType !== 1) { - return false; - } - worklist.push(t.target); - } - } - return true; - } - static toInt(c) { - return c; - } - static toInt32(data, offset) { - return (data[offset] | data[offset + 1] << 16) >>> 0; - } - static toUUID(data, offset) { - let leastSigBits = ATNDeserializer3.toInt32(data, offset); - let lessSigBits = ATNDeserializer3.toInt32(data, offset + 2); - let moreSigBits = ATNDeserializer3.toInt32(data, offset + 4); - let mostSigBits = ATNDeserializer3.toInt32(data, offset + 6); - return new UUID_1.UUID(mostSigBits, moreSigBits, lessSigBits, leastSigBits); - } - edgeFactory(atn, type, src, trg, arg1, arg2, arg3, sets) { - let target = atn.states[trg]; - switch (type) { - case 1: - return new EpsilonTransition_1.EpsilonTransition(target); - case 2: - if (arg3 !== 0) { - return new RangeTransition_1.RangeTransition(target, Token_1.Token.EOF, arg2); - } else { - return new RangeTransition_1.RangeTransition(target, arg1, arg2); - } - case 3: - let rt = new RuleTransition_1.RuleTransition(atn.states[arg1], arg2, arg3, target); - return rt; - case 4: - let pt = new PredicateTransition_1.PredicateTransition(target, arg1, arg2, arg3 !== 0); - return pt; - case 10: - return new PrecedencePredicateTransition_1.PrecedencePredicateTransition(target, arg1); - case 5: - if (arg3 !== 0) { - return new AtomTransition_1.AtomTransition(target, Token_1.Token.EOF); - } else { - return new AtomTransition_1.AtomTransition(target, arg1); - } - case 6: - let a = new ActionTransition_1.ActionTransition(target, arg1, arg2, arg3 !== 0); - return a; - case 7: - return new SetTransition_1.SetTransition(target, sets[arg1]); - case 8: - return new NotSetTransition_1.NotSetTransition(target, sets[arg1]); - case 9: - return new 
WildcardTransition_1.WildcardTransition(target); - } - throw new Error("The specified transition type is not valid."); - } - stateFactory(type, ruleIndex) { - let s; - switch (type) { - case ATNStateType_1.ATNStateType.INVALID_TYPE: - return new InvalidState_1.InvalidState(); - case ATNStateType_1.ATNStateType.BASIC: - s = new BasicState_1.BasicState(); - break; - case ATNStateType_1.ATNStateType.RULE_START: - s = new RuleStartState_1.RuleStartState(); - break; - case ATNStateType_1.ATNStateType.BLOCK_START: - s = new BasicBlockStartState_1.BasicBlockStartState(); - break; - case ATNStateType_1.ATNStateType.PLUS_BLOCK_START: - s = new PlusBlockStartState_1.PlusBlockStartState(); - break; - case ATNStateType_1.ATNStateType.STAR_BLOCK_START: - s = new StarBlockStartState_1.StarBlockStartState(); - break; - case ATNStateType_1.ATNStateType.TOKEN_START: - s = new TokensStartState_1.TokensStartState(); - break; - case ATNStateType_1.ATNStateType.RULE_STOP: - s = new RuleStopState_1.RuleStopState(); - break; - case ATNStateType_1.ATNStateType.BLOCK_END: - s = new BlockEndState_1.BlockEndState(); - break; - case ATNStateType_1.ATNStateType.STAR_LOOP_BACK: - s = new StarLoopbackState_1.StarLoopbackState(); - break; - case ATNStateType_1.ATNStateType.STAR_LOOP_ENTRY: - s = new StarLoopEntryState_1.StarLoopEntryState(); - break; - case ATNStateType_1.ATNStateType.PLUS_LOOP_BACK: - s = new PlusLoopbackState_1.PlusLoopbackState(); - break; - case ATNStateType_1.ATNStateType.LOOP_END: - s = new LoopEndState_1.LoopEndState(); - break; - default: - let message = `The specified state type ${type} is not valid.`; - throw new Error(message); - } - s.ruleIndex = ruleIndex; - return s; - } - lexerActionFactory(type, data1, data2) { - switch (type) { - case 0: - return new LexerChannelAction_1.LexerChannelAction(data1); - case 1: - return new LexerCustomAction_1.LexerCustomAction(data1, data2); - case 2: - return new LexerModeAction_1.LexerModeAction(data1); - case 3: - return 
LexerMoreAction_1.LexerMoreAction.INSTANCE; - case 4: - return LexerPopModeAction_1.LexerPopModeAction.INSTANCE; - case 5: - return new LexerPushModeAction_1.LexerPushModeAction(data1); - case 6: - return LexerSkipAction_1.LexerSkipAction.INSTANCE; - case 7: - return new LexerTypeAction_1.LexerTypeAction(data1); - default: - let message = `The specified lexer action type ${type} is not valid.`; - throw new Error(message); - } - } - }; - ATNDeserializer3.BASE_SERIALIZED_UUID = UUID_1.UUID.fromString("E4178468-DF95-44D0-AD87-F22A5D5FB6D3"); - ATNDeserializer3.ADDED_LEXER_ACTIONS = UUID_1.UUID.fromString("AB35191A-1603-487E-B75A-479B831EAF6D"); - ATNDeserializer3.ADDED_UNICODE_SMP = UUID_1.UUID.fromString("C23FEA89-0605-4f51-AFB8-058BCAB8C91B"); - ATNDeserializer3.SUPPORTED_UUIDS = [ - ATNDeserializer3.BASE_SERIALIZED_UUID, - ATNDeserializer3.ADDED_LEXER_ACTIONS, - ATNDeserializer3.ADDED_UNICODE_SMP - ]; - ATNDeserializer3.SERIALIZED_UUID = ATNDeserializer3.ADDED_UNICODE_SMP; - __decorate([ - Decorators_1.NotNull - ], ATNDeserializer3.prototype, "deserializationOptions", void 0); - __decorate([ - __param(0, Decorators_1.NotNull) - ], ATNDeserializer3.prototype, "deserialize", null); - __decorate([ - __param(0, Decorators_1.NotNull) - ], ATNDeserializer3.prototype, "markPrecedenceDecisions", null); - __decorate([ - Decorators_1.NotNull, - __param(0, Decorators_1.NotNull) - ], ATNDeserializer3.prototype, "edgeFactory", null); - exports.ATNDeserializer = ATNDeserializer3; -}); - -// node_modules/antlr4ts/atn/ParseInfo.js -var require_ParseInfo = __commonJS((exports) => { - "use strict"; - var __decorate = exports && exports.__decorate || function(decorators, target, key, desc) { - var c = arguments.length, r = c < 3 ? target : desc === null ? 
desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; - if (typeof Reflect === "object" && typeof Reflect.decorate === "function") - r = Reflect.decorate(decorators, target, key, desc); - else - for (var i = decorators.length - 1; i >= 0; i--) - if (d = decorators[i]) - r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r; - return c > 3 && r && Object.defineProperty(target, key, r), r; - }; - var __param = exports && exports.__param || function(paramIndex, decorator) { - return function(target, key) { - decorator(target, key, paramIndex); - }; - }; - Object.defineProperty(exports, "__esModule", {value: true}); - exports.ParseInfo = void 0; - var Decorators_1 = require_Decorators(); - var ParseInfo = class ParseInfo { - constructor(atnSimulator) { - this.atnSimulator = atnSimulator; - } - getDecisionInfo() { - return this.atnSimulator.getDecisionInfo(); - } - getLLDecisions() { - let decisions = this.atnSimulator.getDecisionInfo(); - let LL = []; - for (let i = 0; i < decisions.length; i++) { - let fallBack = decisions[i].LL_Fallback; - if (fallBack > 0) { - LL.push(i); - } - } - return LL; - } - getTotalTimeInPrediction() { - let decisions = this.atnSimulator.getDecisionInfo(); - let t = 0; - for (let decision of decisions) { - t += decision.timeInPrediction; - } - return t; - } - getTotalSLLLookaheadOps() { - let decisions = this.atnSimulator.getDecisionInfo(); - let k = 0; - for (let decision of decisions) { - k += decision.SLL_TotalLook; - } - return k; - } - getTotalLLLookaheadOps() { - let decisions = this.atnSimulator.getDecisionInfo(); - let k = 0; - for (let decision of decisions) { - k += decision.LL_TotalLook; - } - return k; - } - getTotalSLLATNLookaheadOps() { - let decisions = this.atnSimulator.getDecisionInfo(); - let k = 0; - for (let decision of decisions) { - k += decision.SLL_ATNTransitions; - } - return k; - } - getTotalLLATNLookaheadOps() { - let decisions = this.atnSimulator.getDecisionInfo(); - let k = 0; - for (let 
decision of decisions) { - k += decision.LL_ATNTransitions; - } - return k; - } - getTotalATNLookaheadOps() { - let decisions = this.atnSimulator.getDecisionInfo(); - let k = 0; - for (let decision of decisions) { - k += decision.SLL_ATNTransitions; - k += decision.LL_ATNTransitions; - } - return k; - } - getDFASize(decision) { - if (decision) { - let decisionToDFA = this.atnSimulator.atn.decisionToDFA[decision]; - return decisionToDFA.states.size; - } else { - let n = 0; - let decisionToDFA = this.atnSimulator.atn.decisionToDFA; - for (let i = 0; i < decisionToDFA.length; i++) { - n += this.getDFASize(i); - } - return n; - } - } - }; - __decorate([ - Decorators_1.NotNull - ], ParseInfo.prototype, "getDecisionInfo", null); - __decorate([ - Decorators_1.NotNull - ], ParseInfo.prototype, "getLLDecisions", null); - ParseInfo = __decorate([ - __param(0, Decorators_1.NotNull) - ], ParseInfo); - exports.ParseInfo = ParseInfo; -}); - -// node_modules/antlr4ts/ProxyParserErrorListener.js -var require_ProxyParserErrorListener = __commonJS((exports) => { - "use strict"; - var __decorate = exports && exports.__decorate || function(decorators, target, key, desc) { - var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; - if (typeof Reflect === "object" && typeof Reflect.decorate === "function") - r = Reflect.decorate(decorators, target, key, desc); - else - for (var i = decorators.length - 1; i >= 0; i--) - if (d = decorators[i]) - r = (c < 3 ? d(r) : c > 3 ? 
d(target, key, r) : d(target, key)) || r; - return c > 3 && r && Object.defineProperty(target, key, r), r; - }; - Object.defineProperty(exports, "__esModule", {value: true}); - exports.ProxyParserErrorListener = void 0; - var ProxyErrorListener_1 = require_ProxyErrorListener(); - var Decorators_1 = require_Decorators(); - var ProxyParserErrorListener = class extends ProxyErrorListener_1.ProxyErrorListener { - constructor(delegates) { - super(delegates); - } - reportAmbiguity(recognizer, dfa, startIndex, stopIndex, exact, ambigAlts, configs) { - this.getDelegates().forEach((listener) => { - if (listener.reportAmbiguity) { - listener.reportAmbiguity(recognizer, dfa, startIndex, stopIndex, exact, ambigAlts, configs); - } - }); - } - reportAttemptingFullContext(recognizer, dfa, startIndex, stopIndex, conflictingAlts, conflictState) { - this.getDelegates().forEach((listener) => { - if (listener.reportAttemptingFullContext) { - listener.reportAttemptingFullContext(recognizer, dfa, startIndex, stopIndex, conflictingAlts, conflictState); - } - }); - } - reportContextSensitivity(recognizer, dfa, startIndex, stopIndex, prediction, acceptState) { - this.getDelegates().forEach((listener) => { - if (listener.reportContextSensitivity) { - listener.reportContextSensitivity(recognizer, dfa, startIndex, stopIndex, prediction, acceptState); - } - }); - } - }; - __decorate([ - Decorators_1.Override - ], ProxyParserErrorListener.prototype, "reportAmbiguity", null); - __decorate([ - Decorators_1.Override - ], ProxyParserErrorListener.prototype, "reportAttemptingFullContext", null); - __decorate([ - Decorators_1.Override - ], ProxyParserErrorListener.prototype, "reportContextSensitivity", null); - exports.ProxyParserErrorListener = ProxyParserErrorListener; -}); - -// node_modules/antlr4ts/misc/Character.js -var require_Character = __commonJS((exports) => { - "use strict"; - Object.defineProperty(exports, "__esModule", {value: true}); - exports.isSupplementaryCodePoint = 
exports.isLowSurrogate = exports.isHighSurrogate = void 0; - function isHighSurrogate(ch) { - return ch >= 55296 && ch <= 56319; - } - exports.isHighSurrogate = isHighSurrogate; - function isLowSurrogate(ch) { - return ch >= 56320 && ch <= 57343; - } - exports.isLowSurrogate = isLowSurrogate; - function isSupplementaryCodePoint(ch) { - return ch >= 65536; - } - exports.isSupplementaryCodePoint = isSupplementaryCodePoint; -}); - -// node_modules/antlr4ts/CodePointBuffer.js -var require_CodePointBuffer = __commonJS((exports) => { - "use strict"; - Object.defineProperty(exports, "__esModule", {value: true}); - exports.CodePointBuffer = void 0; - var assert = require("assert"); - var Character = require_Character(); - var CodePointBuffer = class { - constructor(buffer, size) { - this.buffer = buffer; - this._position = 0; - this._size = size; - } - static withArray(buffer) { - return new CodePointBuffer(buffer, buffer.length); - } - get position() { - return this._position; - } - set position(newPosition) { - if (newPosition < 0 || newPosition > this._size) { - throw new RangeError(); - } - this._position = newPosition; - } - get remaining() { - return this._size - this.position; - } - get(offset) { - return this.buffer[offset]; - } - array() { - return this.buffer.slice(0, this._size); - } - static builder(initialBufferSize) { - return new CodePointBuffer.Builder(initialBufferSize); - } - }; - exports.CodePointBuffer = CodePointBuffer; - (function(CodePointBuffer2) { - let Type; - (function(Type2) { - Type2[Type2["BYTE"] = 0] = "BYTE"; - Type2[Type2["CHAR"] = 1] = "CHAR"; - Type2[Type2["INT"] = 2] = "INT"; - })(Type || (Type = {})); - class Builder { - constructor(initialBufferSize) { - this.type = 0; - this.buffer = new Uint8Array(initialBufferSize); - this.prevHighSurrogate = -1; - this.position = 0; - } - build() { - return new CodePointBuffer2(this.buffer, this.position); - } - static roundUpToNextPowerOfTwo(i) { - let nextPowerOfTwo = 32 - Math.clz32(i - 1); - 
return Math.pow(2, nextPowerOfTwo); - } - ensureRemaining(remainingNeeded) { - switch (this.type) { - case 0: - if (this.buffer.length - this.position < remainingNeeded) { - let newCapacity = Builder.roundUpToNextPowerOfTwo(this.buffer.length + remainingNeeded); - let newBuffer = new Uint8Array(newCapacity); - newBuffer.set(this.buffer.subarray(0, this.position), 0); - this.buffer = newBuffer; - } - break; - case 1: - if (this.buffer.length - this.position < remainingNeeded) { - let newCapacity = Builder.roundUpToNextPowerOfTwo(this.buffer.length + remainingNeeded); - let newBuffer = new Uint16Array(newCapacity); - newBuffer.set(this.buffer.subarray(0, this.position), 0); - this.buffer = newBuffer; - } - break; - case 2: - if (this.buffer.length - this.position < remainingNeeded) { - let newCapacity = Builder.roundUpToNextPowerOfTwo(this.buffer.length + remainingNeeded); - let newBuffer = new Int32Array(newCapacity); - newBuffer.set(this.buffer.subarray(0, this.position), 0); - this.buffer = newBuffer; - } - break; - } - } - append(utf16In) { - this.ensureRemaining(utf16In.length); - this.appendArray(utf16In); - } - appendArray(utf16In) { - switch (this.type) { - case 0: - this.appendArrayByte(utf16In); - break; - case 1: - this.appendArrayChar(utf16In); - break; - case 2: - this.appendArrayInt(utf16In); - break; - } - } - appendArrayByte(utf16In) { - assert(this.prevHighSurrogate === -1); - let input = utf16In; - let inOffset = 0; - let inLimit = utf16In.length; - let outByte = this.buffer; - let outOffset = this.position; - while (inOffset < inLimit) { - let c = input[inOffset]; - if (c <= 255) { - outByte[outOffset] = c; - } else { - utf16In = utf16In.subarray(inOffset, inLimit); - this.position = outOffset; - if (!Character.isHighSurrogate(c)) { - this.byteToCharBuffer(utf16In.length); - this.appendArrayChar(utf16In); - return; - } else { - this.byteToIntBuffer(utf16In.length); - this.appendArrayInt(utf16In); - return; - } - } - inOffset++; - outOffset++; - } - 
this.position = outOffset; - } - appendArrayChar(utf16In) { - assert(this.prevHighSurrogate === -1); - let input = utf16In; - let inOffset = 0; - let inLimit = utf16In.length; - let outChar = this.buffer; - let outOffset = this.position; - while (inOffset < inLimit) { - let c = input[inOffset]; - if (!Character.isHighSurrogate(c)) { - outChar[outOffset] = c; - } else { - utf16In = utf16In.subarray(inOffset, inLimit); - this.position = outOffset; - this.charToIntBuffer(utf16In.length); - this.appendArrayInt(utf16In); - return; - } - inOffset++; - outOffset++; - } - this.position = outOffset; - } - appendArrayInt(utf16In) { - let input = utf16In; - let inOffset = 0; - let inLimit = utf16In.length; - let outInt = this.buffer; - let outOffset = this.position; - while (inOffset < inLimit) { - let c = input[inOffset]; - inOffset++; - if (this.prevHighSurrogate !== -1) { - if (Character.isLowSurrogate(c)) { - outInt[outOffset] = String.fromCharCode(this.prevHighSurrogate, c).codePointAt(0); - outOffset++; - this.prevHighSurrogate = -1; - } else { - outInt[outOffset] = this.prevHighSurrogate; - outOffset++; - if (Character.isHighSurrogate(c)) { - this.prevHighSurrogate = c; - } else { - outInt[outOffset] = c; - outOffset++; - this.prevHighSurrogate = -1; - } - } - } else if (Character.isHighSurrogate(c)) { - this.prevHighSurrogate = c; - } else { - outInt[outOffset] = c; - outOffset++; - } - } - if (this.prevHighSurrogate !== -1) { - outInt[outOffset] = this.prevHighSurrogate; - outOffset++; - } - this.position = outOffset; - } - byteToCharBuffer(toAppend) { - let newBuffer = new Uint16Array(Math.max(this.position + toAppend, this.buffer.length >> 1)); - newBuffer.set(this.buffer.subarray(0, this.position), 0); - this.type = 1; - this.buffer = newBuffer; - } - byteToIntBuffer(toAppend) { - let newBuffer = new Int32Array(Math.max(this.position + toAppend, this.buffer.length >> 2)); - newBuffer.set(this.buffer.subarray(0, this.position), 0); - this.type = 2; - this.buffer = 
newBuffer; - } - charToIntBuffer(toAppend) { - let newBuffer = new Int32Array(Math.max(this.position + toAppend, this.buffer.length >> 1)); - newBuffer.set(this.buffer.subarray(0, this.position), 0); - this.type = 2; - this.buffer = newBuffer; - } - } - CodePointBuffer2.Builder = Builder; - })(CodePointBuffer = exports.CodePointBuffer || (exports.CodePointBuffer = {})); -}); - -// node_modules/antlr4ts/CodePointCharStream.js -var require_CodePointCharStream = __commonJS((exports) => { - "use strict"; - var __decorate = exports && exports.__decorate || function(decorators, target, key, desc) { - var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; - if (typeof Reflect === "object" && typeof Reflect.decorate === "function") - r = Reflect.decorate(decorators, target, key, desc); - else - for (var i = decorators.length - 1; i >= 0; i--) - if (d = decorators[i]) - r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r; - return c > 3 && r && Object.defineProperty(target, key, r), r; - }; - Object.defineProperty(exports, "__esModule", {value: true}); - exports.CodePointCharStream = void 0; - var assert = require("assert"); - var IntStream_1 = require_IntStream(); - var Interval_1 = require_Interval(); - var Decorators_1 = require_Decorators(); - var CodePointCharStream = class { - constructor(array, position, remaining, name) { - assert(position === 0); - this._array = array; - this._size = remaining; - this._name = name; - this._position = 0; - } - get internalStorage() { - return this._array; - } - static fromBuffer(codePointBuffer, name) { - if (name === void 0 || name.length === 0) { - name = IntStream_1.IntStream.UNKNOWN_SOURCE_NAME; - } - return new CodePointCharStream(codePointBuffer.array(), codePointBuffer.position, codePointBuffer.remaining, name); - } - consume() { - if (this._size - this._position === 0) { - assert(this.LA(1) === IntStream_1.IntStream.EOF); - throw new 
RangeError("cannot consume EOF"); - } - this._position++; - } - get index() { - return this._position; - } - get size() { - return this._size; - } - mark() { - return -1; - } - release(marker) { - } - seek(index) { - this._position = index; - } - get sourceName() { - return this._name; - } - toString() { - return this.getText(Interval_1.Interval.of(0, this.size - 1)); - } - LA(i) { - let offset; - switch (Math.sign(i)) { - case -1: - offset = this.index + i; - if (offset < 0) { - return IntStream_1.IntStream.EOF; - } - return this._array[offset]; - case 0: - return 0; - case 1: - offset = this.index + i - 1; - if (offset >= this.size) { - return IntStream_1.IntStream.EOF; - } - return this._array[offset]; - } - throw new RangeError("Not reached"); - } - getText(interval) { - const startIdx = Math.min(interval.a, this.size); - const len = Math.min(interval.b - interval.a + 1, this.size - startIdx); - if (this._array instanceof Int32Array) { - return String.fromCodePoint(...Array.from(this._array.subarray(startIdx, startIdx + len))); - } else { - return String.fromCharCode(...Array.from(this._array.subarray(startIdx, startIdx + len))); - } - } - }; - __decorate([ - Decorators_1.Override - ], CodePointCharStream.prototype, "consume", null); - __decorate([ - Decorators_1.Override - ], CodePointCharStream.prototype, "index", null); - __decorate([ - Decorators_1.Override - ], CodePointCharStream.prototype, "size", null); - __decorate([ - Decorators_1.Override - ], CodePointCharStream.prototype, "mark", null); - __decorate([ - Decorators_1.Override - ], CodePointCharStream.prototype, "release", null); - __decorate([ - Decorators_1.Override - ], CodePointCharStream.prototype, "seek", null); - __decorate([ - Decorators_1.Override - ], CodePointCharStream.prototype, "sourceName", null); - __decorate([ - Decorators_1.Override - ], CodePointCharStream.prototype, "toString", null); - __decorate([ - Decorators_1.Override - ], CodePointCharStream.prototype, "LA", null); - 
__decorate([ - Decorators_1.Override - ], CodePointCharStream.prototype, "getText", null); - exports.CodePointCharStream = CodePointCharStream; -}); - -// node_modules/antlr4ts/CharStreams.js -var require_CharStreams = __commonJS((exports) => { - "use strict"; - Object.defineProperty(exports, "__esModule", {value: true}); - exports.CharStreams = void 0; - var CodePointBuffer_1 = require_CodePointBuffer(); - var CodePointCharStream_1 = require_CodePointCharStream(); - var IntStream_1 = require_IntStream(); - var CharStreams; - (function(CharStreams2) { - function fromString(s, sourceName) { - if (sourceName === void 0 || sourceName.length === 0) { - sourceName = IntStream_1.IntStream.UNKNOWN_SOURCE_NAME; - } - let codePointBufferBuilder = CodePointBuffer_1.CodePointBuffer.builder(s.length); - let cb = new Uint16Array(s.length); - for (let i = 0; i < s.length; i++) { - cb[i] = s.charCodeAt(i); - } - codePointBufferBuilder.append(cb); - return CodePointCharStream_1.CodePointCharStream.fromBuffer(codePointBufferBuilder.build(), sourceName); - } - CharStreams2.fromString = fromString; - })(CharStreams = exports.CharStreams || (exports.CharStreams = {})); -}); - -// node_modules/antlr4ts/BufferedTokenStream.js -var require_BufferedTokenStream = __commonJS((exports) => { - "use strict"; - var __decorate = exports && exports.__decorate || function(decorators, target, key, desc) { - var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; - if (typeof Reflect === "object" && typeof Reflect.decorate === "function") - r = Reflect.decorate(decorators, target, key, desc); - else - for (var i = decorators.length - 1; i >= 0; i--) - if (d = decorators[i]) - r = (c < 3 ? d(r) : c > 3 ? 
d(target, key, r) : d(target, key)) || r; - return c > 3 && r && Object.defineProperty(target, key, r), r; - }; - var __param = exports && exports.__param || function(paramIndex, decorator) { - return function(target, key) { - decorator(target, key, paramIndex); - }; - }; - Object.defineProperty(exports, "__esModule", {value: true}); - exports.BufferedTokenStream = void 0; - var assert = require("assert"); - var CommonToken_1 = require_CommonToken(); - var Interval_1 = require_Interval(); - var Lexer_1 = require_Lexer(); - var Decorators_1 = require_Decorators(); - var Token_1 = require_Token(); - var BufferedTokenStream = class BufferedTokenStream { - constructor(tokenSource) { - this.tokens = []; - this.p = -1; - this.fetchedEOF = false; - if (tokenSource == null) { - throw new Error("tokenSource cannot be null"); - } - this._tokenSource = tokenSource; - } - get tokenSource() { - return this._tokenSource; - } - set tokenSource(tokenSource) { - this._tokenSource = tokenSource; - this.tokens.length = 0; - this.p = -1; - this.fetchedEOF = false; - } - get index() { - return this.p; - } - mark() { - return 0; - } - release(marker) { - } - seek(index) { - this.lazyInit(); - this.p = this.adjustSeekIndex(index); - } - get size() { - return this.tokens.length; - } - consume() { - let skipEofCheck; - if (this.p >= 0) { - if (this.fetchedEOF) { - skipEofCheck = this.p < this.tokens.length - 1; - } else { - skipEofCheck = this.p < this.tokens.length; - } - } else { - skipEofCheck = false; - } - if (!skipEofCheck && this.LA(1) === Token_1.Token.EOF) { - throw new Error("cannot consume EOF"); - } - if (this.sync(this.p + 1)) { - this.p = this.adjustSeekIndex(this.p + 1); - } - } - sync(i) { - assert(i >= 0); - let n = i - this.tokens.length + 1; - if (n > 0) { - let fetched = this.fetch(n); - return fetched >= n; - } - return true; - } - fetch(n) { - if (this.fetchedEOF) { - return 0; - } - for (let i = 0; i < n; i++) { - let t = this.tokenSource.nextToken(); - if 
(this.isWritableToken(t)) { - t.tokenIndex = this.tokens.length; - } - this.tokens.push(t); - if (t.type === Token_1.Token.EOF) { - this.fetchedEOF = true; - return i + 1; - } - } - return n; - } - get(i) { - if (i < 0 || i >= this.tokens.length) { - throw new RangeError("token index " + i + " out of range 0.." + (this.tokens.length - 1)); - } - return this.tokens[i]; - } - getRange(start, stop) { - if (start < 0 || stop < 0) { - return []; - } - this.lazyInit(); - let subset = new Array(); - if (stop >= this.tokens.length) { - stop = this.tokens.length - 1; - } - for (let i = start; i <= stop; i++) { - let t = this.tokens[i]; - if (t.type === Token_1.Token.EOF) { - break; - } - subset.push(t); - } - return subset; - } - LA(i) { - let token = this.LT(i); - if (!token) { - return Token_1.Token.INVALID_TYPE; - } - return token.type; - } - tryLB(k) { - if (this.p - k < 0) { - return void 0; - } - return this.tokens[this.p - k]; - } - LT(k) { - let result = this.tryLT(k); - if (result === void 0) { - throw new RangeError("requested lookback index out of range"); - } - return result; - } - tryLT(k) { - this.lazyInit(); - if (k === 0) { - throw new RangeError("0 is not a valid lookahead index"); - } - if (k < 0) { - return this.tryLB(-k); - } - let i = this.p + k - 1; - this.sync(i); - if (i >= this.tokens.length) { - return this.tokens[this.tokens.length - 1]; - } - return this.tokens[i]; - } - adjustSeekIndex(i) { - return i; - } - lazyInit() { - if (this.p === -1) { - this.setup(); - } - } - setup() { - this.sync(0); - this.p = this.adjustSeekIndex(0); - } - getTokens(start, stop, types) { - this.lazyInit(); - if (start === void 0) { - assert(stop === void 0 && types === void 0); - return this.tokens; - } else if (stop === void 0) { - stop = this.tokens.length - 1; - } - if (start < 0 || stop >= this.tokens.length || stop < 0 || start >= this.tokens.length) { - throw new RangeError("start " + start + " or stop " + stop + " not in 0.." 
+ (this.tokens.length - 1)); - } - if (start > stop) { - return []; - } - if (types === void 0) { - return this.tokens.slice(start, stop + 1); - } else if (typeof types === "number") { - types = new Set().add(types); - } - let typesSet = types; - let filteredTokens = this.tokens.slice(start, stop + 1); - filteredTokens = filteredTokens.filter((value) => typesSet.has(value.type)); - return filteredTokens; - } - nextTokenOnChannel(i, channel) { - this.sync(i); - if (i >= this.size) { - return this.size - 1; - } - let token = this.tokens[i]; - while (token.channel !== channel) { - if (token.type === Token_1.Token.EOF) { - return i; - } - i++; - this.sync(i); - token = this.tokens[i]; - } - return i; - } - previousTokenOnChannel(i, channel) { - this.sync(i); - if (i >= this.size) { - return this.size - 1; - } - while (i >= 0) { - let token = this.tokens[i]; - if (token.type === Token_1.Token.EOF || token.channel === channel) { - return i; - } - i--; - } - return i; - } - getHiddenTokensToRight(tokenIndex, channel = -1) { - this.lazyInit(); - if (tokenIndex < 0 || tokenIndex >= this.tokens.length) { - throw new RangeError(tokenIndex + " not in 0.." + (this.tokens.length - 1)); - } - let nextOnChannel = this.nextTokenOnChannel(tokenIndex + 1, Lexer_1.Lexer.DEFAULT_TOKEN_CHANNEL); - let to; - let from = tokenIndex + 1; - if (nextOnChannel === -1) { - to = this.size - 1; - } else { - to = nextOnChannel; - } - return this.filterForChannel(from, to, channel); - } - getHiddenTokensToLeft(tokenIndex, channel = -1) { - this.lazyInit(); - if (tokenIndex < 0 || tokenIndex >= this.tokens.length) { - throw new RangeError(tokenIndex + " not in 0.." 
+ (this.tokens.length - 1)); - } - if (tokenIndex === 0) { - return []; - } - let prevOnChannel = this.previousTokenOnChannel(tokenIndex - 1, Lexer_1.Lexer.DEFAULT_TOKEN_CHANNEL); - if (prevOnChannel === tokenIndex - 1) { - return []; - } - let from = prevOnChannel + 1; - let to = tokenIndex - 1; - return this.filterForChannel(from, to, channel); - } - filterForChannel(from, to, channel) { - let hidden = new Array(); - for (let i = from; i <= to; i++) { - let t = this.tokens[i]; - if (channel === -1) { - if (t.channel !== Lexer_1.Lexer.DEFAULT_TOKEN_CHANNEL) { - hidden.push(t); - } - } else { - if (t.channel === channel) { - hidden.push(t); - } - } - } - return hidden; - } - get sourceName() { - return this.tokenSource.sourceName; - } - getText(interval) { - if (interval === void 0) { - interval = Interval_1.Interval.of(0, this.size - 1); - } else if (!(interval instanceof Interval_1.Interval)) { - interval = interval.sourceInterval; - } - let start = interval.a; - let stop = interval.b; - if (start < 0 || stop < 0) { - return ""; - } - this.fill(); - if (stop >= this.tokens.length) { - stop = this.tokens.length - 1; - } - let buf = ""; - for (let i = start; i <= stop; i++) { - let t = this.tokens[i]; - if (t.type === Token_1.Token.EOF) { - break; - } - buf += t.text; - } - return buf.toString(); - } - getTextFromRange(start, stop) { - if (this.isToken(start) && this.isToken(stop)) { - return this.getText(Interval_1.Interval.of(start.tokenIndex, stop.tokenIndex)); - } - return ""; - } - fill() { - this.lazyInit(); - const blockSize = 1e3; - while (true) { - let fetched = this.fetch(blockSize); - if (fetched < blockSize) { - return; - } - } - } - isWritableToken(t) { - return t instanceof CommonToken_1.CommonToken; - } - isToken(t) { - return t instanceof CommonToken_1.CommonToken; - } - }; - __decorate([ - Decorators_1.NotNull - ], BufferedTokenStream.prototype, "_tokenSource", void 0); - __decorate([ - Decorators_1.Override - ], BufferedTokenStream.prototype, 
"tokenSource", null); - __decorate([ - Decorators_1.Override - ], BufferedTokenStream.prototype, "index", null); - __decorate([ - Decorators_1.Override - ], BufferedTokenStream.prototype, "mark", null); - __decorate([ - Decorators_1.Override - ], BufferedTokenStream.prototype, "release", null); - __decorate([ - Decorators_1.Override - ], BufferedTokenStream.prototype, "seek", null); - __decorate([ - Decorators_1.Override - ], BufferedTokenStream.prototype, "size", null); - __decorate([ - Decorators_1.Override - ], BufferedTokenStream.prototype, "consume", null); - __decorate([ - Decorators_1.Override - ], BufferedTokenStream.prototype, "get", null); - __decorate([ - Decorators_1.Override - ], BufferedTokenStream.prototype, "LA", null); - __decorate([ - Decorators_1.NotNull, - Decorators_1.Override - ], BufferedTokenStream.prototype, "LT", null); - __decorate([ - Decorators_1.Override - ], BufferedTokenStream.prototype, "sourceName", null); - __decorate([ - Decorators_1.NotNull, - Decorators_1.Override - ], BufferedTokenStream.prototype, "getText", null); - __decorate([ - Decorators_1.NotNull, - Decorators_1.Override - ], BufferedTokenStream.prototype, "getTextFromRange", null); - BufferedTokenStream = __decorate([ - __param(0, Decorators_1.NotNull) - ], BufferedTokenStream); - exports.BufferedTokenStream = BufferedTokenStream; -}); - -// node_modules/antlr4ts/CommonTokenStream.js -var require_CommonTokenStream = __commonJS((exports) => { - "use strict"; - var __decorate = exports && exports.__decorate || function(decorators, target, key, desc) { - var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; - if (typeof Reflect === "object" && typeof Reflect.decorate === "function") - r = Reflect.decorate(decorators, target, key, desc); - else - for (var i = decorators.length - 1; i >= 0; i--) - if (d = decorators[i]) - r = (c < 3 ? d(r) : c > 3 ? 
d(target, key, r) : d(target, key)) || r; - return c > 3 && r && Object.defineProperty(target, key, r), r; - }; - var __param = exports && exports.__param || function(paramIndex, decorator) { - return function(target, key) { - decorator(target, key, paramIndex); - }; - }; - Object.defineProperty(exports, "__esModule", {value: true}); - exports.CommonTokenStream = void 0; - var BufferedTokenStream_1 = require_BufferedTokenStream(); - var Decorators_1 = require_Decorators(); - var Token_1 = require_Token(); - var CommonTokenStream2 = class CommonTokenStream extends BufferedTokenStream_1.BufferedTokenStream { - constructor(tokenSource, channel = Token_1.Token.DEFAULT_CHANNEL) { - super(tokenSource); - this.channel = channel; - } - adjustSeekIndex(i) { - return this.nextTokenOnChannel(i, this.channel); - } - tryLB(k) { - if (this.p - k < 0) { - return void 0; - } - let i = this.p; - let n = 1; - while (n <= k && i > 0) { - i = this.previousTokenOnChannel(i - 1, this.channel); - n++; - } - if (i < 0) { - return void 0; - } - return this.tokens[i]; - } - tryLT(k) { - this.lazyInit(); - if (k === 0) { - throw new RangeError("0 is not a valid lookahead index"); - } - if (k < 0) { - return this.tryLB(-k); - } - let i = this.p; - let n = 1; - while (n < k) { - if (this.sync(i + 1)) { - i = this.nextTokenOnChannel(i + 1, this.channel); - } - n++; - } - return this.tokens[i]; - } - getNumberOfOnChannelTokens() { - let n = 0; - this.fill(); - for (let t of this.tokens) { - if (t.channel === this.channel) { - n++; - } - if (t.type === Token_1.Token.EOF) { - break; - } - } - return n; - } - }; - __decorate([ - Decorators_1.Override - ], CommonTokenStream2.prototype, "adjustSeekIndex", null); - __decorate([ - Decorators_1.Override - ], CommonTokenStream2.prototype, "tryLB", null); - __decorate([ - Decorators_1.Override - ], CommonTokenStream2.prototype, "tryLT", null); - CommonTokenStream2 = __decorate([ - __param(0, Decorators_1.NotNull) - ], CommonTokenStream2); - 
exports.CommonTokenStream = CommonTokenStream2; -}); - -// node_modules/antlr4ts/ListTokenSource.js -var require_ListTokenSource = __commonJS((exports) => { - "use strict"; - var __decorate = exports && exports.__decorate || function(decorators, target, key, desc) { - var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; - if (typeof Reflect === "object" && typeof Reflect.decorate === "function") - r = Reflect.decorate(decorators, target, key, desc); - else - for (var i = decorators.length - 1; i >= 0; i--) - if (d = decorators[i]) - r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r; - return c > 3 && r && Object.defineProperty(target, key, r), r; - }; - var __param = exports && exports.__param || function(paramIndex, decorator) { - return function(target, key) { - decorator(target, key, paramIndex); - }; - }; - Object.defineProperty(exports, "__esModule", {value: true}); - exports.ListTokenSource = void 0; - var CommonTokenFactory_1 = require_CommonTokenFactory(); - var Decorators_1 = require_Decorators(); - var Token_1 = require_Token(); - var ListTokenSource = class ListTokenSource { - constructor(tokens2, sourceName) { - this.i = 0; - this._factory = CommonTokenFactory_1.CommonTokenFactory.DEFAULT; - if (tokens2 == null) { - throw new Error("tokens cannot be null"); - } - this.tokens = tokens2; - this._sourceName = sourceName; - } - get charPositionInLine() { - if (this.i < this.tokens.length) { - return this.tokens[this.i].charPositionInLine; - } else if (this.eofToken != null) { - return this.eofToken.charPositionInLine; - } else if (this.tokens.length > 0) { - let lastToken = this.tokens[this.tokens.length - 1]; - let tokenText = lastToken.text; - if (tokenText != null) { - let lastNewLine = tokenText.lastIndexOf("\n"); - if (lastNewLine >= 0) { - return tokenText.length - lastNewLine - 1; - } - } - return lastToken.charPositionInLine + lastToken.stopIndex - 
lastToken.startIndex + 1; - } - return 0; - } - nextToken() { - if (this.i >= this.tokens.length) { - if (this.eofToken == null) { - let start = -1; - if (this.tokens.length > 0) { - let previousStop = this.tokens[this.tokens.length - 1].stopIndex; - if (previousStop !== -1) { - start = previousStop + 1; - } - } - let stop = Math.max(-1, start - 1); - this.eofToken = this._factory.create({source: this, stream: this.inputStream}, Token_1.Token.EOF, "EOF", Token_1.Token.DEFAULT_CHANNEL, start, stop, this.line, this.charPositionInLine); - } - return this.eofToken; - } - let t = this.tokens[this.i]; - if (this.i === this.tokens.length - 1 && t.type === Token_1.Token.EOF) { - this.eofToken = t; - } - this.i++; - return t; - } - get line() { - if (this.i < this.tokens.length) { - return this.tokens[this.i].line; - } else if (this.eofToken != null) { - return this.eofToken.line; - } else if (this.tokens.length > 0) { - let lastToken = this.tokens[this.tokens.length - 1]; - let line = lastToken.line; - let tokenText = lastToken.text; - if (tokenText != null) { - for (let i = 0; i < tokenText.length; i++) { - if (tokenText.charAt(i) === "\n") { - line++; - } - } - } - return line; - } - return 1; - } - get inputStream() { - if (this.i < this.tokens.length) { - return this.tokens[this.i].inputStream; - } else if (this.eofToken != null) { - return this.eofToken.inputStream; - } else if (this.tokens.length > 0) { - return this.tokens[this.tokens.length - 1].inputStream; - } - return void 0; - } - get sourceName() { - if (this._sourceName) { - return this._sourceName; - } - let inputStream = this.inputStream; - if (inputStream != null) { - return inputStream.sourceName; - } - return "List"; - } - set tokenFactory(factory) { - this._factory = factory; - } - get tokenFactory() { - return this._factory; - } - }; - __decorate([ - Decorators_1.Override - ], ListTokenSource.prototype, "charPositionInLine", null); - __decorate([ - Decorators_1.Override - ], ListTokenSource.prototype, 
"nextToken", null); - __decorate([ - Decorators_1.Override - ], ListTokenSource.prototype, "line", null); - __decorate([ - Decorators_1.Override - ], ListTokenSource.prototype, "inputStream", null); - __decorate([ - Decorators_1.Override - ], ListTokenSource.prototype, "sourceName", null); - __decorate([ - Decorators_1.Override, - Decorators_1.NotNull, - __param(0, Decorators_1.NotNull) - ], ListTokenSource.prototype, "tokenFactory", null); - ListTokenSource = __decorate([ - __param(0, Decorators_1.NotNull) - ], ListTokenSource); - exports.ListTokenSource = ListTokenSource; -}); - -// node_modules/antlr4ts/misc/MultiMap.js -var require_MultiMap = __commonJS((exports) => { - "use strict"; - Object.defineProperty(exports, "__esModule", {value: true}); - exports.MultiMap = void 0; - var MultiMap = class extends Map { - constructor() { - super(); - } - map(key, value) { - let elementsForKey = super.get(key); - if (!elementsForKey) { - elementsForKey = []; - super.set(key, elementsForKey); - } - elementsForKey.push(value); - } - getPairs() { - let pairs = []; - this.forEach((values, key) => { - values.forEach((v) => { - pairs.push([key, v]); - }); - }); - return pairs; - } - }; - exports.MultiMap = MultiMap; -}); - -// node_modules/antlr4ts/misc/ParseCancellationException.js -var require_ParseCancellationException = __commonJS((exports) => { - "use strict"; - Object.defineProperty(exports, "__esModule", {value: true}); - exports.ParseCancellationException = void 0; - var ParseCancellationException = class extends Error { - constructor(cause) { - super(cause.message); - this.cause = cause; - this.stack = cause.stack; - } - getCause() { - return this.cause; - } - }; - exports.ParseCancellationException = ParseCancellationException; -}); - -// node_modules/antlr4ts/InterpreterRuleContext.js -var require_InterpreterRuleContext = __commonJS((exports) => { - "use strict"; - var __decorate = exports && exports.__decorate || function(decorators, target, key, desc) { - var c = 
arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; - if (typeof Reflect === "object" && typeof Reflect.decorate === "function") - r = Reflect.decorate(decorators, target, key, desc); - else - for (var i = decorators.length - 1; i >= 0; i--) - if (d = decorators[i]) - r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r; - return c > 3 && r && Object.defineProperty(target, key, r), r; - }; - Object.defineProperty(exports, "__esModule", {value: true}); - exports.InterpreterRuleContext = void 0; - var Decorators_1 = require_Decorators(); - var ParserRuleContext_1 = require_ParserRuleContext(); - var InterpreterRuleContext = class extends ParserRuleContext_1.ParserRuleContext { - constructor(ruleIndex, parent, invokingStateNumber) { - if (invokingStateNumber !== void 0) { - super(parent, invokingStateNumber); - } else { - super(); - } - this._ruleIndex = ruleIndex; - } - get ruleIndex() { - return this._ruleIndex; - } - }; - __decorate([ - Decorators_1.Override - ], InterpreterRuleContext.prototype, "ruleIndex", null); - exports.InterpreterRuleContext = InterpreterRuleContext; -}); - -// node_modules/antlr4ts/ParserInterpreter.js -var require_ParserInterpreter = __commonJS((exports) => { - "use strict"; - var __decorate = exports && exports.__decorate || function(decorators, target, key, desc) { - var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; - if (typeof Reflect === "object" && typeof Reflect.decorate === "function") - r = Reflect.decorate(decorators, target, key, desc); - else - for (var i = decorators.length - 1; i >= 0; i--) - if (d = decorators[i]) - r = (c < 3 ? d(r) : c > 3 ? 
d(target, key, r) : d(target, key)) || r; - return c > 3 && r && Object.defineProperty(target, key, r), r; - }; - var __param = exports && exports.__param || function(paramIndex, decorator) { - return function(target, key) { - decorator(target, key, paramIndex); - }; - }; - Object.defineProperty(exports, "__esModule", {value: true}); - exports.ParserInterpreter = void 0; - var ATNState_1 = require_ATNState(); - var ATNStateType_1 = require_ATNStateType(); - var BitSet_1 = require_BitSet(); - var FailedPredicateException_1 = require_FailedPredicateException(); - var InputMismatchException_1 = require_InputMismatchException(); - var InterpreterRuleContext_1 = require_InterpreterRuleContext(); - var LoopEndState_1 = require_LoopEndState(); - var Decorators_1 = require_Decorators(); - var Decorators_2 = require_Decorators(); - var Parser_1 = require_Parser(); - var ParserATNSimulator_1 = require_ParserATNSimulator(); - var RecognitionException_1 = require_RecognitionException(); - var StarLoopEntryState_1 = require_StarLoopEntryState(); - var Token_1 = require_Token(); - var ParserInterpreter = class ParserInterpreter2 extends Parser_1.Parser { - constructor(grammarFileName, vocabulary, ruleNames, atn, input) { - super(grammarFileName instanceof ParserInterpreter2 ? 
grammarFileName.inputStream : input); - this._parentContextStack = []; - this.overrideDecision = -1; - this.overrideDecisionInputIndex = -1; - this.overrideDecisionAlt = -1; - this.overrideDecisionReached = false; - this._overrideDecisionRoot = void 0; - if (grammarFileName instanceof ParserInterpreter2) { - let old = grammarFileName; - this._grammarFileName = old._grammarFileName; - this._atn = old._atn; - this.pushRecursionContextStates = old.pushRecursionContextStates; - this._ruleNames = old._ruleNames; - this._vocabulary = old._vocabulary; - this.interpreter = new ParserATNSimulator_1.ParserATNSimulator(this._atn, this); - } else { - vocabulary = vocabulary; - ruleNames = ruleNames; - atn = atn; - this._grammarFileName = grammarFileName; - this._atn = atn; - this._ruleNames = ruleNames.slice(0); - this._vocabulary = vocabulary; - this.pushRecursionContextStates = new BitSet_1.BitSet(atn.states.length); - for (let state of atn.states) { - if (!(state instanceof StarLoopEntryState_1.StarLoopEntryState)) { - continue; - } - if (state.precedenceRuleDecision) { - this.pushRecursionContextStates.set(state.stateNumber); - } - } - this.interpreter = new ParserATNSimulator_1.ParserATNSimulator(atn, this); - } - } - reset(resetInput) { - if (resetInput === void 0) { - super.reset(); - } else { - super.reset(resetInput); - } - this.overrideDecisionReached = false; - this._overrideDecisionRoot = void 0; - } - get atn() { - return this._atn; - } - get vocabulary() { - return this._vocabulary; - } - get ruleNames() { - return this._ruleNames; - } - get grammarFileName() { - return this._grammarFileName; - } - parse(startRuleIndex) { - let startRuleStartState = this._atn.ruleToStartState[startRuleIndex]; - this._rootContext = this.createInterpreterRuleContext(void 0, ATNState_1.ATNState.INVALID_STATE_NUMBER, startRuleIndex); - if (startRuleStartState.isPrecedenceRule) { - this.enterRecursionRule(this._rootContext, startRuleStartState.stateNumber, startRuleIndex, 0); - } else 
{ - this.enterRule(this._rootContext, startRuleStartState.stateNumber, startRuleIndex); - } - while (true) { - let p = this.atnState; - switch (p.stateType) { - case ATNStateType_1.ATNStateType.RULE_STOP: - if (this._ctx.isEmpty) { - if (startRuleStartState.isPrecedenceRule) { - let result = this._ctx; - let parentContext = this._parentContextStack.pop(); - this.unrollRecursionContexts(parentContext[0]); - return result; - } else { - this.exitRule(); - return this._rootContext; - } - } - this.visitRuleStopState(p); - break; - default: - try { - this.visitState(p); - } catch (e) { - if (e instanceof RecognitionException_1.RecognitionException) { - this.state = this._atn.ruleToStopState[p.ruleIndex].stateNumber; - this.context.exception = e; - this.errorHandler.reportError(this, e); - this.recover(e); - } else { - throw e; - } - } - break; - } - } - } - enterRecursionRule(localctx, state, ruleIndex, precedence) { - this._parentContextStack.push([this._ctx, localctx.invokingState]); - super.enterRecursionRule(localctx, state, ruleIndex, precedence); - } - get atnState() { - return this._atn.states[this.state]; - } - visitState(p) { - let predictedAlt = 1; - if (p.numberOfTransitions > 1) { - predictedAlt = this.visitDecisionState(p); - } - let transition = p.transition(predictedAlt - 1); - switch (transition.serializationType) { - case 1: - if (this.pushRecursionContextStates.get(p.stateNumber) && !(transition.target instanceof LoopEndState_1.LoopEndState)) { - let parentContext = this._parentContextStack[this._parentContextStack.length - 1]; - let localctx = this.createInterpreterRuleContext(parentContext[0], parentContext[1], this._ctx.ruleIndex); - this.pushNewRecursionContext(localctx, this._atn.ruleToStartState[p.ruleIndex].stateNumber, this._ctx.ruleIndex); - } - break; - case 5: - this.match(transition._label); - break; - case 2: - case 7: - case 8: - if (!transition.matches(this._input.LA(1), Token_1.Token.MIN_USER_TOKEN_TYPE, 65535)) { - this.recoverInline(); 
- } - this.matchWildcard(); - break; - case 9: - this.matchWildcard(); - break; - case 3: - let ruleStartState = transition.target; - let ruleIndex = ruleStartState.ruleIndex; - let newctx = this.createInterpreterRuleContext(this._ctx, p.stateNumber, ruleIndex); - if (ruleStartState.isPrecedenceRule) { - this.enterRecursionRule(newctx, ruleStartState.stateNumber, ruleIndex, transition.precedence); - } else { - this.enterRule(newctx, transition.target.stateNumber, ruleIndex); - } - break; - case 4: - let predicateTransition = transition; - if (!this.sempred(this._ctx, predicateTransition.ruleIndex, predicateTransition.predIndex)) { - throw new FailedPredicateException_1.FailedPredicateException(this); - } - break; - case 6: - let actionTransition = transition; - this.action(this._ctx, actionTransition.ruleIndex, actionTransition.actionIndex); - break; - case 10: - if (!this.precpred(this._ctx, transition.precedence)) { - let precedence = transition.precedence; - throw new FailedPredicateException_1.FailedPredicateException(this, `precpred(_ctx, ${precedence})`); - } - break; - default: - throw new Error("UnsupportedOperationException: Unrecognized ATN transition type."); - } - this.state = transition.target.stateNumber; - } - visitDecisionState(p) { - let predictedAlt; - this.errorHandler.sync(this); - let decision = p.decision; - if (decision === this.overrideDecision && this._input.index === this.overrideDecisionInputIndex && !this.overrideDecisionReached) { - predictedAlt = this.overrideDecisionAlt; - this.overrideDecisionReached = true; - } else { - predictedAlt = this.interpreter.adaptivePredict(this._input, decision, this._ctx); - } - return predictedAlt; - } - createInterpreterRuleContext(parent, invokingStateNumber, ruleIndex) { - return new InterpreterRuleContext_1.InterpreterRuleContext(ruleIndex, parent, invokingStateNumber); - } - visitRuleStopState(p) { - let ruleStartState = this._atn.ruleToStartState[p.ruleIndex]; - if 
(ruleStartState.isPrecedenceRule) { - let parentContext = this._parentContextStack.pop(); - this.unrollRecursionContexts(parentContext[0]); - this.state = parentContext[1]; - } else { - this.exitRule(); - } - let ruleTransition = this._atn.states[this.state].transition(0); - this.state = ruleTransition.followState.stateNumber; - } - addDecisionOverride(decision, tokenIndex, forcedAlt) { - this.overrideDecision = decision; - this.overrideDecisionInputIndex = tokenIndex; - this.overrideDecisionAlt = forcedAlt; - } - get overrideDecisionRoot() { - return this._overrideDecisionRoot; - } - recover(e) { - let i = this._input.index; - this.errorHandler.recover(this, e); - if (this._input.index === i) { - let tok = e.getOffendingToken(); - if (!tok) { - throw new Error("Expected exception to have an offending token"); - } - let source = tok.tokenSource; - let stream = source !== void 0 ? source.inputStream : void 0; - let sourcePair = {source, stream}; - if (e instanceof InputMismatchException_1.InputMismatchException) { - let expectedTokens = e.expectedTokens; - if (expectedTokens === void 0) { - throw new Error("Expected the exception to provide expected tokens"); - } - let expectedTokenType = Token_1.Token.INVALID_TYPE; - if (!expectedTokens.isNil) { - expectedTokenType = expectedTokens.minElement; - } - let errToken = this.tokenFactory.create(sourcePair, expectedTokenType, tok.text, Token_1.Token.DEFAULT_CHANNEL, -1, -1, tok.line, tok.charPositionInLine); - this._ctx.addErrorNode(this.createErrorNode(this._ctx, errToken)); - } else { - let source2 = tok.tokenSource; - let errToken = this.tokenFactory.create(sourcePair, Token_1.Token.INVALID_TYPE, tok.text, Token_1.Token.DEFAULT_CHANNEL, -1, -1, tok.line, tok.charPositionInLine); - this._ctx.addErrorNode(this.createErrorNode(this._ctx, errToken)); - } - } - } - recoverInline() { - return this._errHandler.recoverInline(this); - } - get rootContext() { - return this._rootContext; - } - }; - __decorate([ - 
Decorators_1.NotNull - ], ParserInterpreter.prototype, "_vocabulary", void 0); - __decorate([ - Decorators_2.Override - ], ParserInterpreter.prototype, "reset", null); - __decorate([ - Decorators_2.Override - ], ParserInterpreter.prototype, "atn", null); - __decorate([ - Decorators_2.Override - ], ParserInterpreter.prototype, "vocabulary", null); - __decorate([ - Decorators_2.Override - ], ParserInterpreter.prototype, "ruleNames", null); - __decorate([ - Decorators_2.Override - ], ParserInterpreter.prototype, "grammarFileName", null); - __decorate([ - Decorators_2.Override - ], ParserInterpreter.prototype, "enterRecursionRule", null); - ParserInterpreter = __decorate([ - __param(1, Decorators_1.NotNull) - ], ParserInterpreter); - exports.ParserInterpreter = ParserInterpreter; -}); - -// node_modules/antlr4ts/tree/pattern/ParseTreeMatch.js -var require_ParseTreeMatch = __commonJS((exports) => { - "use strict"; - var __decorate = exports && exports.__decorate || function(decorators, target, key, desc) { - var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; - if (typeof Reflect === "object" && typeof Reflect.decorate === "function") - r = Reflect.decorate(decorators, target, key, desc); - else - for (var i = decorators.length - 1; i >= 0; i--) - if (d = decorators[i]) - r = (c < 3 ? d(r) : c > 3 ? 
d(target, key, r) : d(target, key)) || r; - return c > 3 && r && Object.defineProperty(target, key, r), r; - }; - var __param = exports && exports.__param || function(paramIndex, decorator) { - return function(target, key) { - decorator(target, key, paramIndex); - }; - }; - Object.defineProperty(exports, "__esModule", {value: true}); - exports.ParseTreeMatch = void 0; - var Decorators_1 = require_Decorators(); - var ParseTreeMatch = class ParseTreeMatch { - constructor(tree, pattern, labels, mismatchedNode) { - if (!tree) { - throw new Error("tree cannot be null"); - } - if (!pattern) { - throw new Error("pattern cannot be null"); - } - if (!labels) { - throw new Error("labels cannot be null"); - } - this._tree = tree; - this._pattern = pattern; - this._labels = labels; - this._mismatchedNode = mismatchedNode; - } - get(label) { - let parseTrees = this._labels.get(label); - if (!parseTrees || parseTrees.length === 0) { - return void 0; - } - return parseTrees[parseTrees.length - 1]; - } - getAll(label) { - const nodes = this._labels.get(label); - if (!nodes) { - return []; - } - return nodes; - } - get labels() { - return this._labels; - } - get mismatchedNode() { - return this._mismatchedNode; - } - get succeeded() { - return !this._mismatchedNode; - } - get pattern() { - return this._pattern; - } - get tree() { - return this._tree; - } - toString() { - return `Match ${this.succeeded ? 
"succeeded" : "failed"}; found ${this.labels.size} labels`; - } - }; - __decorate([ - Decorators_1.NotNull, - __param(0, Decorators_1.NotNull) - ], ParseTreeMatch.prototype, "getAll", null); - __decorate([ - Decorators_1.NotNull - ], ParseTreeMatch.prototype, "labels", null); - __decorate([ - Decorators_1.NotNull - ], ParseTreeMatch.prototype, "pattern", null); - __decorate([ - Decorators_1.NotNull - ], ParseTreeMatch.prototype, "tree", null); - __decorate([ - Decorators_1.Override - ], ParseTreeMatch.prototype, "toString", null); - ParseTreeMatch = __decorate([ - __param(0, Decorators_1.NotNull), - __param(1, Decorators_1.NotNull), - __param(2, Decorators_1.NotNull) - ], ParseTreeMatch); - exports.ParseTreeMatch = ParseTreeMatch; -}); - -// node_modules/antlr4ts/tree/xpath/XPathLexer.js -var require_XPathLexer = __commonJS((exports) => { - "use strict"; - Object.defineProperty(exports, "__esModule", {value: true}); - exports.XPathLexer = void 0; - var ATNDeserializer_1 = require_ATNDeserializer(); - var Lexer_1 = require_Lexer(); - var LexerATNSimulator_1 = require_LexerATNSimulator(); - var VocabularyImpl_1 = require_VocabularyImpl(); - var Utils3 = require_Utils(); - var XPathLexer = class extends Lexer_1.Lexer { - constructor(input) { - super(input); - this._interp = new LexerATNSimulator_1.LexerATNSimulator(XPathLexer._ATN, this); - } - get vocabulary() { - return XPathLexer.VOCABULARY; - } - get grammarFileName() { - return "XPathLexer.g4"; - } - get ruleNames() { - return XPathLexer.ruleNames; - } - get serializedATN() { - return XPathLexer._serializedATN; - } - get channelNames() { - return XPathLexer.channelNames; - } - get modeNames() { - return XPathLexer.modeNames; - } - action(_localctx, ruleIndex, actionIndex) { - switch (ruleIndex) { - case 4: - this.ID_action(_localctx, actionIndex); - break; - } - } - ID_action(_localctx, actionIndex) { - switch (actionIndex) { - case 0: - let text = this.text; - if (text.charAt(0) === text.charAt(0).toUpperCase()) 
{ - this.type = XPathLexer.TOKEN_REF; - } else { - this.type = XPathLexer.RULE_REF; - } - break; - } - } - static get _ATN() { - if (!XPathLexer.__ATN) { - XPathLexer.__ATN = new ATNDeserializer_1.ATNDeserializer().deserialize(Utils3.toCharArray(XPathLexer._serializedATN)); - } - return XPathLexer.__ATN; - } - }; - exports.XPathLexer = XPathLexer; - XPathLexer.TOKEN_REF = 1; - XPathLexer.RULE_REF = 2; - XPathLexer.ANYWHERE = 3; - XPathLexer.ROOT = 4; - XPathLexer.WILDCARD = 5; - XPathLexer.BANG = 6; - XPathLexer.ID = 7; - XPathLexer.STRING = 8; - XPathLexer.channelNames = [ - "DEFAULT_TOKEN_CHANNEL", - "HIDDEN" - ]; - XPathLexer.modeNames = [ - "DEFAULT_MODE" - ]; - XPathLexer.ruleNames = [ - "ANYWHERE", - "ROOT", - "WILDCARD", - "BANG", - "ID", - "NameChar", - "NameStartChar", - "STRING" - ]; - XPathLexer._LITERAL_NAMES = [ - void 0, - void 0, - void 0, - "'//'", - "'/'", - "'*'", - "'!'" - ]; - XPathLexer._SYMBOLIC_NAMES = [ - void 0, - "TOKEN_REF", - "RULE_REF", - "ANYWHERE", - "ROOT", - "WILDCARD", - "BANG", - "ID", - "STRING" - ]; - XPathLexer.VOCABULARY = new VocabularyImpl_1.VocabularyImpl(XPathLexer._LITERAL_NAMES, XPathLexer._SYMBOLIC_NAMES, []); - XPathLexer._serializedATNSegments = 2; - XPathLexer._serializedATNSegment0 = '\uC91D\uCABA\u058D\uAFBA\u4F53\u0607\uEA8B\uC241\n2\b     \x07 \x07\b \b \x07\n\f"\v\x07\x07\b\b  \x07 ,\n \f  /\v   -\n\x07\x07 \b\v 
\r\n\u02B6\n2;C\\aac|\x81\xA1\xAC\xAC\xAF\xAF\xB7\xB7\xBC\xBC\xC2\xD8\xDA\xF8\xFA\u02C3\u02C8\u02D3\u02E2\u02E6\u02EE\u02EE\u02F0\u02F0\u0302\u0376\u0378\u0379\u037C\u037F\u0381\u0381\u0388\u0388\u038A\u038C\u038E\u038E\u0390\u03A3\u03A5\u03F7\u03F9\u0483\u0485\u0489\u048C\u0531\u0533\u0558\u055B\u055B\u0563\u0589\u0593\u05BF\u05C1\u05C1\u05C3\u05C4\u05C6\u05C7\u05C9\u05C9\u05D2\u05EC\u05F2\u05F4\u0602\u0607\u0612\u061C\u061E\u061E\u0622\u066B\u0670\u06D5\u06D7\u06DF\u06E1\u06EA\u06EC\u06FE\u0701\u0701\u0711\u074C\u074F\u07B3\u07C2\u07F7\u07FC\u07FC\u0802\u082F\u0842\u085D\u08A2\u08B6\u08B8\u08BF\u08D6\u0965\u0968\u0971\u0973\u0985\u0987\u098E\u0991\u0992\u0995\u09AA\u09AC\u09B2\u09B4\u09B4\u09B8\u09BB\u09BE\u09C6\u09C9\u09CA\u09CD\u09D0\u09D9\u09D9\u09DE\u09DF\u09E1\u09E5\u09E8\u09F3\u0A03\u0A05\u0A07\u0A0C\u0A11\u0A12\u0A15\u0A2A\u0A2C\u0A32\u0A34\u0A35\u0A37\u0A38\u0A3A\u0A3B\u0A3E\u0A3E\u0A40\u0A44\u0A49\u0A4A\u0A4D\u0A4F\u0A53\u0A53\u0A5B\u0A5E\u0A60\u0A60\u0A68\u0A77\u0A83\u0A85\u0A87\u0A8F\u0A91\u0A93\u0A95\u0AAA\u0AAC\u0AB2\u0AB4\u0AB5\u0AB7\u0ABB\u0ABE\u0AC7\u0AC9\u0ACB\u0ACD\u0ACF\u0AD2\u0AD2\u0AE2\u0AE5\u0AE8\u0AF1\u0AFB\u0AFB\u0B03\u0B05\u0B07\u0B0E\u0B11\u0B12\u0B15\u0B2A\u0B2C\u0B32\u0B34\u0B35\u0B37\u0B3B\u0B3E\u0B46\u0B49\u0B4A\u0B4D\u0B4F\u0B58\u0B59\u0B5E\u0B5F\u0B61\u0B65\u0B68\u0B71\u0B73\u0B73\u0B84\u0B85\u0B87\u0B8C\u0B90\u0B92\u0B94\u0B97\u0B9B\u0B9C\u0B9E\u0B9E\u0BA0\u0BA1\u0BA5\u0BA6\u0BAA\u0BAC\u0BB0\u0BBB\u0BC0\u0BC4\u0BC8\u0BCA\u0BCC\u0BCF\u0BD2\u0BD2\u0BD9\u0BD9\u0BE8\u0BF1\u0C02\u0C05\u0C07\u0C0E\u0C10\u0C12\u0C14\u0C2A\u0C2C\u0C3B\u0C3F\u0C46\u0C48\u0C4A\u0C4C\u0C4F\u0C57\u0C58\u0C5A\u0C5C\u0C62\u0C65\u0C68\u0C71\u0C82\u0C85\u0C87\u0C8E\u0C90\u0C92\u0C94\u0CAA\u0CAC\u0CB5\u0CB7\u0CBB\u0CBE\u0CC6\u0CC8\u0CCA\u0CCC\u0CCF\u0CD7\u0CD8\u0CE0\u0CE0\u0CE2\u0CE5\u0CE8\u0CF1\u0CF3\u0CF4\u0D03\u0D05\u0D07\u0D0E\u0D10\u0D12\u0D14\u0D3C\u0D3F\u0D46\u0D48\u0D4A\u0D4C\u0D50\u0D56\u0D59\u0D61\u0D65\u0D68\u0D71\u0D7C\u0D81\u0D84\u0D85\u0D87\u0D98\u0D9
C\u0DB3\u0DB5\u0DBD\u0DBF\u0DBF\u0DC2\u0DC8\u0DCC\u0DCC\u0DD1\u0DD6\u0DD8\u0DD8\u0DDA\u0DE1\u0DE8\u0DF1\u0DF4\u0DF5\u0E03\u0E3C\u0E42\u0E50\u0E52\u0E5B\u0E83\u0E84\u0E86\u0E86\u0E89\u0E8A\u0E8C\u0E8C\u0E8F\u0E8F\u0E96\u0E99\u0E9B\u0EA1\u0EA3\u0EA5\u0EA7\u0EA7\u0EA9\u0EA9\u0EAC\u0EAD\u0EAF\u0EBB\u0EBD\u0EBF\u0EC2\u0EC6\u0EC8\u0EC8\u0ECA\u0ECF\u0ED2\u0EDB\u0EDE\u0EE1\u0F02\u0F02\u0F1A\u0F1B\u0F22\u0F2B\u0F37\u0F37\u0F39\u0F39\u0F3B\u0F3B\u0F40\u0F49\u0F4B\u0F6E\u0F73\u0F86\u0F88\u0F99\u0F9B\u0FBE\u0FC8\u0FC8\u1002\u104B\u1052\u109F\u10A2\u10C7\u10C9\u10C9\u10CF\u10CF\u10D2\u10FC\u10FE\u124A\u124C\u124F\u1252\u1258\u125A\u125A\u125C\u125F\u1262\u128A\u128C\u128F\u1292\u12B2\u12B4\u12B7\u12BA\u12C0\u12C2\u12C2\u12C4\u12C7\u12CA\u12D8\u12DA\u1312\u1314\u1317\u131A\u135C\u135F\u1361\u1382\u1391\u13A2\u13F7\u13FA\u13FF\u1403\u166E\u1671\u1681\u1683\u169C\u16A2\u16EC\u16F0\u16FA\u1702\u170E\u1710\u1716\u1722\u1736\u1742\u1755\u1762\u176E\u1770\u1772\u1774\u1775\u1782\u17D5\u17D9\u17D9\u17DE\u17DF\u17E2\u17EB\u180D\u1810\u1812\u181B\u1822\u1879\u1882\u18AC\u18B2\u18F7\u1902\u1920\u1922\u192D\u1932\u193D\u1948\u196F\u1972\u1976\u1982\u19AD\u19B2\u19CB\u19D2\u19DB\u1A02\u1A1D\u1A22\u1A60\u1A62\u1A7E\u1A81\u1A8B\u1A92\u1A9B\u1AA9\u1AA9\u1AB2\u1ABF\u1B02\u1B4D\u1B52\u1B5B\u1B6D\u1B75\u1B82\u1BF5\u1C02\u1C39\u1C42\u1C4B\u1C4F\u1C7F\u1C82\u1C8A\u1CD2\u1CD4\u1CD6\u1CF8\u1CFA\u1CFB\u1D02\u1DF7\u1DFD\u1F17\u1F1A\u1F1F\u1F22\u1F47\u1F4A\u1F4F\u1F52\u1F59\u1F5B\u1F5B\u1F5D\u1F5D\u1F5F\u1F5F\u1F61\u1F7F\u1F82\u1FB6\u1FB8\u1FBE\u1FC0\u1FC0\u1FC4\u1FC6\u1FC8\u1FCE\u1FD2\u1FD5\u1FD8\u1FDD\u1FE2\u1FEE\u1FF4\u1FF6\u1FF8\u1FFE\u200D\u2011\u202C\u2030\u2041\u2042\u2056\u2056\u2062\u2066\u2068\u2071\u2073\u2073\u2081\u2081\u2092\u209E\u20D2\u20DE\u20E3\u20E3\u20E7\u20F2\u2104\u2104\u2109\u2109\u210C\u2115\u2117\u2117\u211B\u211F\u2126\u2126\u2128\u2128\u212A\u212A\u212C\u212F\u2131\u213B\u213E\u2141\u2147\u214B\u2150\u2150\u2162\u218A\u2C02\u2C30\u2C32\u2C60\u2C62\u2CE6\u2CED\u2CF5\u2D02\u2D27\
u2D29\u2D29\u2D2F\u2D2F\u2D32\u2D69\u2D71\u2D71\u2D81\u2D98\u2DA2\u2DA8\u2DAA\u2DB0\u2DB2\u2DB8\u2DBA\u2DC0\u2DC2\u2DC8\u2DCA\u2DD0\u2DD2\u2DD8\u2DDA\u2DE0\u2DE2\u2E01\u2E31\u2E31\u3007\u3009\u3023\u3031\u3033\u3037\u303A\u303E\u3043\u3098\u309B\u309C\u309F\u30A1\u30A3\u30FC\u30FE\u3101\u3107\u312F\u3133\u3190\u31A2\u31BC\u31F2\u3201\u3402\u4DB7\u4E02\u9FD7\uA002\uA48E\uA4D2\uA4FF\uA502\uA60E\uA612\uA62D\uA642\uA671\uA676\uA67F\uA681\uA6F3\uA719\uA721\uA724\uA78A\uA78D\uA7B0\uA7B2\uA7B9\uA7F9\uA829\uA842\uA875\uA882\uA8C7\uA8D2\uA8DB\uA8E2\uA8F9\uA8FD\uA8FD\uA8FF\uA8FF\uA902\uA92F\uA932\uA955\uA962\uA97E\uA982\uA9C2\uA9D1\uA9DB\uA9E2\uAA00\uAA02\uAA38\uAA42\uAA4F\uAA52\uAA5B\uAA62\uAA78\uAA7C\uAAC4\uAADD\uAADF\uAAE2\uAAF1\uAAF4\uAAF8\uAB03\uAB08\uAB0B\uAB10\uAB13\uAB18\uAB22\uAB28\uAB2A\uAB30\uAB32\uAB5C\uAB5E\uAB67\uAB72\uABEC\uABEE\uABEF\uABF2\uABFB\uAC02\uD7A5\uD7B2\uD7C8\uD7CD\uD7FD\uF902\uFA6F\uFA72\uFADB\uFB02\uFB08\uFB15\uFB19\uFB1F\uFB2A\uFB2C\uFB38\uFB3A\uFB3E\uFB40\uFB40\uFB42\uFB43\uFB45\uFB46\uFB48\uFBB3\uFBD5\uFD3F\uFD52\uFD91\uFD94\uFDC9\uFDF2\uFDFD\uFE02\uFE11\uFE22\uFE31\uFE35\uFE36\uFE4F\uFE51\uFE72\uFE76\uFE78\uFEFE\uFF01\uFF01\uFF12\uFF1B\uFF23\uFF3C\uFF41\uFF41\uFF43\uFF5C\uFF68\uFFC0\uFFC4\uFFC9\uFFCC\uFFD1\uFFD4\uFFD9\uFFDC\uFFDE\uFFFB\uFFFD\r(*<>?AOR_\x82\xFC\u0142\u0176\u01FF\u01FF\u0282\u029E\u02A2\u02D2\u02E2\u02E2\u0302\u0321\u0332\u034C\u0352\u037C\u0382\u039F\u03A2\u03C5\u03CA\u03D1\u03D3\u03D7\u0402\u049F\u04A2\u04AB\u04B2\u04D5\u04DA\u04FD\u0502\u0529\u0532\u0565\u0602\u0738\u0742\u0757\u0762\u0769\u0802\u0807\u080A\u080A\u080C\u0837\u0839\u083A\u083E\u083E\u0841\u0857\u0862\u0878\u0882\u08A0\u08E2\u08F4\u08F6\u08F7\u0902\u0917\u0922\u093B\u0982\u09B9\u09C0\u09C1\u0A02\u0A05\u0A07\u0A08\u0A0E\u0A15\u0A17\u0A19\u0A1B\u0A35\u0A3A\u0A3C\u0A41\u0A41\u0A62\u0A7E\u0A82\u0A9E\u0AC2\u0AC9\u0ACB\u0AE8\u0B02\u0B37\u0B42\u0B57\u0B62\u0B74\u0B82\u0B93\u0C02\u0C4A\u0C82\u0CB4\u0CC2\u0CF4\u1002\u1048\u1068\u1071\u1081\u10BC\u10BF\u10BF\u10D2\u10EA\u
10F2\u10FB\u1102\u1136\u1138\u1141\u1152\u1175\u1178\u1178\u1182\u11C6\u11CC\u11CE\u11D2\u11DC\u11DE\u11DE\u1202\u1213\u1215\u1239\u1240\u1240\u1282\u1288\u128A\u128A\u128C\u128F\u1291\u129F\u12A1\u12AA\u12B2\u12EC\u12F2\u12FB\u1302\u1305\u1307\u130E\u1311\u1312\u1315\u132A\u132C\u1332\u1334\u1335\u1337\u133B\u133E\u1346\u1349\u134A\u134D\u134F\u1352\u1352\u1359\u1359\u135F\u1365\u1368\u136E\u1372\u1376\u1402\u144C\u1452\u145B\u1482\u14C7\u14C9\u14C9\u14D2\u14DB\u1582\u15B7\u15BA\u15C2\u15DA\u15DF\u1602\u1642\u1646\u1646\u1652\u165B\u1682\u16B9\u16C2\u16CB\u1702\u171B\u171F\u172D\u1732\u173B\u18A2\u18EB\u1901\u1901\u1AC2\u1AFA\u1C02\u1C0A\u1C0C\u1C38\u1C3A\u1C42\u1C52\u1C5B\u1C74\u1C91\u1C94\u1CA9\u1CAB\u1CB8\u2002\u239B\u2402\u2470\u2482\u2545\u3002\u3430\u4402\u4648\u6802\u6A3A\u6A42\u6A60\u6A62\u6A6B\u6AD2\u6AEF\u6AF2\u6AF6\u6B02\u6B38\u6B42\u6B45\u6B52\u6B5B\u6B65\u6B79\u6B7F\u6B91\u6F02\u6F46\u6F52\u6F80\u6F91\u6FA1\u6FE2\u6FE2\u7002\u87EE\u8802\u8AF4\uB002\uB003\uBC02\uBC6C\uBC72\uBC7E\uBC82\uBC8A\uBC92\uBC9B\uBC9F\uBCA0\uBCA2\uBCA5\uD167\uD16B\uD16F\uD184\uD187\uD18D\uD1AC\uD1AF\uD244\uD246\uD402\uD456\uD458\uD49E\uD4A0\uD4A1\uD4A4\uD4A4\uD4A7\uD4A8\uD4AB\uD4AE\uD4B0\uD4BB\uD4BD\uD4BD\uD4BF\uD4C5\uD4C7\uD507\uD509\uD50C\uD50F\uD516\uD518\uD51E\uD520\uD53B\uD53D\uD540\uD542\uD546\uD548\uD548\uD54C\uD552\uD554\uD6A7\uD6AA\uD6C2\uD6C4\uD6DC\uD6DE\uD6FC\uD6FE\uD716\uD718\uD736\uD738\uD750\uD752\uD770\uD772\uD78A\uD78C\uD7AA\uD7AC\uD7C4\uD7C6\uD7CD\uD7D0\uD801\uDA02\uDA38\uDA3D\uDA6E\uDA77\uDA77\uDA86\uDA86\uDA9D\uDAA1\uDAA3\uDAB1\uE002\uE008\uE00A\uE01A\uE01D\uE023\uE025\uE026\uE028\uE02C\uE802\uE8C6\uE8D2\uE8D8\uE902\uE94C\uE952\uE95B\uEE02\uEE05\uEE07\uEE21\uEE23\uEE24\uEE26\uEE26\uEE29\uEE29\uEE2B\uEE34\uEE36\uEE39\uEE3B\uEE3B\uEE3D\uEE3D\uEE44\uEE44\uEE49\uEE49\uEE4B\uEE4B\uEE4D\uEE4D\uEE4F\uEE51\uEE53\uEE54\uEE56\uEE56\uEE59\uEE59\uEE5B\uEE5B\uEE5D\uEE5D\uEE5F\uEE5F\uEE61\uEE61\uEE63\uEE64\uEE66\uEE66\uEE69\uEE6C\uEE6E\uEE74\uEE76\uEE79\uEE7B\uEE7E\uEE80\uEE
80\uEE82\uEE8B\uEE8D\uEE9D\uEEA3\uEEA5\uEEA7\uEEAB\uEEAD\uEEBD\uA6D8\uA702\uB736\uB742\uB81F\uB822\uCEA3\uF802\uFA1F"\x81\u0102\u01F1\u0240C\\c|\xAC\xAC\xB7\xB7\xBC\xBC\xC2\xD8\xDA\xF8\xFA\u02C3\u02C8\u02D3\u02E2\u02E6\u02EE\u02EE\u02F0\u02F0\u0372\u0376\u0378\u0379\u037C\u037F\u0381\u0381\u0388\u0388\u038A\u038C\u038E\u038E\u0390\u03A3\u03A5\u03F7\u03F9\u0483\u048C\u0531\u0533\u0558\u055B\u055B\u0563\u0589\u05D2\u05EC\u05F2\u05F4\u0622\u064C\u0670\u0671\u0673\u06D5\u06D7\u06D7\u06E7\u06E8\u06F0\u06F1\u06FC\u06FE\u0701\u0701\u0712\u0712\u0714\u0731\u074F\u07A7\u07B3\u07B3\u07CC\u07EC\u07F6\u07F7\u07FC\u07FC\u0802\u0817\u081C\u081C\u0826\u0826\u082A\u082A\u0842\u085A\u08A2\u08B6\u08B8\u08BF\u0906\u093B\u093F\u093F\u0952\u0952\u095A\u0963\u0973\u0982\u0987\u098E\u0991\u0992\u0995\u09AA\u09AC\u09B2\u09B4\u09B4\u09B8\u09BB\u09BF\u09BF\u09D0\u09D0\u09DE\u09DF\u09E1\u09E3\u09F2\u09F3\u0A07\u0A0C\u0A11\u0A12\u0A15\u0A2A\u0A2C\u0A32\u0A34\u0A35\u0A37\u0A38\u0A3A\u0A3B\u0A5B\u0A5E\u0A60\u0A60\u0A74\u0A76\u0A87\u0A8F\u0A91\u0A93\u0A95\u0AAA\u0AAC\u0AB2\u0AB4\u0AB5\u0AB7\u0ABB\u0ABF\u0ABF\u0AD2\u0AD2\u0AE2\u0AE3\u0AFB\u0AFB\u0B07\u0B0E\u0B11\u0B12\u0B15\u0B2A\u0B2C\u0B32\u0B34\u0B35\u0B37\u0B3B\u0B3F\u0B3F\u0B5E\u0B5F\u0B61\u0B63\u0B73\u0B73\u0B85\u0B85\u0B87\u0B8C\u0B90\u0B92\u0B94\u0B97\u0B9B\u0B9C\u0B9E\u0B9E\u0BA0\u0BA1\u0BA5\u0BA6\u0BAA\u0BAC\u0BB0\u0BBB\u0BD2\u0BD2\u0C07\u0C0E\u0C10\u0C12\u0C14\u0C2A\u0C2C\u0C3B\u0C3F\u0C3F\u0C5A\u0C5C\u0C62\u0C63\u0C82\u0C82\u0C87\u0C8E\u0C90\u0C92\u0C94\u0CAA\u0CAC\u0CB5\u0CB7\u0CBB\u0CBF\u0CBF\u0CE0\u0CE0\u0CE2\u0CE3\u0CF3\u0CF4\u0D07\u0D0E\u0D10\u0D12\u0D14\u0D3C\u0D3F\u0D3F\u0D50\u0D50\u0D56\u0D58\u0D61\u0D63\u0D7C\u0D81\u0D87\u0D98\u0D9C\u0DB3\u0DB5\u0DBD\u0DBF\u0DBF\u0DC2\u0DC8\u0E03\u0E32\u0E34\u0E35\u0E42\u0E48\u0E83\u0E84\u0E86\u0E86\u0E89\u0E8A\u0E8C\u0E8C\u0E8F\u0E8F\u0E96\u0E99\u0E9B\u0EA1\u0EA3\u0EA5\u0EA7\u0EA7\u0EA9\u0EA9\u0EAC\u0EAD\u0EAF\u0EB2\u0EB4\u0EB5\u0EBF\u0EBF\u0EC2\u0EC6\u0EC8\u0EC8\u0EDE\u0EE1\u0F02\u0F02\u0F42
\u0F49\u0F4B\u0F6E\u0F8A\u0F8E\u1002\u102C\u1041\u1041\u1052\u1057\u105C\u105F\u1063\u1063\u1067\u1068\u1070\u1072\u1077\u1083\u1090\u1090\u10A2\u10C7\u10C9\u10C9\u10CF\u10CF\u10D2\u10FC\u10FE\u124A\u124C\u124F\u1252\u1258\u125A\u125A\u125C\u125F\u1262\u128A\u128C\u128F\u1292\u12B2\u12B4\u12B7\u12BA\u12C0\u12C2\u12C2\u12C4\u12C7\u12CA\u12D8\u12DA\u1312\u1314\u1317\u131A\u135C\u1382\u1391\u13A2\u13F7\u13FA\u13FF\u1403\u166E\u1671\u1681\u1683\u169C\u16A2\u16EC\u16F0\u16FA\u1702\u170E\u1710\u1713\u1722\u1733\u1742\u1753\u1762\u176E\u1770\u1772\u1782\u17B5\u17D9\u17D9\u17DE\u17DE\u1822\u1879\u1882\u1886\u1889\u18AA\u18AC\u18AC\u18B2\u18F7\u1902\u1920\u1952\u196F\u1972\u1976\u1982\u19AD\u19B2\u19CB\u1A02\u1A18\u1A22\u1A56\u1AA9\u1AA9\u1B07\u1B35\u1B47\u1B4D\u1B85\u1BA2\u1BB0\u1BB1\u1BBC\u1BE7\u1C02\u1C25\u1C4F\u1C51\u1C5C\u1C7F\u1C82\u1C8A\u1CEB\u1CEE\u1CF0\u1CF3\u1CF7\u1CF8\u1D02\u1DC1\u1E02\u1F17\u1F1A\u1F1F\u1F22\u1F47\u1F4A\u1F4F\u1F52\u1F59\u1F5B\u1F5B\u1F5D\u1F5D\u1F5F\u1F5F\u1F61\u1F7F\u1F82\u1FB6\u1FB8\u1FBE\u1FC0\u1FC0\u1FC4\u1FC6\u1FC8\u1FCE\u1FD2\u1FD5\u1FD8\u1FDD\u1FE2\u1FEE\u1FF4\u1FF6\u1FF8\u1FFE\u2073\u2073\u2081\u2081\u2092\u209E\u2104\u2104\u2109\u2109\u210C\u2115\u2117\u2117\u211B\u211F\u2126\u2126\u2128\u2128\u212A\u212A\u212C\u212F\u2131\u213B\u213E\u2141\u2147\u214B\u2150\u2150\u2162\u218A\u2C02\u2C30\u2C32\u2C60\u2C62\u2CE6\u2CED\u2CF0\u2CF4\u2CF5\u2D02\u2D27\u2D29\u2D29\u2D2F\u2D2F\u2D32\u2D69\u2D71\u2D71\u2D82\u2D98\u2DA2\u2DA8\u2DAA\u2DB0\u2DB2\u2DB8\u2DBA\u2DC0\u2DC2\u2DC8\u2DCA\u2DD0\u2DD2\u2DD8\u2DDA\u2DE0\u2E31\u2E31\u3007\u3009\u3023\u302B\u3033\u3037\u303A\u303E\u3043\u3098\u309F\u30A1\u30A3\u30FC\u30FE\u3101\u3107\u312F\u3133\u3190\u31A2\u31BC\u31F2\u3201\u3402\u4DB7\u4E02\u9FD7\uA002\uA48E\uA4D2\uA4FF\uA502\uA60E\uA612\uA621\uA62C\uA62D\uA642\uA670\uA681\uA69F\uA6A2\uA6F1\uA719\uA721\uA724\uA78A\uA78D\uA7B0\uA7B2\uA7B9\uA7F9\uA803\uA805\uA807\uA809\uA80C\uA80E\uA824\uA842\uA875\uA884\uA8B5\uA8F4\uA8F9\uA8FD\uA8FD\uA8FF\uA8FF\uA90C\uA927\u
A932\uA948\uA962\uA97E\uA986\uA9B4\uA9D1\uA9D1\uA9E2\uA9E6\uA9E8\uA9F1\uA9FC\uAA00\uAA02\uAA2A\uAA42\uAA44\uAA46\uAA4D\uAA62\uAA78\uAA7C\uAA7C\uAA80\uAAB1\uAAB3\uAAB3\uAAB7\uAAB8\uAABB\uAABF\uAAC2\uAAC2\uAAC4\uAAC4\uAADD\uAADF\uAAE2\uAAEC\uAAF4\uAAF6\uAB03\uAB08\uAB0B\uAB10\uAB13\uAB18\uAB22\uAB28\uAB2A\uAB30\uAB32\uAB5C\uAB5E\uAB67\uAB72\uABE4\uAC02\uD7A5\uD7B2\uD7C8\uD7CD\uD7FD\uF902\uFA6F\uFA72\uFADB\uFB02\uFB08\uFB15\uFB19\uFB1F\uFB1F\uFB21\uFB2A\uFB2C\uFB38\uFB3A\uFB3E\uFB40\uFB40\uFB42\uFB43\uFB45\uFB46\uFB48\uFBB3\uFBD5\uFD3F\uFD52\uFD91\uFD94\uFDC9\uFDF2\uFDFD\uFE72\uFE76\uFE78\uFEFE\uFF23\uFF3C\uFF43\uFF5C\uFF68\uFFC0\uFFC4\uFFC9\uFFCC\uFFD1\uFFD4\uFFD9\uFFDC\uFFDE\r(*<>?AOR_\x82\xFC\u0142\u0176\u0282\u029E\u02A2\u02D2\u0302\u0321\u0332\u034C\u0352\u0377\u0382\u039F\u03A2\u03C5\u03CA\u03D1\u03D3\u03D7\u0402\u049F\u04B2\u04D5\u04DA\u04FD\u0502\u0529\u0532\u0565\u0602\u0738\u0742\u0757\u0762\u0769\u0802\u0807\u080A\u080A\u080C\u0837\u0839\u083A\u083E\u083E\u0841\u0857\u0862\u0878\u0882\u08A0\u08E2\u08F4\u08F6\u08F7\u0902\u0917\u0922\u093B\u0982\u09B9\u09C0\u09C1\u0A02\u0A02\u0A12\u0A15\u0A17\u0A19\u0A1B\u0A35\u0A62\u0A7E\u0A82\u0A9E\u0AC2\u0AC9\u0ACB\u0AE6\u0B02\u0B37\u0B42\u0B57\u0B62\u0B74\u0B82\u0B93\u0C02\u0C4A\u0C82\u0CB4\u0CC2\u0CF4\u1005\u1039\u1085\u10B1\u10D2\u10EA\u1105\u1128\u1152\u1174\u1178\u1178\u1185\u11B4\u11C3\u11C6\u11DC\u11DC\u11DE\u11DE\u1202\u1213\u1215\u122D\u1282\u1288\u128A\u128A\u128C\u128F\u1291\u129F\u12A1\u12AA\u12B2\u12E0\u1307\u130E\u1311\u1312\u1315\u132A\u132C\u1332\u1334\u1335\u1337\u133B\u133F\u133F\u1352\u1352\u135F\u1363\u1402\u1436\u1449\u144C\u1482\u14B1\u14C6\u14C7\u14C9\u14C9\u1582\u15B0\u15DA\u15DD\u1602\u1631\u1646\u1646\u1682\u16AC\u1702\u171B\u18A2\u18E1\u1901\u1901\u1AC2\u1AFA\u1C02\u1C0A\u1C0C\u1C30\u1C42\u1C42\u1C74\u1C91\u2002\u239B\u2402\u2470\u2482\u2545\u3002\u3430\u4402\u4648\u6802\u6A3A\u6A42\u6A60\u6AD2\u6AEF\u6B02\u6B31\u6B42\u6B45\u6B65\u6B79\u6B7F\u6B91\u6F02\u6F46\u6F52\u6F52\u6F95\u6FA1\u6FE2\u6FE2\u7
002\u87EE\u8802\u8AF4\uB002\uB003\uBC02\uBC6C\uBC72\uBC7E\uBC82\uBC8A\uBC92\uBC9B\uD402\uD456\uD458\uD49E\uD4A0\uD4A1\uD4A4\uD4A4\uD4A7\uD4A8\uD4AB\uD4AE\uD4B0\uD4BB\uD4BD\uD4BD\uD4BF\uD4C5\uD4C7\uD507\uD509\uD50C\uD50F\uD516\uD518\uD51E\uD520\uD53B\uD53D\uD540\uD542\uD546\uD548\uD548'; - XPathLexer._serializedATNSegment1 = `\uD54C\uD552\uD554\uD6A7\uD6AA\uD6C2\uD6C4\uD6DC\uD6DE\uD6FC\uD6FE\uD716\uD718\uD736\uD738\uD750\uD752\uD770\uD772\uD78A\uD78C\uD7AA\uD7AC\uD7C4\uD7C6\uD7CD\uE802\uE8C6\uE902\uE945\uEE02\uEE05\uEE07\uEE21\uEE23\uEE24\uEE26\uEE26\uEE29\uEE29\uEE2B\uEE34\uEE36\uEE39\uEE3B\uEE3B\uEE3D\uEE3D\uEE44\uEE44\uEE49\uEE49\uEE4B\uEE4B\uEE4D\uEE4D\uEE4F\uEE51\uEE53\uEE54\uEE56\uEE56\uEE59\uEE59\uEE5B\uEE5B\uEE5D\uEE5D\uEE5F\uEE5F\uEE61\uEE61\uEE63\uEE64\uEE66\uEE66\uEE69\uEE6C\uEE6E\uEE74\uEE76\uEE79\uEE7B\uEE7E\uEE80\uEE80\uEE82\uEE8B\uEE8D\uEE9D\uEEA3\uEEA5\uEEA7\uEEAB\uEEAD\uEEBD\uA6D8\uA702\uB736\uB742\uB81F\uB822\uCEA3\uF802\uFA1F1\x07 \v\x07 \v\r%')\x071\x071\x071\x07,\b\x07# - \b\r\x07"  !!#" #$\b$\f%& &'( ()-\x07)*,\v+*,/-.-+.0/-01\x07)1 -`; - XPathLexer._serializedATN = Utils3.join([ - XPathLexer._serializedATNSegment0, - XPathLexer._serializedATNSegment1 - ], ""); -}); - -// node_modules/antlr4ts/tree/xpath/XPathLexerErrorListener.js -var require_XPathLexerErrorListener = __commonJS((exports) => { - "use strict"; - var __decorate = exports && exports.__decorate || function(decorators, target, key, desc) { - var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; - if (typeof Reflect === "object" && typeof Reflect.decorate === "function") - r = Reflect.decorate(decorators, target, key, desc); - else - for (var i = decorators.length - 1; i >= 0; i--) - if (d = decorators[i]) - r = (c < 3 ? d(r) : c > 3 ? 
d(target, key, r) : d(target, key)) || r; - return c > 3 && r && Object.defineProperty(target, key, r), r; - }; - Object.defineProperty(exports, "__esModule", {value: true}); - exports.XPathLexerErrorListener = void 0; - var Decorators_1 = require_Decorators(); - var XPathLexerErrorListener = class { - syntaxError(recognizer, offendingSymbol, line, charPositionInLine, msg, e) { - } - }; - __decorate([ - Decorators_1.Override - ], XPathLexerErrorListener.prototype, "syntaxError", null); - exports.XPathLexerErrorListener = XPathLexerErrorListener; -}); - -// node_modules/antlr4ts/tree/xpath/XPathElement.js -var require_XPathElement = __commonJS((exports) => { - "use strict"; - var __decorate = exports && exports.__decorate || function(decorators, target, key, desc) { - var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; - if (typeof Reflect === "object" && typeof Reflect.decorate === "function") - r = Reflect.decorate(decorators, target, key, desc); - else - for (var i = decorators.length - 1; i >= 0; i--) - if (d = decorators[i]) - r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r; - return c > 3 && r && Object.defineProperty(target, key, r), r; - }; - Object.defineProperty(exports, "__esModule", {value: true}); - exports.XPathElement = void 0; - var Decorators_1 = require_Decorators(); - var XPathElement = class { - constructor(nodeName) { - this.nodeName = nodeName; - this.invert = false; - } - toString() { - let inv = this.invert ? "!" 
: ""; - let className = Object.constructor.name; - return className + "[" + inv + this.nodeName + "]"; - } - }; - __decorate([ - Decorators_1.Override - ], XPathElement.prototype, "toString", null); - exports.XPathElement = XPathElement; -}); - -// node_modules/antlr4ts/tree/xpath/XPathRuleAnywhereElement.js -var require_XPathRuleAnywhereElement = __commonJS((exports) => { - "use strict"; - var __decorate = exports && exports.__decorate || function(decorators, target, key, desc) { - var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; - if (typeof Reflect === "object" && typeof Reflect.decorate === "function") - r = Reflect.decorate(decorators, target, key, desc); - else - for (var i = decorators.length - 1; i >= 0; i--) - if (d = decorators[i]) - r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r; - return c > 3 && r && Object.defineProperty(target, key, r), r; - }; - Object.defineProperty(exports, "__esModule", {value: true}); - exports.XPathRuleAnywhereElement = void 0; - var Decorators_1 = require_Decorators(); - var Trees_1 = require_Trees(); - var XPathElement_1 = require_XPathElement(); - var XPathRuleAnywhereElement = class extends XPathElement_1.XPathElement { - constructor(ruleName, ruleIndex) { - super(ruleName); - this.ruleIndex = ruleIndex; - } - evaluate(t) { - return Trees_1.Trees.findAllRuleNodes(t, this.ruleIndex); - } - }; - __decorate([ - Decorators_1.Override - ], XPathRuleAnywhereElement.prototype, "evaluate", null); - exports.XPathRuleAnywhereElement = XPathRuleAnywhereElement; -}); - -// node_modules/antlr4ts/tree/xpath/XPathRuleElement.js -var require_XPathRuleElement = __commonJS((exports) => { - "use strict"; - var __decorate = exports && exports.__decorate || function(decorators, target, key, desc) { - var c = arguments.length, r = c < 3 ? target : desc === null ? 
desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; - if (typeof Reflect === "object" && typeof Reflect.decorate === "function") - r = Reflect.decorate(decorators, target, key, desc); - else - for (var i = decorators.length - 1; i >= 0; i--) - if (d = decorators[i]) - r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r; - return c > 3 && r && Object.defineProperty(target, key, r), r; - }; - Object.defineProperty(exports, "__esModule", {value: true}); - exports.XPathRuleElement = void 0; - var ParserRuleContext_1 = require_ParserRuleContext(); - var Decorators_1 = require_Decorators(); - var Trees_1 = require_Trees(); - var XPathElement_1 = require_XPathElement(); - var XPathRuleElement = class extends XPathElement_1.XPathElement { - constructor(ruleName, ruleIndex) { - super(ruleName); - this.ruleIndex = ruleIndex; - } - evaluate(t) { - let nodes = []; - for (let c of Trees_1.Trees.getChildren(t)) { - if (c instanceof ParserRuleContext_1.ParserRuleContext) { - if (c.ruleIndex === this.ruleIndex && !this.invert || c.ruleIndex !== this.ruleIndex && this.invert) { - nodes.push(c); - } - } - } - return nodes; - } - }; - __decorate([ - Decorators_1.Override - ], XPathRuleElement.prototype, "evaluate", null); - exports.XPathRuleElement = XPathRuleElement; -}); - -// node_modules/antlr4ts/tree/xpath/XPathTokenAnywhereElement.js -var require_XPathTokenAnywhereElement = __commonJS((exports) => { - "use strict"; - var __decorate = exports && exports.__decorate || function(decorators, target, key, desc) { - var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; - if (typeof Reflect === "object" && typeof Reflect.decorate === "function") - r = Reflect.decorate(decorators, target, key, desc); - else - for (var i = decorators.length - 1; i >= 0; i--) - if (d = decorators[i]) - r = (c < 3 ? d(r) : c > 3 ? 
d(target, key, r) : d(target, key)) || r; - return c > 3 && r && Object.defineProperty(target, key, r), r; - }; - Object.defineProperty(exports, "__esModule", {value: true}); - exports.XPathTokenAnywhereElement = void 0; - var Decorators_1 = require_Decorators(); - var Trees_1 = require_Trees(); - var XPathElement_1 = require_XPathElement(); - var XPathTokenAnywhereElement = class extends XPathElement_1.XPathElement { - constructor(tokenName, tokenType) { - super(tokenName); - this.tokenType = tokenType; - } - evaluate(t) { - return Trees_1.Trees.findAllTokenNodes(t, this.tokenType); - } - }; - __decorate([ - Decorators_1.Override - ], XPathTokenAnywhereElement.prototype, "evaluate", null); - exports.XPathTokenAnywhereElement = XPathTokenAnywhereElement; -}); - -// node_modules/antlr4ts/tree/xpath/XPathTokenElement.js -var require_XPathTokenElement = __commonJS((exports) => { - "use strict"; - var __decorate = exports && exports.__decorate || function(decorators, target, key, desc) { - var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; - if (typeof Reflect === "object" && typeof Reflect.decorate === "function") - r = Reflect.decorate(decorators, target, key, desc); - else - for (var i = decorators.length - 1; i >= 0; i--) - if (d = decorators[i]) - r = (c < 3 ? d(r) : c > 3 ? 
d(target, key, r) : d(target, key)) || r; - return c > 3 && r && Object.defineProperty(target, key, r), r; - }; - Object.defineProperty(exports, "__esModule", {value: true}); - exports.XPathTokenElement = void 0; - var Decorators_1 = require_Decorators(); - var TerminalNode_1 = require_TerminalNode(); - var Trees_1 = require_Trees(); - var XPathElement_1 = require_XPathElement(); - var XPathTokenElement = class extends XPathElement_1.XPathElement { - constructor(tokenName, tokenType) { - super(tokenName); - this.tokenType = tokenType; - } - evaluate(t) { - let nodes = []; - for (let c of Trees_1.Trees.getChildren(t)) { - if (c instanceof TerminalNode_1.TerminalNode) { - if (c.symbol.type === this.tokenType && !this.invert || c.symbol.type !== this.tokenType && this.invert) { - nodes.push(c); - } - } - } - return nodes; - } - }; - __decorate([ - Decorators_1.Override - ], XPathTokenElement.prototype, "evaluate", null); - exports.XPathTokenElement = XPathTokenElement; -}); - -// node_modules/antlr4ts/tree/xpath/XPathWildcardAnywhereElement.js -var require_XPathWildcardAnywhereElement = __commonJS((exports) => { - "use strict"; - var __decorate = exports && exports.__decorate || function(decorators, target, key, desc) { - var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; - if (typeof Reflect === "object" && typeof Reflect.decorate === "function") - r = Reflect.decorate(decorators, target, key, desc); - else - for (var i = decorators.length - 1; i >= 0; i--) - if (d = decorators[i]) - r = (c < 3 ? d(r) : c > 3 ? 
d(target, key, r) : d(target, key)) || r; - return c > 3 && r && Object.defineProperty(target, key, r), r; - }; - Object.defineProperty(exports, "__esModule", {value: true}); - exports.XPathWildcardAnywhereElement = void 0; - var Decorators_1 = require_Decorators(); - var Trees_1 = require_Trees(); - var XPath_1 = require_XPath(); - var XPathElement_1 = require_XPathElement(); - var XPathWildcardAnywhereElement = class extends XPathElement_1.XPathElement { - constructor() { - super(XPath_1.XPath.WILDCARD); - } - evaluate(t) { - if (this.invert) { - return []; - } - return Trees_1.Trees.getDescendants(t); - } - }; - __decorate([ - Decorators_1.Override - ], XPathWildcardAnywhereElement.prototype, "evaluate", null); - exports.XPathWildcardAnywhereElement = XPathWildcardAnywhereElement; -}); - -// node_modules/antlr4ts/tree/xpath/XPathWildcardElement.js -var require_XPathWildcardElement = __commonJS((exports) => { - "use strict"; - var __decorate = exports && exports.__decorate || function(decorators, target, key, desc) { - var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; - if (typeof Reflect === "object" && typeof Reflect.decorate === "function") - r = Reflect.decorate(decorators, target, key, desc); - else - for (var i = decorators.length - 1; i >= 0; i--) - if (d = decorators[i]) - r = (c < 3 ? d(r) : c > 3 ? 
d(target, key, r) : d(target, key)) || r; - return c > 3 && r && Object.defineProperty(target, key, r), r; - }; - Object.defineProperty(exports, "__esModule", {value: true}); - exports.XPathWildcardElement = void 0; - var Decorators_1 = require_Decorators(); - var Trees_1 = require_Trees(); - var XPath_1 = require_XPath(); - var XPathElement_1 = require_XPathElement(); - var XPathWildcardElement = class extends XPathElement_1.XPathElement { - constructor() { - super(XPath_1.XPath.WILDCARD); - } - evaluate(t) { - let kids = []; - if (this.invert) { - return kids; - } - for (let c of Trees_1.Trees.getChildren(t)) { - kids.push(c); - } - return kids; - } - }; - __decorate([ - Decorators_1.Override - ], XPathWildcardElement.prototype, "evaluate", null); - exports.XPathWildcardElement = XPathWildcardElement; -}); - -// node_modules/antlr4ts/tree/xpath/XPath.js -var require_XPath = __commonJS((exports) => { - "use strict"; - Object.defineProperty(exports, "__esModule", {value: true}); - exports.XPath = void 0; - var CharStreams_1 = require_CharStreams(); - var CommonTokenStream_1 = require_CommonTokenStream(); - var LexerNoViableAltException_1 = require_LexerNoViableAltException(); - var ParserRuleContext_1 = require_ParserRuleContext(); - var Token_1 = require_Token(); - var XPathLexer_1 = require_XPathLexer(); - var XPathLexerErrorListener_1 = require_XPathLexerErrorListener(); - var XPathRuleAnywhereElement_1 = require_XPathRuleAnywhereElement(); - var XPathRuleElement_1 = require_XPathRuleElement(); - var XPathTokenAnywhereElement_1 = require_XPathTokenAnywhereElement(); - var XPathTokenElement_1 = require_XPathTokenElement(); - var XPathWildcardAnywhereElement_1 = require_XPathWildcardAnywhereElement(); - var XPathWildcardElement_1 = require_XPathWildcardElement(); - var XPath = class { - constructor(parser, path) { - this.parser = parser; - this.path = path; - this.elements = this.split(path); - } - split(path) { - let lexer = new 
XPathLexer_1.XPathLexer(CharStreams_1.CharStreams.fromString(path)); - lexer.recover = (e) => { - throw e; - }; - lexer.removeErrorListeners(); - lexer.addErrorListener(new XPathLexerErrorListener_1.XPathLexerErrorListener()); - let tokenStream = new CommonTokenStream_1.CommonTokenStream(lexer); - try { - tokenStream.fill(); - } catch (e) { - if (e instanceof LexerNoViableAltException_1.LexerNoViableAltException) { - let pos = lexer.charPositionInLine; - let msg = "Invalid tokens or characters at index " + pos + " in path '" + path + "' -- " + e.message; - throw new RangeError(msg); - } - throw e; - } - let tokens2 = tokenStream.getTokens(); - let elements = []; - let n = tokens2.length; - let i = 0; - loop: - while (i < n) { - let el = tokens2[i]; - let next; - switch (el.type) { - case XPathLexer_1.XPathLexer.ROOT: - case XPathLexer_1.XPathLexer.ANYWHERE: - let anywhere = el.type === XPathLexer_1.XPathLexer.ANYWHERE; - i++; - next = tokens2[i]; - let invert = next.type === XPathLexer_1.XPathLexer.BANG; - if (invert) { - i++; - next = tokens2[i]; - } - let pathElement = this.getXPathElement(next, anywhere); - pathElement.invert = invert; - elements.push(pathElement); - i++; - break; - case XPathLexer_1.XPathLexer.TOKEN_REF: - case XPathLexer_1.XPathLexer.RULE_REF: - case XPathLexer_1.XPathLexer.WILDCARD: - elements.push(this.getXPathElement(el, false)); - i++; - break; - case Token_1.Token.EOF: - break loop; - default: - throw new Error("Unknowth path element " + el); - } - } - return elements; - } - getXPathElement(wordToken, anywhere) { - if (wordToken.type === Token_1.Token.EOF) { - throw new Error("Missing path element at end of path"); - } - let word = wordToken.text; - if (word == null) { - throw new Error("Expected wordToken to have text content."); - } - let ttype = this.parser.getTokenType(word); - let ruleIndex = this.parser.getRuleIndex(word); - switch (wordToken.type) { - case XPathLexer_1.XPathLexer.WILDCARD: - return anywhere ? 
new XPathWildcardAnywhereElement_1.XPathWildcardAnywhereElement() : new XPathWildcardElement_1.XPathWildcardElement(); - case XPathLexer_1.XPathLexer.TOKEN_REF: - case XPathLexer_1.XPathLexer.STRING: - if (ttype === Token_1.Token.INVALID_TYPE) { - throw new Error(word + " at index " + wordToken.startIndex + " isn't a valid token name"); - } - return anywhere ? new XPathTokenAnywhereElement_1.XPathTokenAnywhereElement(word, ttype) : new XPathTokenElement_1.XPathTokenElement(word, ttype); - default: - if (ruleIndex === -1) { - throw new Error(word + " at index " + wordToken.startIndex + " isn't a valid rule name"); - } - return anywhere ? new XPathRuleAnywhereElement_1.XPathRuleAnywhereElement(word, ruleIndex) : new XPathRuleElement_1.XPathRuleElement(word, ruleIndex); - } - } - static findAll(tree, xpath, parser) { - let p = new XPath(parser, xpath); - return p.evaluate(tree); - } - evaluate(t) { - let dummyRoot = new ParserRuleContext_1.ParserRuleContext(); - dummyRoot.addChild(t); - let work = new Set([dummyRoot]); - let i = 0; - while (i < this.elements.length) { - let next = new Set(); - for (let node of work) { - if (node.childCount > 0) { - let matching = this.elements[i].evaluate(node); - matching.forEach(next.add, next); - } - } - i++; - work = next; - } - return work; - } - }; - exports.XPath = XPath; - XPath.WILDCARD = "*"; - XPath.NOT = "!"; -}); - -// node_modules/antlr4ts/tree/pattern/ParseTreePattern.js -var require_ParseTreePattern = __commonJS((exports) => { - "use strict"; - var __decorate = exports && exports.__decorate || function(decorators, target, key, desc) { - var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; - if (typeof Reflect === "object" && typeof Reflect.decorate === "function") - r = Reflect.decorate(decorators, target, key, desc); - else - for (var i = decorators.length - 1; i >= 0; i--) - if (d = decorators[i]) - r = (c < 3 ? d(r) : c > 3 ? 
d(target, key, r) : d(target, key)) || r; - return c > 3 && r && Object.defineProperty(target, key, r), r; - }; - var __param = exports && exports.__param || function(paramIndex, decorator) { - return function(target, key) { - decorator(target, key, paramIndex); - }; - }; - Object.defineProperty(exports, "__esModule", {value: true}); - exports.ParseTreePattern = void 0; - var Decorators_1 = require_Decorators(); - var XPath_1 = require_XPath(); - var ParseTreePattern = class ParseTreePattern { - constructor(matcher, pattern, patternRuleIndex, patternTree) { - this._matcher = matcher; - this._patternRuleIndex = patternRuleIndex; - this._pattern = pattern; - this._patternTree = patternTree; - } - match(tree) { - return this._matcher.match(tree, this); - } - matches(tree) { - return this._matcher.match(tree, this).succeeded; - } - findAll(tree, xpath) { - let subtrees = XPath_1.XPath.findAll(tree, xpath, this._matcher.parser); - let matches = []; - for (let t of subtrees) { - let match = this.match(t); - if (match.succeeded) { - matches.push(match); - } - } - return matches; - } - get matcher() { - return this._matcher; - } - get pattern() { - return this._pattern; - } - get patternRuleIndex() { - return this._patternRuleIndex; - } - get patternTree() { - return this._patternTree; - } - }; - __decorate([ - Decorators_1.NotNull - ], ParseTreePattern.prototype, "_pattern", void 0); - __decorate([ - Decorators_1.NotNull - ], ParseTreePattern.prototype, "_patternTree", void 0); - __decorate([ - Decorators_1.NotNull - ], ParseTreePattern.prototype, "_matcher", void 0); - __decorate([ - Decorators_1.NotNull, - __param(0, Decorators_1.NotNull) - ], ParseTreePattern.prototype, "match", null); - __decorate([ - __param(0, Decorators_1.NotNull) - ], ParseTreePattern.prototype, "matches", null); - __decorate([ - Decorators_1.NotNull, - __param(0, Decorators_1.NotNull), - __param(1, Decorators_1.NotNull) - ], ParseTreePattern.prototype, "findAll", null); - __decorate([ - 
Decorators_1.NotNull - ], ParseTreePattern.prototype, "matcher", null); - __decorate([ - Decorators_1.NotNull - ], ParseTreePattern.prototype, "pattern", null); - __decorate([ - Decorators_1.NotNull - ], ParseTreePattern.prototype, "patternTree", null); - ParseTreePattern = __decorate([ - __param(0, Decorators_1.NotNull), - __param(1, Decorators_1.NotNull), - __param(3, Decorators_1.NotNull) - ], ParseTreePattern); - exports.ParseTreePattern = ParseTreePattern; -}); - -// node_modules/antlr4ts/tree/pattern/RuleTagToken.js -var require_RuleTagToken = __commonJS((exports) => { - "use strict"; - var __decorate = exports && exports.__decorate || function(decorators, target, key, desc) { - var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; - if (typeof Reflect === "object" && typeof Reflect.decorate === "function") - r = Reflect.decorate(decorators, target, key, desc); - else - for (var i = decorators.length - 1; i >= 0; i--) - if (d = decorators[i]) - r = (c < 3 ? d(r) : c > 3 ? 
d(target, key, r) : d(target, key)) || r; - return c > 3 && r && Object.defineProperty(target, key, r), r; - }; - var __param = exports && exports.__param || function(paramIndex, decorator) { - return function(target, key) { - decorator(target, key, paramIndex); - }; - }; - Object.defineProperty(exports, "__esModule", {value: true}); - exports.RuleTagToken = void 0; - var Decorators_1 = require_Decorators(); - var Token_1 = require_Token(); - var RuleTagToken = class RuleTagToken { - constructor(ruleName, bypassTokenType, label) { - if (ruleName == null || ruleName.length === 0) { - throw new Error("ruleName cannot be null or empty."); - } - this._ruleName = ruleName; - this.bypassTokenType = bypassTokenType; - this._label = label; - } - get ruleName() { - return this._ruleName; - } - get label() { - return this._label; - } - get channel() { - return Token_1.Token.DEFAULT_CHANNEL; - } - get text() { - if (this._label != null) { - return "<" + this._label + ":" + this._ruleName + ">"; - } - return "<" + this._ruleName + ">"; - } - get type() { - return this.bypassTokenType; - } - get line() { - return 0; - } - get charPositionInLine() { - return -1; - } - get tokenIndex() { - return -1; - } - get startIndex() { - return -1; - } - get stopIndex() { - return -1; - } - get tokenSource() { - return void 0; - } - get inputStream() { - return void 0; - } - toString() { - return this._ruleName + ":" + this.bypassTokenType; - } - }; - __decorate([ - Decorators_1.NotNull - ], RuleTagToken.prototype, "ruleName", null); - __decorate([ - Decorators_1.Override - ], RuleTagToken.prototype, "channel", null); - __decorate([ - Decorators_1.Override - ], RuleTagToken.prototype, "text", null); - __decorate([ - Decorators_1.Override - ], RuleTagToken.prototype, "type", null); - __decorate([ - Decorators_1.Override - ], RuleTagToken.prototype, "line", null); - __decorate([ - Decorators_1.Override - ], RuleTagToken.prototype, "charPositionInLine", null); - __decorate([ - 
Decorators_1.Override - ], RuleTagToken.prototype, "tokenIndex", null); - __decorate([ - Decorators_1.Override - ], RuleTagToken.prototype, "startIndex", null); - __decorate([ - Decorators_1.Override - ], RuleTagToken.prototype, "stopIndex", null); - __decorate([ - Decorators_1.Override - ], RuleTagToken.prototype, "tokenSource", null); - __decorate([ - Decorators_1.Override - ], RuleTagToken.prototype, "inputStream", null); - __decorate([ - Decorators_1.Override - ], RuleTagToken.prototype, "toString", null); - RuleTagToken = __decorate([ - __param(0, Decorators_1.NotNull) - ], RuleTagToken); - exports.RuleTagToken = RuleTagToken; -}); - -// node_modules/antlr4ts/tree/pattern/Chunk.js -var require_Chunk = __commonJS((exports) => { - "use strict"; - Object.defineProperty(exports, "__esModule", {value: true}); - exports.Chunk = void 0; - var Chunk = class { - }; - exports.Chunk = Chunk; -}); - -// node_modules/antlr4ts/tree/pattern/TagChunk.js -var require_TagChunk = __commonJS((exports) => { - "use strict"; - var __decorate = exports && exports.__decorate || function(decorators, target, key, desc) { - var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; - if (typeof Reflect === "object" && typeof Reflect.decorate === "function") - r = Reflect.decorate(decorators, target, key, desc); - else - for (var i = decorators.length - 1; i >= 0; i--) - if (d = decorators[i]) - r = (c < 3 ? d(r) : c > 3 ? 
d(target, key, r) : d(target, key)) || r; - return c > 3 && r && Object.defineProperty(target, key, r), r; - }; - Object.defineProperty(exports, "__esModule", {value: true}); - exports.TagChunk = void 0; - var Chunk_1 = require_Chunk(); - var Decorators_1 = require_Decorators(); - var TagChunk = class extends Chunk_1.Chunk { - constructor(tag, label) { - super(); - if (tag == null || tag.length === 0) { - throw new Error("tag cannot be null or empty"); - } - this._tag = tag; - this._label = label; - } - get tag() { - return this._tag; - } - get label() { - return this._label; - } - toString() { - if (this._label != null) { - return this._label + ":" + this._tag; - } - return this._tag; - } - }; - __decorate([ - Decorators_1.NotNull - ], TagChunk.prototype, "tag", null); - __decorate([ - Decorators_1.Override - ], TagChunk.prototype, "toString", null); - exports.TagChunk = TagChunk; -}); - -// node_modules/antlr4ts/tree/pattern/TextChunk.js -var require_TextChunk = __commonJS((exports) => { - "use strict"; - var __decorate = exports && exports.__decorate || function(decorators, target, key, desc) { - var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; - if (typeof Reflect === "object" && typeof Reflect.decorate === "function") - r = Reflect.decorate(decorators, target, key, desc); - else - for (var i = decorators.length - 1; i >= 0; i--) - if (d = decorators[i]) - r = (c < 3 ? d(r) : c > 3 ? 
d(target, key, r) : d(target, key)) || r; - return c > 3 && r && Object.defineProperty(target, key, r), r; - }; - var __param = exports && exports.__param || function(paramIndex, decorator) { - return function(target, key) { - decorator(target, key, paramIndex); - }; - }; - Object.defineProperty(exports, "__esModule", {value: true}); - exports.TextChunk = void 0; - var Chunk_1 = require_Chunk(); - var Decorators_1 = require_Decorators(); - var TextChunk = class TextChunk extends Chunk_1.Chunk { - constructor(text) { - super(); - if (text == null) { - throw new Error("text cannot be null"); - } - this._text = text; - } - get text() { - return this._text; - } - toString() { - return "'" + this._text + "'"; - } - }; - __decorate([ - Decorators_1.NotNull - ], TextChunk.prototype, "_text", void 0); - __decorate([ - Decorators_1.NotNull - ], TextChunk.prototype, "text", null); - __decorate([ - Decorators_1.Override - ], TextChunk.prototype, "toString", null); - TextChunk = __decorate([ - __param(0, Decorators_1.NotNull) - ], TextChunk); - exports.TextChunk = TextChunk; -}); - -// node_modules/antlr4ts/tree/pattern/TokenTagToken.js -var require_TokenTagToken = __commonJS((exports) => { - "use strict"; - var __decorate = exports && exports.__decorate || function(decorators, target, key, desc) { - var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; - if (typeof Reflect === "object" && typeof Reflect.decorate === "function") - r = Reflect.decorate(decorators, target, key, desc); - else - for (var i = decorators.length - 1; i >= 0; i--) - if (d = decorators[i]) - r = (c < 3 ? d(r) : c > 3 ? 
d(target, key, r) : d(target, key)) || r; - return c > 3 && r && Object.defineProperty(target, key, r), r; - }; - var __param = exports && exports.__param || function(paramIndex, decorator) { - return function(target, key) { - decorator(target, key, paramIndex); - }; - }; - Object.defineProperty(exports, "__esModule", {value: true}); - exports.TokenTagToken = void 0; - var CommonToken_1 = require_CommonToken(); - var Decorators_1 = require_Decorators(); - var TokenTagToken = class TokenTagToken extends CommonToken_1.CommonToken { - constructor(tokenName, type, label) { - super(type); - this._tokenName = tokenName; - this._label = label; - } - get tokenName() { - return this._tokenName; - } - get label() { - return this._label; - } - get text() { - if (this._label != null) { - return "<" + this._label + ":" + this._tokenName + ">"; - } - return "<" + this._tokenName + ">"; - } - toString() { - return this._tokenName + ":" + this.type; - } - }; - __decorate([ - Decorators_1.NotNull - ], TokenTagToken.prototype, "_tokenName", void 0); - __decorate([ - Decorators_1.NotNull - ], TokenTagToken.prototype, "tokenName", null); - __decorate([ - Decorators_1.Override - ], TokenTagToken.prototype, "text", null); - __decorate([ - Decorators_1.Override - ], TokenTagToken.prototype, "toString", null); - TokenTagToken = __decorate([ - __param(0, Decorators_1.NotNull) - ], TokenTagToken); - exports.TokenTagToken = TokenTagToken; -}); - -// node_modules/antlr4ts/tree/pattern/ParseTreePatternMatcher.js -var require_ParseTreePatternMatcher = __commonJS((exports) => { - "use strict"; - var __decorate = exports && exports.__decorate || function(decorators, target, key, desc) { - var c = arguments.length, r = c < 3 ? target : desc === null ? 
desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; - if (typeof Reflect === "object" && typeof Reflect.decorate === "function") - r = Reflect.decorate(decorators, target, key, desc); - else - for (var i = decorators.length - 1; i >= 0; i--) - if (d = decorators[i]) - r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r; - return c > 3 && r && Object.defineProperty(target, key, r), r; - }; - var __param = exports && exports.__param || function(paramIndex, decorator) { - return function(target, key) { - decorator(target, key, paramIndex); - }; - }; - Object.defineProperty(exports, "__esModule", {value: true}); - exports.ParseTreePatternMatcher = void 0; - var BailErrorStrategy_1 = require_BailErrorStrategy(); - var CharStreams_1 = require_CharStreams(); - var CommonTokenStream_1 = require_CommonTokenStream(); - var ListTokenSource_1 = require_ListTokenSource(); - var MultiMap_1 = require_MultiMap(); - var Decorators_1 = require_Decorators(); - var ParseCancellationException_1 = require_ParseCancellationException(); - var ParserInterpreter_1 = require_ParserInterpreter(); - var ParserRuleContext_1 = require_ParserRuleContext(); - var ParseTreeMatch_1 = require_ParseTreeMatch(); - var ParseTreePattern_1 = require_ParseTreePattern(); - var RecognitionException_1 = require_RecognitionException(); - var RuleNode_1 = require_RuleNode(); - var RuleTagToken_1 = require_RuleTagToken(); - var TagChunk_1 = require_TagChunk(); - var TerminalNode_1 = require_TerminalNode(); - var TextChunk_1 = require_TextChunk(); - var Token_1 = require_Token(); - var TokenTagToken_1 = require_TokenTagToken(); - var ParseTreePatternMatcher2 = class { - constructor(lexer, parser) { - this.start = "<"; - this.stop = ">"; - this.escape = "\\"; - this.escapeRE = /\\/g; - this._lexer = lexer; - this._parser = parser; - } - setDelimiters(start, stop, escapeLeft) { - if (!start) { - throw new Error("start cannot be null or empty"); - } - if (!stop) { - throw new Error("stop 
cannot be null or empty"); - } - this.start = start; - this.stop = stop; - this.escape = escapeLeft; - this.escapeRE = new RegExp(escapeLeft.replace(/[.*+?^${}()|[\]\\]/g, "\\$&"), "g"); - } - matches(tree, pattern, patternRuleIndex = 0) { - if (typeof pattern === "string") { - let p = this.compile(pattern, patternRuleIndex); - return this.matches(tree, p); - } else { - let labels = new MultiMap_1.MultiMap(); - let mismatchedNode = this.matchImpl(tree, pattern.patternTree, labels); - return !mismatchedNode; - } - } - match(tree, pattern, patternRuleIndex = 0) { - if (typeof pattern === "string") { - let p = this.compile(pattern, patternRuleIndex); - return this.match(tree, p); - } else { - let labels = new MultiMap_1.MultiMap(); - let mismatchedNode = this.matchImpl(tree, pattern.patternTree, labels); - return new ParseTreeMatch_1.ParseTreeMatch(tree, pattern, labels, mismatchedNode); - } - } - compile(pattern, patternRuleIndex) { - let tokenList = this.tokenize(pattern); - let tokenSrc = new ListTokenSource_1.ListTokenSource(tokenList); - let tokens2 = new CommonTokenStream_1.CommonTokenStream(tokenSrc); - const parser = this._parser; - let parserInterp = new ParserInterpreter_1.ParserInterpreter(parser.grammarFileName, parser.vocabulary, parser.ruleNames, parser.getATNWithBypassAlts(), tokens2); - let tree; - try { - parserInterp.errorHandler = new BailErrorStrategy_1.BailErrorStrategy(); - tree = parserInterp.parse(patternRuleIndex); - } catch (e) { - if (e instanceof ParseCancellationException_1.ParseCancellationException) { - throw e.getCause(); - } else if (e instanceof RecognitionException_1.RecognitionException) { - throw e; - } else if (e instanceof Error) { - throw new ParseTreePatternMatcher2.CannotInvokeStartRule(e); - } else { - throw e; - } - } - if (tokens2.LA(1) !== Token_1.Token.EOF) { - throw new ParseTreePatternMatcher2.StartRuleDoesNotConsumeFullPattern(); - } - return new ParseTreePattern_1.ParseTreePattern(this, pattern, patternRuleIndex, 
tree); - } - get lexer() { - return this._lexer; - } - get parser() { - return this._parser; - } - matchImpl(tree, patternTree, labels) { - if (!tree) { - throw new TypeError("tree cannot be null"); - } - if (!patternTree) { - throw new TypeError("patternTree cannot be null"); - } - if (tree instanceof TerminalNode_1.TerminalNode && patternTree instanceof TerminalNode_1.TerminalNode) { - let mismatchedNode; - if (tree.symbol.type === patternTree.symbol.type) { - if (patternTree.symbol instanceof TokenTagToken_1.TokenTagToken) { - let tokenTagToken = patternTree.symbol; - labels.map(tokenTagToken.tokenName, tree); - const l = tokenTagToken.label; - if (l) { - labels.map(l, tree); - } - } else if (tree.text === patternTree.text) { - } else { - if (!mismatchedNode) { - mismatchedNode = tree; - } - } - } else { - if (!mismatchedNode) { - mismatchedNode = tree; - } - } - return mismatchedNode; - } - if (tree instanceof ParserRuleContext_1.ParserRuleContext && patternTree instanceof ParserRuleContext_1.ParserRuleContext) { - let mismatchedNode; - let ruleTagToken = this.getRuleTagToken(patternTree); - if (ruleTagToken) { - let m; - if (tree.ruleContext.ruleIndex === patternTree.ruleContext.ruleIndex) { - labels.map(ruleTagToken.ruleName, tree); - const l = ruleTagToken.label; - if (l) { - labels.map(l, tree); - } - } else { - if (!mismatchedNode) { - mismatchedNode = tree; - } - } - return mismatchedNode; - } - if (tree.childCount !== patternTree.childCount) { - if (!mismatchedNode) { - mismatchedNode = tree; - } - return mismatchedNode; - } - let n = tree.childCount; - for (let i = 0; i < n; i++) { - let childMatch = this.matchImpl(tree.getChild(i), patternTree.getChild(i), labels); - if (childMatch) { - return childMatch; - } - } - return mismatchedNode; - } - return tree; - } - getRuleTagToken(t) { - if (t instanceof RuleNode_1.RuleNode) { - if (t.childCount === 1 && t.getChild(0) instanceof TerminalNode_1.TerminalNode) { - let c = t.getChild(0); - if (c.symbol 
instanceof RuleTagToken_1.RuleTagToken) { - return c.symbol; - } - } - } - return void 0; - } - tokenize(pattern) { - let chunks = this.split(pattern); - let tokens2 = []; - for (let chunk of chunks) { - if (chunk instanceof TagChunk_1.TagChunk) { - let tagChunk = chunk; - const firstChar = tagChunk.tag.substr(0, 1); - if (firstChar === firstChar.toUpperCase()) { - let ttype = this._parser.getTokenType(tagChunk.tag); - if (ttype === Token_1.Token.INVALID_TYPE) { - throw new Error("Unknown token " + tagChunk.tag + " in pattern: " + pattern); - } - let t = new TokenTagToken_1.TokenTagToken(tagChunk.tag, ttype, tagChunk.label); - tokens2.push(t); - } else if (firstChar === firstChar.toLowerCase()) { - let ruleIndex = this._parser.getRuleIndex(tagChunk.tag); - if (ruleIndex === -1) { - throw new Error("Unknown rule " + tagChunk.tag + " in pattern: " + pattern); - } - let ruleImaginaryTokenType = this._parser.getATNWithBypassAlts().ruleToTokenType[ruleIndex]; - tokens2.push(new RuleTagToken_1.RuleTagToken(tagChunk.tag, ruleImaginaryTokenType, tagChunk.label)); - } else { - throw new Error("invalid tag: " + tagChunk.tag + " in pattern: " + pattern); - } - } else { - let textChunk = chunk; - this._lexer.inputStream = CharStreams_1.CharStreams.fromString(textChunk.text); - let t = this._lexer.nextToken(); - while (t.type !== Token_1.Token.EOF) { - tokens2.push(t); - t = this._lexer.nextToken(); - } - } - } - return tokens2; - } - split(pattern) { - let p = 0; - let n = pattern.length; - let chunks = []; - let buf; - let starts = []; - let stops = []; - while (p < n) { - if (p === pattern.indexOf(this.escape + this.start, p)) { - p += this.escape.length + this.start.length; - } else if (p === pattern.indexOf(this.escape + this.stop, p)) { - p += this.escape.length + this.stop.length; - } else if (p === pattern.indexOf(this.start, p)) { - starts.push(p); - p += this.start.length; - } else if (p === pattern.indexOf(this.stop, p)) { - stops.push(p); - p += this.stop.length; - 
} else { - p++; - } - } - if (starts.length > stops.length) { - throw new Error("unterminated tag in pattern: " + pattern); - } - if (starts.length < stops.length) { - throw new Error("missing start tag in pattern: " + pattern); - } - let ntags = starts.length; - for (let i = 0; i < ntags; i++) { - if (starts[i] >= stops[i]) { - throw new Error("tag delimiters out of order in pattern: " + pattern); - } - } - if (ntags === 0) { - let text = pattern.substring(0, n); - chunks.push(new TextChunk_1.TextChunk(text)); - } - if (ntags > 0 && starts[0] > 0) { - let text = pattern.substring(0, starts[0]); - chunks.push(new TextChunk_1.TextChunk(text)); - } - for (let i = 0; i < ntags; i++) { - let tag = pattern.substring(starts[i] + this.start.length, stops[i]); - let ruleOrToken = tag; - let label; - let colon = tag.indexOf(":"); - if (colon >= 0) { - label = tag.substring(0, colon); - ruleOrToken = tag.substring(colon + 1, tag.length); - } - chunks.push(new TagChunk_1.TagChunk(ruleOrToken, label)); - if (i + 1 < ntags) { - let text = pattern.substring(stops[i] + this.stop.length, starts[i + 1]); - chunks.push(new TextChunk_1.TextChunk(text)); - } - } - if (ntags > 0) { - let afterLastTag = stops[ntags - 1] + this.stop.length; - if (afterLastTag < n) { - let text = pattern.substring(afterLastTag, n); - chunks.push(new TextChunk_1.TextChunk(text)); - } - } - for (let i = 0; i < chunks.length; i++) { - let c = chunks[i]; - if (c instanceof TextChunk_1.TextChunk) { - let unescaped = c.text.replace(this.escapeRE, ""); - if (unescaped.length < c.text.length) { - chunks[i] = new TextChunk_1.TextChunk(unescaped); - } - } - } - return chunks; - } - }; - __decorate([ - Decorators_1.NotNull, - __param(1, Decorators_1.NotNull) - ], ParseTreePatternMatcher2.prototype, "match", null); - __decorate([ - Decorators_1.NotNull - ], ParseTreePatternMatcher2.prototype, "lexer", null); - __decorate([ - Decorators_1.NotNull - ], ParseTreePatternMatcher2.prototype, "parser", null); - __decorate([ 
- __param(0, Decorators_1.NotNull), - __param(1, Decorators_1.NotNull), - __param(2, Decorators_1.NotNull) - ], ParseTreePatternMatcher2.prototype, "matchImpl", null); - exports.ParseTreePatternMatcher = ParseTreePatternMatcher2; - (function(ParseTreePatternMatcher3) { - class CannotInvokeStartRule extends Error { - constructor(error) { - super(`CannotInvokeStartRule: ${error}`); - this.error = error; - } - } - ParseTreePatternMatcher3.CannotInvokeStartRule = CannotInvokeStartRule; - class StartRuleDoesNotConsumeFullPattern extends Error { - constructor() { - super("StartRuleDoesNotConsumeFullPattern"); - } - } - ParseTreePatternMatcher3.StartRuleDoesNotConsumeFullPattern = StartRuleDoesNotConsumeFullPattern; - })(ParseTreePatternMatcher2 = exports.ParseTreePatternMatcher || (exports.ParseTreePatternMatcher = {})); -}); - -// node_modules/antlr4ts/atn/DecisionEventInfo.js -var require_DecisionEventInfo = __commonJS((exports) => { - "use strict"; - var __decorate = exports && exports.__decorate || function(decorators, target, key, desc) { - var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; - if (typeof Reflect === "object" && typeof Reflect.decorate === "function") - r = Reflect.decorate(decorators, target, key, desc); - else - for (var i = decorators.length - 1; i >= 0; i--) - if (d = decorators[i]) - r = (c < 3 ? d(r) : c > 3 ? 
d(target, key, r) : d(target, key)) || r; - return c > 3 && r && Object.defineProperty(target, key, r), r; - }; - var __param = exports && exports.__param || function(paramIndex, decorator) { - return function(target, key) { - decorator(target, key, paramIndex); - }; - }; - Object.defineProperty(exports, "__esModule", {value: true}); - exports.DecisionEventInfo = void 0; - var Decorators_1 = require_Decorators(); - var DecisionEventInfo = class DecisionEventInfo { - constructor(decision, state, input, startIndex, stopIndex, fullCtx) { - this.decision = decision; - this.fullCtx = fullCtx; - this.stopIndex = stopIndex; - this.input = input; - this.startIndex = startIndex; - this.state = state; - } - }; - __decorate([ - Decorators_1.NotNull - ], DecisionEventInfo.prototype, "input", void 0); - DecisionEventInfo = __decorate([ - __param(2, Decorators_1.NotNull) - ], DecisionEventInfo); - exports.DecisionEventInfo = DecisionEventInfo; -}); - -// node_modules/antlr4ts/atn/AmbiguityInfo.js -var require_AmbiguityInfo = __commonJS((exports) => { - "use strict"; - var __decorate = exports && exports.__decorate || function(decorators, target, key, desc) { - var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; - if (typeof Reflect === "object" && typeof Reflect.decorate === "function") - r = Reflect.decorate(decorators, target, key, desc); - else - for (var i = decorators.length - 1; i >= 0; i--) - if (d = decorators[i]) - r = (c < 3 ? d(r) : c > 3 ? 
d(target, key, r) : d(target, key)) || r; - return c > 3 && r && Object.defineProperty(target, key, r), r; - }; - var __param = exports && exports.__param || function(paramIndex, decorator) { - return function(target, key) { - decorator(target, key, paramIndex); - }; - }; - Object.defineProperty(exports, "__esModule", {value: true}); - exports.AmbiguityInfo = void 0; - var DecisionEventInfo_1 = require_DecisionEventInfo(); - var Decorators_1 = require_Decorators(); - var AmbiguityInfo = class AmbiguityInfo extends DecisionEventInfo_1.DecisionEventInfo { - constructor(decision, state, ambigAlts, input, startIndex, stopIndex) { - super(decision, state, input, startIndex, stopIndex, state.useContext); - this.ambigAlts = ambigAlts; - } - get ambiguousAlternatives() { - return this.ambigAlts; - } - }; - __decorate([ - Decorators_1.NotNull - ], AmbiguityInfo.prototype, "ambigAlts", void 0); - __decorate([ - Decorators_1.NotNull - ], AmbiguityInfo.prototype, "ambiguousAlternatives", null); - AmbiguityInfo = __decorate([ - __param(1, Decorators_1.NotNull), - __param(2, Decorators_1.NotNull), - __param(3, Decorators_1.NotNull) - ], AmbiguityInfo); - exports.AmbiguityInfo = AmbiguityInfo; -}); - -// node_modules/antlr4ts/atn/ContextSensitivityInfo.js -var require_ContextSensitivityInfo = __commonJS((exports) => { - "use strict"; - var __decorate = exports && exports.__decorate || function(decorators, target, key, desc) { - var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; - if (typeof Reflect === "object" && typeof Reflect.decorate === "function") - r = Reflect.decorate(decorators, target, key, desc); - else - for (var i = decorators.length - 1; i >= 0; i--) - if (d = decorators[i]) - r = (c < 3 ? d(r) : c > 3 ? 
d(target, key, r) : d(target, key)) || r; - return c > 3 && r && Object.defineProperty(target, key, r), r; - }; - var __param = exports && exports.__param || function(paramIndex, decorator) { - return function(target, key) { - decorator(target, key, paramIndex); - }; - }; - Object.defineProperty(exports, "__esModule", {value: true}); - exports.ContextSensitivityInfo = void 0; - var DecisionEventInfo_1 = require_DecisionEventInfo(); - var Decorators_1 = require_Decorators(); - var ContextSensitivityInfo = class ContextSensitivityInfo extends DecisionEventInfo_1.DecisionEventInfo { - constructor(decision, state, input, startIndex, stopIndex) { - super(decision, state, input, startIndex, stopIndex, true); - } - }; - ContextSensitivityInfo = __decorate([ - __param(1, Decorators_1.NotNull), - __param(2, Decorators_1.NotNull) - ], ContextSensitivityInfo); - exports.ContextSensitivityInfo = ContextSensitivityInfo; -}); - -// node_modules/antlr4ts/atn/DecisionInfo.js -var require_DecisionInfo = __commonJS((exports) => { - "use strict"; - var __decorate = exports && exports.__decorate || function(decorators, target, key, desc) { - var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; - if (typeof Reflect === "object" && typeof Reflect.decorate === "function") - r = Reflect.decorate(decorators, target, key, desc); - else - for (var i = decorators.length - 1; i >= 0; i--) - if (d = decorators[i]) - r = (c < 3 ? d(r) : c > 3 ? 
d(target, key, r) : d(target, key)) || r; - return c > 3 && r && Object.defineProperty(target, key, r), r; - }; - Object.defineProperty(exports, "__esModule", {value: true}); - exports.DecisionInfo = void 0; - var Decorators_1 = require_Decorators(); - var DecisionInfo = class { - constructor(decision) { - this.invocations = 0; - this.timeInPrediction = 0; - this.SLL_TotalLook = 0; - this.SLL_MinLook = 0; - this.SLL_MaxLook = 0; - this.LL_TotalLook = 0; - this.LL_MinLook = 0; - this.LL_MaxLook = 0; - this.contextSensitivities = []; - this.errors = []; - this.ambiguities = []; - this.predicateEvals = []; - this.SLL_ATNTransitions = 0; - this.SLL_DFATransitions = 0; - this.LL_Fallback = 0; - this.LL_ATNTransitions = 0; - this.LL_DFATransitions = 0; - this.decision = decision; - } - toString() { - return "{decision=" + this.decision + ", contextSensitivities=" + this.contextSensitivities.length + ", errors=" + this.errors.length + ", ambiguities=" + this.ambiguities.length + ", SLL_lookahead=" + this.SLL_TotalLook + ", SLL_ATNTransitions=" + this.SLL_ATNTransitions + ", SLL_DFATransitions=" + this.SLL_DFATransitions + ", LL_Fallback=" + this.LL_Fallback + ", LL_lookahead=" + this.LL_TotalLook + ", LL_ATNTransitions=" + this.LL_ATNTransitions + "}"; - } - }; - __decorate([ - Decorators_1.Override - ], DecisionInfo.prototype, "toString", null); - exports.DecisionInfo = DecisionInfo; -}); - -// node_modules/antlr4ts/atn/ErrorInfo.js -var require_ErrorInfo = __commonJS((exports) => { - "use strict"; - var __decorate = exports && exports.__decorate || function(decorators, target, key, desc) { - var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; - if (typeof Reflect === "object" && typeof Reflect.decorate === "function") - r = Reflect.decorate(decorators, target, key, desc); - else - for (var i = decorators.length - 1; i >= 0; i--) - if (d = decorators[i]) - r = (c < 3 ? d(r) : c > 3 ? 
d(target, key, r) : d(target, key)) || r; - return c > 3 && r && Object.defineProperty(target, key, r), r; - }; - var __param = exports && exports.__param || function(paramIndex, decorator) { - return function(target, key) { - decorator(target, key, paramIndex); - }; - }; - Object.defineProperty(exports, "__esModule", {value: true}); - exports.ErrorInfo = void 0; - var DecisionEventInfo_1 = require_DecisionEventInfo(); - var Decorators_1 = require_Decorators(); - var ErrorInfo = class ErrorInfo extends DecisionEventInfo_1.DecisionEventInfo { - constructor(decision, state, input, startIndex, stopIndex) { - super(decision, state, input, startIndex, stopIndex, state.useContext); - } - }; - ErrorInfo = __decorate([ - __param(1, Decorators_1.NotNull), - __param(2, Decorators_1.NotNull) - ], ErrorInfo); - exports.ErrorInfo = ErrorInfo; -}); - -// node_modules/antlr4ts/atn/LookaheadEventInfo.js -var require_LookaheadEventInfo = __commonJS((exports) => { - "use strict"; - var __decorate = exports && exports.__decorate || function(decorators, target, key, desc) { - var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; - if (typeof Reflect === "object" && typeof Reflect.decorate === "function") - r = Reflect.decorate(decorators, target, key, desc); - else - for (var i = decorators.length - 1; i >= 0; i--) - if (d = decorators[i]) - r = (c < 3 ? d(r) : c > 3 ? 
d(target, key, r) : d(target, key)) || r; - return c > 3 && r && Object.defineProperty(target, key, r), r; - }; - var __param = exports && exports.__param || function(paramIndex, decorator) { - return function(target, key) { - decorator(target, key, paramIndex); - }; - }; - Object.defineProperty(exports, "__esModule", {value: true}); - exports.LookaheadEventInfo = void 0; - var DecisionEventInfo_1 = require_DecisionEventInfo(); - var Decorators_1 = require_Decorators(); - var LookaheadEventInfo = class LookaheadEventInfo extends DecisionEventInfo_1.DecisionEventInfo { - constructor(decision, state, predictedAlt, input, startIndex, stopIndex, fullCtx) { - super(decision, state, input, startIndex, stopIndex, fullCtx); - this.predictedAlt = predictedAlt; - } - }; - LookaheadEventInfo = __decorate([ - __param(3, Decorators_1.NotNull) - ], LookaheadEventInfo); - exports.LookaheadEventInfo = LookaheadEventInfo; -}); - -// node_modules/antlr4ts/atn/PredicateEvalInfo.js -var require_PredicateEvalInfo = __commonJS((exports) => { - "use strict"; - var __decorate = exports && exports.__decorate || function(decorators, target, key, desc) { - var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; - if (typeof Reflect === "object" && typeof Reflect.decorate === "function") - r = Reflect.decorate(decorators, target, key, desc); - else - for (var i = decorators.length - 1; i >= 0; i--) - if (d = decorators[i]) - r = (c < 3 ? d(r) : c > 3 ? 
d(target, key, r) : d(target, key)) || r; - return c > 3 && r && Object.defineProperty(target, key, r), r; - }; - var __param = exports && exports.__param || function(paramIndex, decorator) { - return function(target, key) { - decorator(target, key, paramIndex); - }; - }; - Object.defineProperty(exports, "__esModule", {value: true}); - exports.PredicateEvalInfo = void 0; - var DecisionEventInfo_1 = require_DecisionEventInfo(); - var Decorators_1 = require_Decorators(); - var PredicateEvalInfo = class PredicateEvalInfo extends DecisionEventInfo_1.DecisionEventInfo { - constructor(state, decision, input, startIndex, stopIndex, semctx, evalResult, predictedAlt) { - super(decision, state, input, startIndex, stopIndex, state.useContext); - this.semctx = semctx; - this.evalResult = evalResult; - this.predictedAlt = predictedAlt; - } - }; - PredicateEvalInfo = __decorate([ - __param(0, Decorators_1.NotNull), - __param(2, Decorators_1.NotNull), - __param(5, Decorators_1.NotNull) - ], PredicateEvalInfo); - exports.PredicateEvalInfo = PredicateEvalInfo; -}); - -// node_modules/antlr4ts/atn/ProfilingATNSimulator.js -var require_ProfilingATNSimulator = __commonJS((exports) => { - "use strict"; - var __decorate = exports && exports.__decorate || function(decorators, target, key, desc) { - var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; - if (typeof Reflect === "object" && typeof Reflect.decorate === "function") - r = Reflect.decorate(decorators, target, key, desc); - else - for (var i = decorators.length - 1; i >= 0; i--) - if (d = decorators[i]) - r = (c < 3 ? d(r) : c > 3 ? 
d(target, key, r) : d(target, key)) || r; - return c > 3 && r && Object.defineProperty(target, key, r), r; - }; - var __param = exports && exports.__param || function(paramIndex, decorator) { - return function(target, key) { - decorator(target, key, paramIndex); - }; - }; - Object.defineProperty(exports, "__esModule", {value: true}); - exports.ProfilingATNSimulator = void 0; - var AmbiguityInfo_1 = require_AmbiguityInfo(); - var ATN_1 = require_ATN(); - var ATNSimulator_1 = require_ATNSimulator(); - var ContextSensitivityInfo_1 = require_ContextSensitivityInfo(); - var DecisionInfo_1 = require_DecisionInfo(); - var ErrorInfo_1 = require_ErrorInfo(); - var Decorators_1 = require_Decorators(); - var LookaheadEventInfo_1 = require_LookaheadEventInfo(); - var ParserATNSimulator_1 = require_ParserATNSimulator(); - var PredicateEvalInfo_1 = require_PredicateEvalInfo(); - var SemanticContext_1 = require_SemanticContext(); - var SimulatorState_1 = require_SimulatorState(); - var ProfilingATNSimulator = class extends ParserATNSimulator_1.ParserATNSimulator { - constructor(parser) { - super(parser.interpreter.atn, parser); - this._startIndex = 0; - this._sllStopIndex = 0; - this._llStopIndex = 0; - this.currentDecision = 0; - this.conflictingAltResolvedBySLL = 0; - this.optimize_ll1 = false; - this.reportAmbiguities = true; - this.numDecisions = this.atn.decisionToState.length; - this.decisions = []; - for (let i = 0; i < this.numDecisions; i++) { - this.decisions.push(new DecisionInfo_1.DecisionInfo(i)); - } - } - adaptivePredict(input, decision, outerContext, useContext) { - if (useContext !== void 0) { - return super.adaptivePredict(input, decision, outerContext, useContext); - } - try { - this._input = input; - this._startIndex = input.index; - this._sllStopIndex = this._startIndex - 1; - this._llStopIndex = -1; - this.currentDecision = decision; - this.currentState = void 0; - this.conflictingAltResolvedBySLL = ATN_1.ATN.INVALID_ALT_NUMBER; - let start = 
process.hrtime(); - let alt = super.adaptivePredict(input, decision, outerContext); - let stop = process.hrtime(); - let nanoseconds = (stop[0] - start[0]) * 1e9; - if (nanoseconds === 0) { - nanoseconds = stop[1] - start[1]; - } else { - nanoseconds += 1e9 - start[1] + stop[1]; - } - this.decisions[decision].timeInPrediction += nanoseconds; - this.decisions[decision].invocations++; - let SLL_k = this._sllStopIndex - this._startIndex + 1; - this.decisions[decision].SLL_TotalLook += SLL_k; - this.decisions[decision].SLL_MinLook = this.decisions[decision].SLL_MinLook === 0 ? SLL_k : Math.min(this.decisions[decision].SLL_MinLook, SLL_k); - if (SLL_k > this.decisions[decision].SLL_MaxLook) { - this.decisions[decision].SLL_MaxLook = SLL_k; - this.decisions[decision].SLL_MaxLookEvent = new LookaheadEventInfo_1.LookaheadEventInfo(decision, void 0, alt, input, this._startIndex, this._sllStopIndex, false); - } - if (this._llStopIndex >= 0) { - let LL_k = this._llStopIndex - this._startIndex + 1; - this.decisions[decision].LL_TotalLook += LL_k; - this.decisions[decision].LL_MinLook = this.decisions[decision].LL_MinLook === 0 ? 
LL_k : Math.min(this.decisions[decision].LL_MinLook, LL_k); - if (LL_k > this.decisions[decision].LL_MaxLook) { - this.decisions[decision].LL_MaxLook = LL_k; - this.decisions[decision].LL_MaxLookEvent = new LookaheadEventInfo_1.LookaheadEventInfo(decision, void 0, alt, input, this._startIndex, this._llStopIndex, true); - } - } - return alt; - } finally { - this._input = void 0; - this.currentDecision = -1; - } - } - getStartState(dfa, input, outerContext, useContext) { - let state = super.getStartState(dfa, input, outerContext, useContext); - this.currentState = state; - return state; - } - computeStartState(dfa, globalContext, useContext) { - let state = super.computeStartState(dfa, globalContext, useContext); - this.currentState = state; - return state; - } - computeReachSet(dfa, previous, t, contextCache) { - if (this._input === void 0) { - throw new Error("Invalid state"); - } - let reachState = super.computeReachSet(dfa, previous, t, contextCache); - if (reachState == null) { - this.decisions[this.currentDecision].errors.push(new ErrorInfo_1.ErrorInfo(this.currentDecision, previous, this._input, this._startIndex, this._input.index)); - } - this.currentState = reachState; - return reachState; - } - getExistingTargetState(previousD, t) { - if (this.currentState === void 0 || this._input === void 0) { - throw new Error("Invalid state"); - } - if (this.currentState.useContext) { - this._llStopIndex = this._input.index; - } else { - this._sllStopIndex = this._input.index; - } - let existingTargetState = super.getExistingTargetState(previousD, t); - if (existingTargetState != null) { - this.currentState = new SimulatorState_1.SimulatorState(this.currentState.outerContext, existingTargetState, this.currentState.useContext, this.currentState.remainingOuterContext); - if (this.currentState.useContext) { - this.decisions[this.currentDecision].LL_DFATransitions++; - } else { - this.decisions[this.currentDecision].SLL_DFATransitions++; - } - if (existingTargetState === 
ATNSimulator_1.ATNSimulator.ERROR) { - let state = new SimulatorState_1.SimulatorState(this.currentState.outerContext, previousD, this.currentState.useContext, this.currentState.remainingOuterContext); - this.decisions[this.currentDecision].errors.push(new ErrorInfo_1.ErrorInfo(this.currentDecision, state, this._input, this._startIndex, this._input.index)); - } - } - return existingTargetState; - } - computeTargetState(dfa, s, remainingGlobalContext, t, useContext, contextCache) { - let targetState = super.computeTargetState(dfa, s, remainingGlobalContext, t, useContext, contextCache); - if (useContext) { - this.decisions[this.currentDecision].LL_ATNTransitions++; - } else { - this.decisions[this.currentDecision].SLL_ATNTransitions++; - } - return targetState; - } - evalSemanticContextImpl(pred, parserCallStack, alt) { - if (this.currentState === void 0 || this._input === void 0) { - throw new Error("Invalid state"); - } - let result = super.evalSemanticContextImpl(pred, parserCallStack, alt); - if (!(pred instanceof SemanticContext_1.SemanticContext.PrecedencePredicate)) { - let fullContext = this._llStopIndex >= 0; - let stopIndex = fullContext ? 
this._llStopIndex : this._sllStopIndex; - this.decisions[this.currentDecision].predicateEvals.push(new PredicateEvalInfo_1.PredicateEvalInfo(this.currentState, this.currentDecision, this._input, this._startIndex, stopIndex, pred, result, alt)); - } - return result; - } - reportContextSensitivity(dfa, prediction, acceptState, startIndex, stopIndex) { - if (this._input === void 0) { - throw new Error("Invalid state"); - } - if (prediction !== this.conflictingAltResolvedBySLL) { - this.decisions[this.currentDecision].contextSensitivities.push(new ContextSensitivityInfo_1.ContextSensitivityInfo(this.currentDecision, acceptState, this._input, startIndex, stopIndex)); - } - super.reportContextSensitivity(dfa, prediction, acceptState, startIndex, stopIndex); - } - reportAttemptingFullContext(dfa, conflictingAlts, conflictState, startIndex, stopIndex) { - if (conflictingAlts != null) { - this.conflictingAltResolvedBySLL = conflictingAlts.nextSetBit(0); - } else { - this.conflictingAltResolvedBySLL = conflictState.s0.configs.getRepresentedAlternatives().nextSetBit(0); - } - this.decisions[this.currentDecision].LL_Fallback++; - super.reportAttemptingFullContext(dfa, conflictingAlts, conflictState, startIndex, stopIndex); - } - reportAmbiguity(dfa, D, startIndex, stopIndex, exact, ambigAlts, configs) { - if (this.currentState === void 0 || this._input === void 0) { - throw new Error("Invalid state"); - } - let prediction; - if (ambigAlts != null) { - prediction = ambigAlts.nextSetBit(0); - } else { - prediction = configs.getRepresentedAlternatives().nextSetBit(0); - } - if (this.conflictingAltResolvedBySLL !== ATN_1.ATN.INVALID_ALT_NUMBER && prediction !== this.conflictingAltResolvedBySLL) { - this.decisions[this.currentDecision].contextSensitivities.push(new ContextSensitivityInfo_1.ContextSensitivityInfo(this.currentDecision, this.currentState, this._input, startIndex, stopIndex)); - } - this.decisions[this.currentDecision].ambiguities.push(new 
AmbiguityInfo_1.AmbiguityInfo(this.currentDecision, this.currentState, ambigAlts, this._input, startIndex, stopIndex)); - super.reportAmbiguity(dfa, D, startIndex, stopIndex, exact, ambigAlts, configs); - } - getDecisionInfo() { - return this.decisions; - } - getCurrentState() { - return this.currentState; - } - }; - __decorate([ - Decorators_1.Override, - __param(0, Decorators_1.NotNull) - ], ProfilingATNSimulator.prototype, "adaptivePredict", null); - __decorate([ - Decorators_1.Override - ], ProfilingATNSimulator.prototype, "getStartState", null); - __decorate([ - Decorators_1.Override - ], ProfilingATNSimulator.prototype, "computeStartState", null); - __decorate([ - Decorators_1.Override - ], ProfilingATNSimulator.prototype, "computeReachSet", null); - __decorate([ - Decorators_1.Override - ], ProfilingATNSimulator.prototype, "getExistingTargetState", null); - __decorate([ - Decorators_1.Override - ], ProfilingATNSimulator.prototype, "computeTargetState", null); - __decorate([ - Decorators_1.Override - ], ProfilingATNSimulator.prototype, "evalSemanticContextImpl", null); - __decorate([ - Decorators_1.Override - ], ProfilingATNSimulator.prototype, "reportContextSensitivity", null); - __decorate([ - Decorators_1.Override - ], ProfilingATNSimulator.prototype, "reportAttemptingFullContext", null); - __decorate([ - Decorators_1.Override, - __param(0, Decorators_1.NotNull), - __param(5, Decorators_1.NotNull), - __param(6, Decorators_1.NotNull) - ], ProfilingATNSimulator.prototype, "reportAmbiguity", null); - exports.ProfilingATNSimulator = ProfilingATNSimulator; -}); - -// node_modules/antlr4ts/Parser.js -var require_Parser = __commonJS((exports) => { - "use strict"; - var __decorate = exports && exports.__decorate || function(decorators, target, key, desc) { - var c = arguments.length, r = c < 3 ? target : desc === null ? 
desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; - if (typeof Reflect === "object" && typeof Reflect.decorate === "function") - r = Reflect.decorate(decorators, target, key, desc); - else - for (var i = decorators.length - 1; i >= 0; i--) - if (d = decorators[i]) - r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r; - return c > 3 && r && Object.defineProperty(target, key, r), r; - }; - var __param = exports && exports.__param || function(paramIndex, decorator) { - return function(target, key) { - decorator(target, key, paramIndex); - }; - }; - var __awaiter = exports && exports.__awaiter || function(thisArg, _arguments, P, generator) { - function adopt(value) { - return value instanceof P ? value : new P(function(resolve) { - resolve(value); - }); - } - return new (P || (P = Promise))(function(resolve, reject) { - function fulfilled(value) { - try { - step(generator.next(value)); - } catch (e) { - reject(e); - } - } - function rejected(value) { - try { - step(generator["throw"](value)); - } catch (e) { - reject(e); - } - } - function step(result) { - result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); - } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); - }; - Object.defineProperty(exports, "__esModule", {value: true}); - exports.Parser = void 0; - var Utils3 = require_Utils(); - var ATNDeserializationOptions_1 = require_ATNDeserializationOptions(); - var ATNDeserializer_1 = require_ATNDeserializer(); - var DefaultErrorStrategy_1 = require_DefaultErrorStrategy(); - var ErrorNode_1 = require_ErrorNode(); - var IntegerStack_1 = require_IntegerStack(); - var Lexer_1 = require_Lexer(); - var Decorators_1 = require_Decorators(); - var ParseInfo_1 = require_ParseInfo(); - var ParserATNSimulator_1 = require_ParserATNSimulator(); - var ProxyParserErrorListener_1 = require_ProxyParserErrorListener(); - var Recognizer_1 = require_Recognizer(); - var TerminalNode_1 = require_TerminalNode(); - var Token_1 = require_Token(); - var TraceListener = class { - constructor(ruleNames, tokenStream) { - this.ruleNames = ruleNames; - this.tokenStream = tokenStream; - } - enterEveryRule(ctx) { - console.log("enter " + this.ruleNames[ctx.ruleIndex] + ", LT(1)=" + this.tokenStream.LT(1).text); - } - exitEveryRule(ctx) { - console.log("exit " + this.ruleNames[ctx.ruleIndex] + ", LT(1)=" + this.tokenStream.LT(1).text); - } - visitErrorNode(node) { - } - visitTerminal(node) { - let parent = node.parent.ruleContext; - let token = node.symbol; - console.log("consume " + token + " rule " + this.ruleNames[parent.ruleIndex]); - } - }; - __decorate([ - Decorators_1.Override - ], TraceListener.prototype, "enterEveryRule", null); - __decorate([ - Decorators_1.Override - ], TraceListener.prototype, "exitEveryRule", null); - __decorate([ - Decorators_1.Override - ], TraceListener.prototype, "visitErrorNode", null); - __decorate([ - Decorators_1.Override - ], TraceListener.prototype, "visitTerminal", null); - var Parser2 = class extends Recognizer_1.Recognizer { - constructor(input) { - super(); - 
this._errHandler = new DefaultErrorStrategy_1.DefaultErrorStrategy(); - this._precedenceStack = new IntegerStack_1.IntegerStack(); - this._buildParseTrees = true; - this._parseListeners = []; - this._syntaxErrors = 0; - this.matchedEOF = false; - this._precedenceStack.push(0); - this.inputStream = input; - } - reset(resetInput) { - if (resetInput === void 0 || resetInput) { - this.inputStream.seek(0); - } - this._errHandler.reset(this); - this._ctx = void 0; - this._syntaxErrors = 0; - this.matchedEOF = false; - this.isTrace = false; - this._precedenceStack.clear(); - this._precedenceStack.push(0); - let interpreter = this.interpreter; - if (interpreter != null) { - interpreter.reset(); - } - } - match(ttype) { - let t = this.currentToken; - if (t.type === ttype) { - if (ttype === Token_1.Token.EOF) { - this.matchedEOF = true; - } - this._errHandler.reportMatch(this); - this.consume(); - } else { - t = this._errHandler.recoverInline(this); - if (this._buildParseTrees && t.tokenIndex === -1) { - this._ctx.addErrorNode(this.createErrorNode(this._ctx, t)); - } - } - return t; - } - matchWildcard() { - let t = this.currentToken; - if (t.type > 0) { - this._errHandler.reportMatch(this); - this.consume(); - } else { - t = this._errHandler.recoverInline(this); - if (this._buildParseTrees && t.tokenIndex === -1) { - this._ctx.addErrorNode(this.createErrorNode(this._ctx, t)); - } - } - return t; - } - set buildParseTree(buildParseTrees) { - this._buildParseTrees = buildParseTrees; - } - get buildParseTree() { - return this._buildParseTrees; - } - getParseListeners() { - return this._parseListeners; - } - addParseListener(listener) { - if (listener == null) { - throw new TypeError("listener cannot be null"); - } - this._parseListeners.push(listener); - } - removeParseListener(listener) { - let index = this._parseListeners.findIndex((l) => l === listener); - if (index !== -1) { - this._parseListeners.splice(index, 1); - } - } - removeParseListeners() { - 
this._parseListeners.length = 0; - } - triggerEnterRuleEvent() { - for (let listener of this._parseListeners) { - if (listener.enterEveryRule) { - listener.enterEveryRule(this._ctx); - } - this._ctx.enterRule(listener); - } - } - triggerExitRuleEvent() { - for (let i = this._parseListeners.length - 1; i >= 0; i--) { - let listener = this._parseListeners[i]; - this._ctx.exitRule(listener); - if (listener.exitEveryRule) { - listener.exitEveryRule(this._ctx); - } - } - } - get numberOfSyntaxErrors() { - return this._syntaxErrors; - } - get tokenFactory() { - return this._input.tokenSource.tokenFactory; - } - getATNWithBypassAlts() { - let serializedAtn = this.serializedATN; - if (serializedAtn == null) { - throw new Error("The current parser does not support an ATN with bypass alternatives."); - } - let result = Parser2.bypassAltsAtnCache.get(serializedAtn); - if (result == null) { - let deserializationOptions = new ATNDeserializationOptions_1.ATNDeserializationOptions(); - deserializationOptions.isGenerateRuleBypassTransitions = true; - result = new ATNDeserializer_1.ATNDeserializer(deserializationOptions).deserialize(Utils3.toCharArray(serializedAtn)); - Parser2.bypassAltsAtnCache.set(serializedAtn, result); - } - return result; - } - compileParseTreePattern(pattern, patternRuleIndex, lexer) { - return __awaiter(this, void 0, void 0, function* () { - if (!lexer) { - if (this.inputStream) { - let tokenSource = this.inputStream.tokenSource; - if (tokenSource instanceof Lexer_1.Lexer) { - lexer = tokenSource; - } - } - if (!lexer) { - throw new Error("Parser can't discover a lexer to use"); - } - } - let currentLexer = lexer; - let m = yield Promise.resolve().then(() => require_ParseTreePatternMatcher()); - let matcher = new m.ParseTreePatternMatcher(currentLexer, this); - return matcher.compile(pattern, patternRuleIndex); - }); - } - get errorHandler() { - return this._errHandler; - } - set errorHandler(handler) { - this._errHandler = handler; - } - get inputStream() 
{ - return this._input; - } - set inputStream(input) { - this.reset(false); - this._input = input; - } - get currentToken() { - return this._input.LT(1); - } - notifyErrorListeners(msg, offendingToken, e) { - if (offendingToken === void 0) { - offendingToken = this.currentToken; - } else if (offendingToken === null) { - offendingToken = void 0; - } - this._syntaxErrors++; - let line = -1; - let charPositionInLine = -1; - if (offendingToken != null) { - line = offendingToken.line; - charPositionInLine = offendingToken.charPositionInLine; - } - let listener = this.getErrorListenerDispatch(); - if (listener.syntaxError) { - listener.syntaxError(this, offendingToken, line, charPositionInLine, msg, e); - } - } - consume() { - let o = this.currentToken; - if (o.type !== Parser2.EOF) { - this.inputStream.consume(); - } - let hasListener = this._parseListeners.length !== 0; - if (this._buildParseTrees || hasListener) { - if (this._errHandler.inErrorRecoveryMode(this)) { - let node = this._ctx.addErrorNode(this.createErrorNode(this._ctx, o)); - if (hasListener) { - for (let listener of this._parseListeners) { - if (listener.visitErrorNode) { - listener.visitErrorNode(node); - } - } - } - } else { - let node = this.createTerminalNode(this._ctx, o); - this._ctx.addChild(node); - if (hasListener) { - for (let listener of this._parseListeners) { - if (listener.visitTerminal) { - listener.visitTerminal(node); - } - } - } - } - } - return o; - } - createTerminalNode(parent, t) { - return new TerminalNode_1.TerminalNode(t); - } - createErrorNode(parent, t) { - return new ErrorNode_1.ErrorNode(t); - } - addContextToParseTree() { - let parent = this._ctx._parent; - if (parent != null) { - parent.addChild(this._ctx); - } - } - enterRule(localctx, state, ruleIndex) { - this.state = state; - this._ctx = localctx; - this._ctx._start = this._input.LT(1); - if (this._buildParseTrees) { - this.addContextToParseTree(); - } - this.triggerEnterRuleEvent(); - } - 
enterLeftFactoredRule(localctx, state, ruleIndex) { - this.state = state; - if (this._buildParseTrees) { - let factoredContext = this._ctx.getChild(this._ctx.childCount - 1); - this._ctx.removeLastChild(); - factoredContext._parent = localctx; - localctx.addChild(factoredContext); - } - this._ctx = localctx; - this._ctx._start = this._input.LT(1); - if (this._buildParseTrees) { - this.addContextToParseTree(); - } - this.triggerEnterRuleEvent(); - } - exitRule() { - if (this.matchedEOF) { - this._ctx._stop = this._input.LT(1); - } else { - this._ctx._stop = this._input.tryLT(-1); - } - this.triggerExitRuleEvent(); - this.state = this._ctx.invokingState; - this._ctx = this._ctx._parent; - } - enterOuterAlt(localctx, altNum) { - localctx.altNumber = altNum; - if (this._buildParseTrees && this._ctx !== localctx) { - let parent = this._ctx._parent; - if (parent != null) { - parent.removeLastChild(); - parent.addChild(localctx); - } - } - this._ctx = localctx; - } - get precedence() { - if (this._precedenceStack.isEmpty) { - return -1; - } - return this._precedenceStack.peek(); - } - enterRecursionRule(localctx, state, ruleIndex, precedence) { - this.state = state; - this._precedenceStack.push(precedence); - this._ctx = localctx; - this._ctx._start = this._input.LT(1); - this.triggerEnterRuleEvent(); - } - pushNewRecursionContext(localctx, state, ruleIndex) { - let previous = this._ctx; - previous._parent = localctx; - previous.invokingState = state; - previous._stop = this._input.tryLT(-1); - this._ctx = localctx; - this._ctx._start = previous._start; - if (this._buildParseTrees) { - this._ctx.addChild(previous); - } - this.triggerEnterRuleEvent(); - } - unrollRecursionContexts(_parentctx) { - this._precedenceStack.pop(); - this._ctx._stop = this._input.tryLT(-1); - let retctx = this._ctx; - if (this._parseListeners.length > 0) { - while (this._ctx !== _parentctx) { - this.triggerExitRuleEvent(); - this._ctx = this._ctx._parent; - } - } else { - this._ctx = _parentctx; 
- } - retctx._parent = _parentctx; - if (this._buildParseTrees && _parentctx != null) { - _parentctx.addChild(retctx); - } - } - getInvokingContext(ruleIndex) { - let p = this._ctx; - while (p && p.ruleIndex !== ruleIndex) { - p = p._parent; - } - return p; - } - get context() { - return this._ctx; - } - set context(ctx) { - this._ctx = ctx; - } - precpred(localctx, precedence) { - return precedence >= this._precedenceStack.peek(); - } - getErrorListenerDispatch() { - return new ProxyParserErrorListener_1.ProxyParserErrorListener(this.getErrorListeners()); - } - inContext(context) { - return false; - } - isExpectedToken(symbol) { - let atn = this.interpreter.atn; - let ctx = this._ctx; - let s = atn.states[this.state]; - let following = atn.nextTokens(s); - if (following.contains(symbol)) { - return true; - } - if (!following.contains(Token_1.Token.EPSILON)) { - return false; - } - while (ctx != null && ctx.invokingState >= 0 && following.contains(Token_1.Token.EPSILON)) { - let invokingState = atn.states[ctx.invokingState]; - let rt = invokingState.transition(0); - following = atn.nextTokens(rt.followState); - if (following.contains(symbol)) { - return true; - } - ctx = ctx._parent; - } - if (following.contains(Token_1.Token.EPSILON) && symbol === Token_1.Token.EOF) { - return true; - } - return false; - } - get isMatchedEOF() { - return this.matchedEOF; - } - getExpectedTokens() { - return this.atn.getExpectedTokens(this.state, this.context); - } - getExpectedTokensWithinCurrentRule() { - let atn = this.interpreter.atn; - let s = atn.states[this.state]; - return atn.nextTokens(s); - } - getRuleIndex(ruleName) { - let ruleIndex = this.getRuleIndexMap().get(ruleName); - if (ruleIndex != null) { - return ruleIndex; - } - return -1; - } - get ruleContext() { - return this._ctx; - } - getRuleInvocationStack(ctx = this._ctx) { - let p = ctx; - let ruleNames = this.ruleNames; - let stack = []; - while (p != null) { - let ruleIndex = p.ruleIndex; - if (ruleIndex < 0) { - 
stack.push("n/a"); - } else { - stack.push(ruleNames[ruleIndex]); - } - p = p._parent; - } - return stack; - } - getDFAStrings() { - let s = []; - for (let dfa of this._interp.atn.decisionToDFA) { - s.push(dfa.toString(this.vocabulary, this.ruleNames)); - } - return s; - } - dumpDFA() { - let seenOne = false; - for (let dfa of this._interp.atn.decisionToDFA) { - if (!dfa.isEmpty) { - if (seenOne) { - console.log(); - } - console.log("Decision " + dfa.decision + ":"); - process.stdout.write(dfa.toString(this.vocabulary, this.ruleNames)); - seenOne = true; - } - } - } - get sourceName() { - return this._input.sourceName; - } - get parseInfo() { - return Promise.resolve().then(() => require_ProfilingATNSimulator()).then((m) => { - let interp = this.interpreter; - if (interp instanceof m.ProfilingATNSimulator) { - return new ParseInfo_1.ParseInfo(interp); - } - return void 0; - }); - } - setProfile(profile) { - return __awaiter(this, void 0, void 0, function* () { - let m = yield Promise.resolve().then(() => require_ProfilingATNSimulator()); - let interp = this.interpreter; - if (profile) { - if (!(interp instanceof m.ProfilingATNSimulator)) { - this.interpreter = new m.ProfilingATNSimulator(this); - } - } else if (interp instanceof m.ProfilingATNSimulator) { - this.interpreter = new ParserATNSimulator_1.ParserATNSimulator(this.atn, this); - } - this.interpreter.setPredictionMode(interp.getPredictionMode()); - }); - } - set isTrace(trace) { - if (!trace) { - if (this._tracer) { - this.removeParseListener(this._tracer); - this._tracer = void 0; - } - } else { - if (this._tracer) { - this.removeParseListener(this._tracer); - } else { - this._tracer = new TraceListener(this.ruleNames, this._input); - } - this.addParseListener(this._tracer); - } - } - get isTrace() { - return this._tracer != null; - } - }; - Parser2.bypassAltsAtnCache = new Map(); - __decorate([ - Decorators_1.NotNull - ], Parser2.prototype, "_errHandler", void 0); - __decorate([ - Decorators_1.NotNull - 
], Parser2.prototype, "match", null); - __decorate([ - Decorators_1.NotNull - ], Parser2.prototype, "matchWildcard", null); - __decorate([ - Decorators_1.NotNull - ], Parser2.prototype, "getParseListeners", null); - __decorate([ - __param(0, Decorators_1.NotNull) - ], Parser2.prototype, "addParseListener", null); - __decorate([ - Decorators_1.NotNull - ], Parser2.prototype, "getATNWithBypassAlts", null); - __decorate([ - Decorators_1.NotNull, - __param(0, Decorators_1.NotNull) - ], Parser2.prototype, "errorHandler", null); - __decorate([ - Decorators_1.Override - ], Parser2.prototype, "inputStream", null); - __decorate([ - Decorators_1.NotNull - ], Parser2.prototype, "currentToken", null); - __decorate([ - __param(0, Decorators_1.NotNull) - ], Parser2.prototype, "enterRule", null); - __decorate([ - Decorators_1.Override, - __param(0, Decorators_1.Nullable) - ], Parser2.prototype, "precpred", null); - __decorate([ - Decorators_1.Override - ], Parser2.prototype, "getErrorListenerDispatch", null); - __decorate([ - Decorators_1.NotNull - ], Parser2.prototype, "getExpectedTokens", null); - __decorate([ - Decorators_1.NotNull - ], Parser2.prototype, "getExpectedTokensWithinCurrentRule", null); - __decorate([ - Decorators_1.Override - ], Parser2.prototype, "parseInfo", null); - exports.Parser = Parser2; -}); - -// node_modules/antlr4ts/NoViableAltException.js -var require_NoViableAltException = __commonJS((exports) => { - "use strict"; - var __decorate = exports && exports.__decorate || function(decorators, target, key, desc) { - var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; - if (typeof Reflect === "object" && typeof Reflect.decorate === "function") - r = Reflect.decorate(decorators, target, key, desc); - else - for (var i = decorators.length - 1; i >= 0; i--) - if (d = decorators[i]) - r = (c < 3 ? d(r) : c > 3 ? 
d(target, key, r) : d(target, key)) || r; - return c > 3 && r && Object.defineProperty(target, key, r), r; - }; - Object.defineProperty(exports, "__esModule", {value: true}); - exports.NoViableAltException = void 0; - var Parser_1 = require_Parser(); - var RecognitionException_1 = require_RecognitionException(); - var Decorators_1 = require_Decorators(); - var NoViableAltException2 = class extends RecognitionException_1.RecognitionException { - constructor(recognizer, input, startToken, offendingToken, deadEndConfigs, ctx) { - if (recognizer instanceof Parser_1.Parser) { - if (input === void 0) { - input = recognizer.inputStream; - } - if (startToken === void 0) { - startToken = recognizer.currentToken; - } - if (offendingToken === void 0) { - offendingToken = recognizer.currentToken; - } - if (ctx === void 0) { - ctx = recognizer.context; - } - } - super(recognizer, input, ctx); - this._deadEndConfigs = deadEndConfigs; - this._startToken = startToken; - this.setOffendingToken(recognizer, offendingToken); - } - get startToken() { - return this._startToken; - } - get deadEndConfigs() { - return this._deadEndConfigs; - } - }; - __decorate([ - Decorators_1.NotNull - ], NoViableAltException2.prototype, "_startToken", void 0); - exports.NoViableAltException = NoViableAltException2; -}); - -// node_modules/antlr4ts/DefaultErrorStrategy.js -var require_DefaultErrorStrategy = __commonJS((exports) => { - "use strict"; - var __decorate = exports && exports.__decorate || function(decorators, target, key, desc) { - var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; - if (typeof Reflect === "object" && typeof Reflect.decorate === "function") - r = Reflect.decorate(decorators, target, key, desc); - else - for (var i = decorators.length - 1; i >= 0; i--) - if (d = decorators[i]) - r = (c < 3 ? d(r) : c > 3 ? 
d(target, key, r) : d(target, key)) || r; - return c > 3 && r && Object.defineProperty(target, key, r), r; - }; - var __param = exports && exports.__param || function(paramIndex, decorator) { - return function(target, key) { - decorator(target, key, paramIndex); - }; - }; - Object.defineProperty(exports, "__esModule", {value: true}); - exports.DefaultErrorStrategy = void 0; - var ATNState_1 = require_ATNState(); - var ATNStateType_1 = require_ATNStateType(); - var FailedPredicateException_1 = require_FailedPredicateException(); - var InputMismatchException_1 = require_InputMismatchException(); - var IntervalSet_1 = require_IntervalSet(); - var NoViableAltException_1 = require_NoViableAltException(); - var PredictionContext_1 = require_PredictionContext(); - var Token_1 = require_Token(); - var Decorators_1 = require_Decorators(); - var DefaultErrorStrategy = class { - constructor() { - this.errorRecoveryMode = false; - this.lastErrorIndex = -1; - this.nextTokensState = ATNState_1.ATNState.INVALID_STATE_NUMBER; - } - reset(recognizer) { - this.endErrorCondition(recognizer); - } - beginErrorCondition(recognizer) { - this.errorRecoveryMode = true; - } - inErrorRecoveryMode(recognizer) { - return this.errorRecoveryMode; - } - endErrorCondition(recognizer) { - this.errorRecoveryMode = false; - this.lastErrorStates = void 0; - this.lastErrorIndex = -1; - } - reportMatch(recognizer) { - this.endErrorCondition(recognizer); - } - reportError(recognizer, e) { - if (this.inErrorRecoveryMode(recognizer)) { - return; - } - this.beginErrorCondition(recognizer); - if (e instanceof NoViableAltException_1.NoViableAltException) { - this.reportNoViableAlternative(recognizer, e); - } else if (e instanceof InputMismatchException_1.InputMismatchException) { - this.reportInputMismatch(recognizer, e); - } else if (e instanceof FailedPredicateException_1.FailedPredicateException) { - this.reportFailedPredicate(recognizer, e); - } else { - console.error(`unknown recognition error type: 
${e}`); - this.notifyErrorListeners(recognizer, e.toString(), e); - } - } - notifyErrorListeners(recognizer, message, e) { - let offendingToken = e.getOffendingToken(recognizer); - if (offendingToken === void 0) { - offendingToken = null; - } - recognizer.notifyErrorListeners(message, offendingToken, e); - } - recover(recognizer, e) { - if (this.lastErrorIndex === recognizer.inputStream.index && this.lastErrorStates && this.lastErrorStates.contains(recognizer.state)) { - recognizer.consume(); - } - this.lastErrorIndex = recognizer.inputStream.index; - if (!this.lastErrorStates) { - this.lastErrorStates = new IntervalSet_1.IntervalSet(); - } - this.lastErrorStates.add(recognizer.state); - let followSet = this.getErrorRecoverySet(recognizer); - this.consumeUntil(recognizer, followSet); - } - sync(recognizer) { - let s = recognizer.interpreter.atn.states[recognizer.state]; - if (this.inErrorRecoveryMode(recognizer)) { - return; - } - let tokens2 = recognizer.inputStream; - let la = tokens2.LA(1); - let nextTokens = recognizer.atn.nextTokens(s); - if (nextTokens.contains(la)) { - this.nextTokensContext = void 0; - this.nextTokensState = ATNState_1.ATNState.INVALID_STATE_NUMBER; - return; - } - if (nextTokens.contains(Token_1.Token.EPSILON)) { - if (this.nextTokensContext === void 0) { - this.nextTokensContext = recognizer.context; - this.nextTokensState = recognizer.state; - } - return; - } - switch (s.stateType) { - case ATNStateType_1.ATNStateType.BLOCK_START: - case ATNStateType_1.ATNStateType.STAR_BLOCK_START: - case ATNStateType_1.ATNStateType.PLUS_BLOCK_START: - case ATNStateType_1.ATNStateType.STAR_LOOP_ENTRY: - if (this.singleTokenDeletion(recognizer)) { - return; - } - throw new InputMismatchException_1.InputMismatchException(recognizer); - case ATNStateType_1.ATNStateType.PLUS_LOOP_BACK: - case ATNStateType_1.ATNStateType.STAR_LOOP_BACK: - this.reportUnwantedToken(recognizer); - let expecting = recognizer.getExpectedTokens(); - let 
whatFollowsLoopIterationOrRule = expecting.or(this.getErrorRecoverySet(recognizer)); - this.consumeUntil(recognizer, whatFollowsLoopIterationOrRule); - break; - default: - break; - } - } - reportNoViableAlternative(recognizer, e) { - let tokens2 = recognizer.inputStream; - let input; - if (tokens2) { - if (e.startToken.type === Token_1.Token.EOF) { - input = ""; - } else { - input = tokens2.getTextFromRange(e.startToken, e.getOffendingToken()); - } - } else { - input = ""; - } - let msg = "no viable alternative at input " + this.escapeWSAndQuote(input); - this.notifyErrorListeners(recognizer, msg, e); - } - reportInputMismatch(recognizer, e) { - let expected = e.expectedTokens; - let expectedString = expected ? expected.toStringVocabulary(recognizer.vocabulary) : ""; - let msg = "mismatched input " + this.getTokenErrorDisplay(e.getOffendingToken(recognizer)) + " expecting " + expectedString; - this.notifyErrorListeners(recognizer, msg, e); - } - reportFailedPredicate(recognizer, e) { - let ruleName = recognizer.ruleNames[recognizer.context.ruleIndex]; - let msg = "rule " + ruleName + " " + e.message; - this.notifyErrorListeners(recognizer, msg, e); - } - reportUnwantedToken(recognizer) { - if (this.inErrorRecoveryMode(recognizer)) { - return; - } - this.beginErrorCondition(recognizer); - let t = recognizer.currentToken; - let tokenName = this.getTokenErrorDisplay(t); - let expecting = this.getExpectedTokens(recognizer); - let msg = "extraneous input " + tokenName + " expecting " + expecting.toStringVocabulary(recognizer.vocabulary); - recognizer.notifyErrorListeners(msg, t, void 0); - } - reportMissingToken(recognizer) { - if (this.inErrorRecoveryMode(recognizer)) { - return; - } - this.beginErrorCondition(recognizer); - let t = recognizer.currentToken; - let expecting = this.getExpectedTokens(recognizer); - let msg = "missing " + expecting.toStringVocabulary(recognizer.vocabulary) + " at " + this.getTokenErrorDisplay(t); - recognizer.notifyErrorListeners(msg, t, 
void 0); - } - recoverInline(recognizer) { - let matchedSymbol = this.singleTokenDeletion(recognizer); - if (matchedSymbol) { - recognizer.consume(); - return matchedSymbol; - } - if (this.singleTokenInsertion(recognizer)) { - return this.getMissingSymbol(recognizer); - } - if (this.nextTokensContext === void 0) { - throw new InputMismatchException_1.InputMismatchException(recognizer); - } else { - throw new InputMismatchException_1.InputMismatchException(recognizer, this.nextTokensState, this.nextTokensContext); - } - } - singleTokenInsertion(recognizer) { - let currentSymbolType = recognizer.inputStream.LA(1); - let currentState = recognizer.interpreter.atn.states[recognizer.state]; - let next = currentState.transition(0).target; - let atn = recognizer.interpreter.atn; - let expectingAtLL2 = atn.nextTokens(next, PredictionContext_1.PredictionContext.fromRuleContext(atn, recognizer.context)); - if (expectingAtLL2.contains(currentSymbolType)) { - this.reportMissingToken(recognizer); - return true; - } - return false; - } - singleTokenDeletion(recognizer) { - let nextTokenType = recognizer.inputStream.LA(2); - let expecting = this.getExpectedTokens(recognizer); - if (expecting.contains(nextTokenType)) { - this.reportUnwantedToken(recognizer); - recognizer.consume(); - let matchedSymbol = recognizer.currentToken; - this.reportMatch(recognizer); - return matchedSymbol; - } - return void 0; - } - getMissingSymbol(recognizer) { - let currentSymbol = recognizer.currentToken; - let expecting = this.getExpectedTokens(recognizer); - let expectedTokenType = Token_1.Token.INVALID_TYPE; - if (!expecting.isNil) { - expectedTokenType = expecting.minElement; - } - let tokenText; - if (expectedTokenType === Token_1.Token.EOF) { - tokenText = ""; - } else { - tokenText = ""; - } - let current = currentSymbol; - let lookback = recognizer.inputStream.tryLT(-1); - if (current.type === Token_1.Token.EOF && lookback != null) { - current = lookback; - } - return 
this.constructToken(recognizer.inputStream.tokenSource, expectedTokenType, tokenText, current); - } - constructToken(tokenSource, expectedTokenType, tokenText, current) { - let factory = tokenSource.tokenFactory; - let x = current.tokenSource; - let stream = x ? x.inputStream : void 0; - return factory.create({source: tokenSource, stream}, expectedTokenType, tokenText, Token_1.Token.DEFAULT_CHANNEL, -1, -1, current.line, current.charPositionInLine); - } - getExpectedTokens(recognizer) { - return recognizer.getExpectedTokens(); - } - getTokenErrorDisplay(t) { - if (!t) { - return ""; - } - let s = this.getSymbolText(t); - if (!s) { - if (this.getSymbolType(t) === Token_1.Token.EOF) { - s = ""; - } else { - s = `<${this.getSymbolType(t)}>`; - } - } - return this.escapeWSAndQuote(s); - } - getSymbolText(symbol) { - return symbol.text; - } - getSymbolType(symbol) { - return symbol.type; - } - escapeWSAndQuote(s) { - s = s.replace("\n", "\\n"); - s = s.replace("\r", "\\r"); - s = s.replace(" ", "\\t"); - return "'" + s + "'"; - } - getErrorRecoverySet(recognizer) { - let atn = recognizer.interpreter.atn; - let ctx = recognizer.context; - let recoverSet = new IntervalSet_1.IntervalSet(); - while (ctx && ctx.invokingState >= 0) { - let invokingState = atn.states[ctx.invokingState]; - let rt = invokingState.transition(0); - let follow = atn.nextTokens(rt.followState); - recoverSet.addAll(follow); - ctx = ctx._parent; - } - recoverSet.remove(Token_1.Token.EPSILON); - return recoverSet; - } - consumeUntil(recognizer, set) { - let ttype = recognizer.inputStream.LA(1); - while (ttype !== Token_1.Token.EOF && !set.contains(ttype)) { - recognizer.consume(); - ttype = recognizer.inputStream.LA(1); - } - } - }; - __decorate([ - Decorators_1.Override - ], DefaultErrorStrategy.prototype, "reset", null); - __decorate([ - __param(0, Decorators_1.NotNull) - ], DefaultErrorStrategy.prototype, "beginErrorCondition", null); - __decorate([ - Decorators_1.Override - ], 
DefaultErrorStrategy.prototype, "inErrorRecoveryMode", null); - __decorate([ - __param(0, Decorators_1.NotNull) - ], DefaultErrorStrategy.prototype, "endErrorCondition", null); - __decorate([ - Decorators_1.Override - ], DefaultErrorStrategy.prototype, "reportMatch", null); - __decorate([ - Decorators_1.Override - ], DefaultErrorStrategy.prototype, "reportError", null); - __decorate([ - __param(0, Decorators_1.NotNull) - ], DefaultErrorStrategy.prototype, "notifyErrorListeners", null); - __decorate([ - Decorators_1.Override - ], DefaultErrorStrategy.prototype, "recover", null); - __decorate([ - Decorators_1.Override - ], DefaultErrorStrategy.prototype, "sync", null); - __decorate([ - __param(0, Decorators_1.NotNull), - __param(1, Decorators_1.NotNull) - ], DefaultErrorStrategy.prototype, "reportNoViableAlternative", null); - __decorate([ - __param(0, Decorators_1.NotNull), - __param(1, Decorators_1.NotNull) - ], DefaultErrorStrategy.prototype, "reportInputMismatch", null); - __decorate([ - __param(0, Decorators_1.NotNull), - __param(1, Decorators_1.NotNull) - ], DefaultErrorStrategy.prototype, "reportFailedPredicate", null); - __decorate([ - __param(0, Decorators_1.NotNull) - ], DefaultErrorStrategy.prototype, "reportUnwantedToken", null); - __decorate([ - __param(0, Decorators_1.NotNull) - ], DefaultErrorStrategy.prototype, "reportMissingToken", null); - __decorate([ - Decorators_1.Override - ], DefaultErrorStrategy.prototype, "recoverInline", null); - __decorate([ - __param(0, Decorators_1.NotNull) - ], DefaultErrorStrategy.prototype, "singleTokenInsertion", null); - __decorate([ - __param(0, Decorators_1.NotNull) - ], DefaultErrorStrategy.prototype, "singleTokenDeletion", null); - __decorate([ - Decorators_1.NotNull, - __param(0, Decorators_1.NotNull) - ], DefaultErrorStrategy.prototype, "getMissingSymbol", null); - __decorate([ - Decorators_1.NotNull, - __param(0, Decorators_1.NotNull) - ], DefaultErrorStrategy.prototype, "getExpectedTokens", null); - 
__decorate([ - __param(0, Decorators_1.NotNull) - ], DefaultErrorStrategy.prototype, "getSymbolText", null); - __decorate([ - __param(0, Decorators_1.NotNull) - ], DefaultErrorStrategy.prototype, "getSymbolType", null); - __decorate([ - Decorators_1.NotNull, - __param(0, Decorators_1.NotNull) - ], DefaultErrorStrategy.prototype, "escapeWSAndQuote", null); - __decorate([ - Decorators_1.NotNull, - __param(0, Decorators_1.NotNull) - ], DefaultErrorStrategy.prototype, "getErrorRecoverySet", null); - __decorate([ - __param(0, Decorators_1.NotNull), - __param(1, Decorators_1.NotNull) - ], DefaultErrorStrategy.prototype, "consumeUntil", null); - exports.DefaultErrorStrategy = DefaultErrorStrategy; -}); - -// node_modules/antlr4ts/BailErrorStrategy.js -var require_BailErrorStrategy = __commonJS((exports) => { - "use strict"; - var __decorate = exports && exports.__decorate || function(decorators, target, key, desc) { - var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; - if (typeof Reflect === "object" && typeof Reflect.decorate === "function") - r = Reflect.decorate(decorators, target, key, desc); - else - for (var i = decorators.length - 1; i >= 0; i--) - if (d = decorators[i]) - r = (c < 3 ? d(r) : c > 3 ? 
d(target, key, r) : d(target, key)) || r; - return c > 3 && r && Object.defineProperty(target, key, r), r; - }; - Object.defineProperty(exports, "__esModule", {value: true}); - exports.BailErrorStrategy = void 0; - var DefaultErrorStrategy_1 = require_DefaultErrorStrategy(); - var InputMismatchException_1 = require_InputMismatchException(); - var Decorators_1 = require_Decorators(); - var ParseCancellationException_1 = require_ParseCancellationException(); - var BailErrorStrategy = class extends DefaultErrorStrategy_1.DefaultErrorStrategy { - recover(recognizer, e) { - for (let context = recognizer.context; context; context = context.parent) { - context.exception = e; - } - throw new ParseCancellationException_1.ParseCancellationException(e); - } - recoverInline(recognizer) { - let e = new InputMismatchException_1.InputMismatchException(recognizer); - for (let context = recognizer.context; context; context = context.parent) { - context.exception = e; - } - throw new ParseCancellationException_1.ParseCancellationException(e); - } - sync(recognizer) { - } - }; - __decorate([ - Decorators_1.Override - ], BailErrorStrategy.prototype, "recover", null); - __decorate([ - Decorators_1.Override - ], BailErrorStrategy.prototype, "recoverInline", null); - __decorate([ - Decorators_1.Override - ], BailErrorStrategy.prototype, "sync", null); - exports.BailErrorStrategy = BailErrorStrategy; -}); - -// node_modules/antlr4ts/CharStream.js -var require_CharStream = __commonJS((exports) => { - "use strict"; - Object.defineProperty(exports, "__esModule", {value: true}); -}); - -// node_modules/antlr4ts/Dependents.js -var require_Dependents = __commonJS((exports) => { - "use strict"; - Object.defineProperty(exports, "__esModule", {value: true}); - exports.Dependents = void 0; - var Dependents; - (function(Dependents2) { - Dependents2[Dependents2["SELF"] = 0] = "SELF"; - Dependents2[Dependents2["PARENTS"] = 1] = "PARENTS"; - Dependents2[Dependents2["CHILDREN"] = 2] = "CHILDREN"; - 
Dependents2[Dependents2["ANCESTORS"] = 3] = "ANCESTORS"; - Dependents2[Dependents2["DESCENDANTS"] = 4] = "DESCENDANTS"; - Dependents2[Dependents2["SIBLINGS"] = 5] = "SIBLINGS"; - Dependents2[Dependents2["PRECEEDING_SIBLINGS"] = 6] = "PRECEEDING_SIBLINGS"; - Dependents2[Dependents2["FOLLOWING_SIBLINGS"] = 7] = "FOLLOWING_SIBLINGS"; - Dependents2[Dependents2["PRECEEDING"] = 8] = "PRECEEDING"; - Dependents2[Dependents2["FOLLOWING"] = 9] = "FOLLOWING"; - })(Dependents = exports.Dependents || (exports.Dependents = {})); -}); - -// node_modules/antlr4ts/DiagnosticErrorListener.js -var require_DiagnosticErrorListener = __commonJS((exports) => { - "use strict"; - var __decorate = exports && exports.__decorate || function(decorators, target, key, desc) { - var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; - if (typeof Reflect === "object" && typeof Reflect.decorate === "function") - r = Reflect.decorate(decorators, target, key, desc); - else - for (var i = decorators.length - 1; i >= 0; i--) - if (d = decorators[i]) - r = (c < 3 ? d(r) : c > 3 ? 
d(target, key, r) : d(target, key)) || r; - return c > 3 && r && Object.defineProperty(target, key, r), r; - }; - var __param = exports && exports.__param || function(paramIndex, decorator) { - return function(target, key) { - decorator(target, key, paramIndex); - }; - }; - Object.defineProperty(exports, "__esModule", {value: true}); - exports.DiagnosticErrorListener = void 0; - var BitSet_1 = require_BitSet(); - var Decorators_1 = require_Decorators(); - var Interval_1 = require_Interval(); - var DiagnosticErrorListener = class { - constructor(exactOnly = true) { - this.exactOnly = exactOnly; - this.exactOnly = exactOnly; - } - syntaxError(recognizer, offendingSymbol, line, charPositionInLine, msg, e) { - } - reportAmbiguity(recognizer, dfa, startIndex, stopIndex, exact, ambigAlts, configs) { - if (this.exactOnly && !exact) { - return; - } - let decision = this.getDecisionDescription(recognizer, dfa); - let conflictingAlts = this.getConflictingAlts(ambigAlts, configs); - let text = recognizer.inputStream.getText(Interval_1.Interval.of(startIndex, stopIndex)); - let message = `reportAmbiguity d=${decision}: ambigAlts=${conflictingAlts}, input='${text}'`; - recognizer.notifyErrorListeners(message); - } - reportAttemptingFullContext(recognizer, dfa, startIndex, stopIndex, conflictingAlts, conflictState) { - let format = "reportAttemptingFullContext d=%s, input='%s'"; - let decision = this.getDecisionDescription(recognizer, dfa); - let text = recognizer.inputStream.getText(Interval_1.Interval.of(startIndex, stopIndex)); - let message = `reportAttemptingFullContext d=${decision}, input='${text}'`; - recognizer.notifyErrorListeners(message); - } - reportContextSensitivity(recognizer, dfa, startIndex, stopIndex, prediction, acceptState) { - let format = "reportContextSensitivity d=%s, input='%s'"; - let decision = this.getDecisionDescription(recognizer, dfa); - let text = recognizer.inputStream.getText(Interval_1.Interval.of(startIndex, stopIndex)); - let message = 
`reportContextSensitivity d=${decision}, input='${text}'`; - recognizer.notifyErrorListeners(message); - } - getDecisionDescription(recognizer, dfa) { - let decision = dfa.decision; - let ruleIndex = dfa.atnStartState.ruleIndex; - let ruleNames = recognizer.ruleNames; - if (ruleIndex < 0 || ruleIndex >= ruleNames.length) { - return decision.toString(); - } - let ruleName = ruleNames[ruleIndex]; - if (!ruleName) { - return decision.toString(); - } - return `${decision} (${ruleName})`; - } - getConflictingAlts(reportedAlts, configs) { - if (reportedAlts != null) { - return reportedAlts; - } - let result = new BitSet_1.BitSet(); - for (let config of configs) { - result.set(config.alt); - } - return result; - } - }; - __decorate([ - Decorators_1.Override - ], DiagnosticErrorListener.prototype, "syntaxError", null); - __decorate([ - Decorators_1.Override, - __param(0, Decorators_1.NotNull), - __param(1, Decorators_1.NotNull), - __param(6, Decorators_1.NotNull) - ], DiagnosticErrorListener.prototype, "reportAmbiguity", null); - __decorate([ - Decorators_1.Override, - __param(0, Decorators_1.NotNull), - __param(1, Decorators_1.NotNull), - __param(5, Decorators_1.NotNull) - ], DiagnosticErrorListener.prototype, "reportAttemptingFullContext", null); - __decorate([ - Decorators_1.Override, - __param(0, Decorators_1.NotNull), - __param(1, Decorators_1.NotNull), - __param(5, Decorators_1.NotNull) - ], DiagnosticErrorListener.prototype, "reportContextSensitivity", null); - __decorate([ - __param(0, Decorators_1.NotNull), - __param(1, Decorators_1.NotNull) - ], DiagnosticErrorListener.prototype, "getDecisionDescription", null); - __decorate([ - Decorators_1.NotNull, - __param(1, Decorators_1.NotNull) - ], DiagnosticErrorListener.prototype, "getConflictingAlts", null); - exports.DiagnosticErrorListener = DiagnosticErrorListener; -}); - -// node_modules/antlr4ts/LexerInterpreter.js -var require_LexerInterpreter = __commonJS((exports) => { - "use strict"; - var __decorate = exports 
&& exports.__decorate || function(decorators, target, key, desc) { - var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; - if (typeof Reflect === "object" && typeof Reflect.decorate === "function") - r = Reflect.decorate(decorators, target, key, desc); - else - for (var i = decorators.length - 1; i >= 0; i--) - if (d = decorators[i]) - r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r; - return c > 3 && r && Object.defineProperty(target, key, r), r; - }; - var __param = exports && exports.__param || function(paramIndex, decorator) { - return function(target, key) { - decorator(target, key, paramIndex); - }; - }; - Object.defineProperty(exports, "__esModule", {value: true}); - exports.LexerInterpreter = void 0; - var Lexer_1 = require_Lexer(); - var LexerATNSimulator_1 = require_LexerATNSimulator(); - var Decorators_1 = require_Decorators(); - var Decorators_2 = require_Decorators(); - var LexerInterpreter = class LexerInterpreter extends Lexer_1.Lexer { - constructor(grammarFileName, vocabulary, ruleNames, channelNames, modeNames, atn, input) { - super(input); - if (atn.grammarType !== 0) { - throw new Error("IllegalArgumentException: The ATN must be a lexer ATN."); - } - this._grammarFileName = grammarFileName; - this._atn = atn; - this._ruleNames = ruleNames.slice(0); - this._channelNames = channelNames.slice(0); - this._modeNames = modeNames.slice(0); - this._vocabulary = vocabulary; - this._interp = new LexerATNSimulator_1.LexerATNSimulator(atn, this); - } - get atn() { - return this._atn; - } - get grammarFileName() { - return this._grammarFileName; - } - get ruleNames() { - return this._ruleNames; - } - get channelNames() { - return this._channelNames; - } - get modeNames() { - return this._modeNames; - } - get vocabulary() { - return this._vocabulary; - } - }; - __decorate([ - Decorators_1.NotNull - ], LexerInterpreter.prototype, "_vocabulary", void 0); - 
__decorate([ - Decorators_2.Override - ], LexerInterpreter.prototype, "atn", null); - __decorate([ - Decorators_2.Override - ], LexerInterpreter.prototype, "grammarFileName", null); - __decorate([ - Decorators_2.Override - ], LexerInterpreter.prototype, "ruleNames", null); - __decorate([ - Decorators_2.Override - ], LexerInterpreter.prototype, "channelNames", null); - __decorate([ - Decorators_2.Override - ], LexerInterpreter.prototype, "modeNames", null); - __decorate([ - Decorators_2.Override - ], LexerInterpreter.prototype, "vocabulary", null); - LexerInterpreter = __decorate([ - __param(1, Decorators_1.NotNull) - ], LexerInterpreter); - exports.LexerInterpreter = LexerInterpreter; -}); - -// node_modules/antlr4ts/ParserErrorListener.js -var require_ParserErrorListener = __commonJS((exports) => { - "use strict"; - Object.defineProperty(exports, "__esModule", {value: true}); -}); - -// node_modules/antlr4ts/RuleContextWithAltNum.js -var require_RuleContextWithAltNum = __commonJS((exports) => { - "use strict"; - var __decorate = exports && exports.__decorate || function(decorators, target, key, desc) { - var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; - if (typeof Reflect === "object" && typeof Reflect.decorate === "function") - r = Reflect.decorate(decorators, target, key, desc); - else - for (var i = decorators.length - 1; i >= 0; i--) - if (d = decorators[i]) - r = (c < 3 ? d(r) : c > 3 ? 
d(target, key, r) : d(target, key)) || r; - return c > 3 && r && Object.defineProperty(target, key, r), r; - }; - Object.defineProperty(exports, "__esModule", {value: true}); - exports.RuleContextWithAltNum = void 0; - var ATN_1 = require_ATN(); - var Decorators_1 = require_Decorators(); - var ParserRuleContext_1 = require_ParserRuleContext(); - var RuleContextWithAltNum = class extends ParserRuleContext_1.ParserRuleContext { - constructor(parent, invokingStateNumber) { - if (invokingStateNumber !== void 0) { - super(parent, invokingStateNumber); - } else { - super(); - } - this._altNumber = ATN_1.ATN.INVALID_ALT_NUMBER; - } - get altNumber() { - return this._altNumber; - } - set altNumber(altNum) { - this._altNumber = altNum; - } - }; - __decorate([ - Decorators_1.Override - ], RuleContextWithAltNum.prototype, "altNumber", null); - exports.RuleContextWithAltNum = RuleContextWithAltNum; -}); - -// node_modules/antlr4ts/RuleDependency.js -var require_RuleDependency = __commonJS((exports) => { - "use strict"; - Object.defineProperty(exports, "__esModule", {value: true}); - exports.RuleDependency = void 0; - function RuleDependency(dependency) { - return (target, propertyKey, propertyDescriptor) => { - }; - } - exports.RuleDependency = RuleDependency; -}); - -// node_modules/antlr4ts/RuleVersion.js -var require_RuleVersion = __commonJS((exports) => { - "use strict"; - Object.defineProperty(exports, "__esModule", {value: true}); - exports.RuleVersion = void 0; - function RuleVersion(version) { - return (target, propertyKey, propertyDescriptor) => { - }; - } - exports.RuleVersion = RuleVersion; -}); - -// node_modules/antlr4ts/TokenFactory.js -var require_TokenFactory = __commonJS((exports) => { - "use strict"; - Object.defineProperty(exports, "__esModule", {value: true}); -}); - -// node_modules/antlr4ts/TokenSource.js -var require_TokenSource = __commonJS((exports) => { - "use strict"; - Object.defineProperty(exports, "__esModule", {value: true}); -}); - -// 
node_modules/antlr4ts/TokenStream.js -var require_TokenStream = __commonJS((exports) => { - "use strict"; - Object.defineProperty(exports, "__esModule", {value: true}); -}); - -// node_modules/antlr4ts/TokenStreamRewriter.js -var require_TokenStreamRewriter = __commonJS((exports) => { - "use strict"; - var __decorate = exports && exports.__decorate || function(decorators, target, key, desc) { - var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; - if (typeof Reflect === "object" && typeof Reflect.decorate === "function") - r = Reflect.decorate(decorators, target, key, desc); - else - for (var i = decorators.length - 1; i >= 0; i--) - if (d = decorators[i]) - r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r; - return c > 3 && r && Object.defineProperty(target, key, r), r; - }; - Object.defineProperty(exports, "__esModule", {value: true}); - exports.RewriteOperation = exports.TokenStreamRewriter = void 0; - var Interval_1 = require_Interval(); - var Decorators_1 = require_Decorators(); - var Token_1 = require_Token(); - var TokenStreamRewriter = class { - constructor(tokens2) { - this.tokens = tokens2; - this.programs = new Map(); - this.programs.set(TokenStreamRewriter.DEFAULT_PROGRAM_NAME, []); - this.lastRewriteTokenIndexes = new Map(); - } - getTokenStream() { - return this.tokens; - } - rollback(instructionIndex, programName = TokenStreamRewriter.DEFAULT_PROGRAM_NAME) { - let is = this.programs.get(programName); - if (is != null) { - this.programs.set(programName, is.slice(TokenStreamRewriter.MIN_TOKEN_INDEX, instructionIndex)); - } - } - deleteProgram(programName = TokenStreamRewriter.DEFAULT_PROGRAM_NAME) { - this.rollback(TokenStreamRewriter.MIN_TOKEN_INDEX, programName); - } - insertAfter(tokenOrIndex, text, programName = TokenStreamRewriter.DEFAULT_PROGRAM_NAME) { - let index; - if (typeof tokenOrIndex === "number") { - index = tokenOrIndex; - } else { - index = 
tokenOrIndex.tokenIndex; - } - let rewrites = this.getProgram(programName); - let op = new InsertAfterOp(this.tokens, index, rewrites.length, text); - rewrites.push(op); - } - insertBefore(tokenOrIndex, text, programName = TokenStreamRewriter.DEFAULT_PROGRAM_NAME) { - let index; - if (typeof tokenOrIndex === "number") { - index = tokenOrIndex; - } else { - index = tokenOrIndex.tokenIndex; - } - let rewrites = this.getProgram(programName); - let op = new InsertBeforeOp(this.tokens, index, rewrites.length, text); - rewrites.push(op); - } - replaceSingle(index, text) { - if (typeof index === "number") { - this.replace(index, index, text); - } else { - this.replace(index, index, text); - } - } - replace(from, to, text, programName = TokenStreamRewriter.DEFAULT_PROGRAM_NAME) { - if (typeof from !== "number") { - from = from.tokenIndex; - } - if (typeof to !== "number") { - to = to.tokenIndex; - } - if (from > to || from < 0 || to < 0 || to >= this.tokens.size) { - throw new RangeError(`replace: range invalid: ${from}..${to}(size=${this.tokens.size})`); - } - let rewrites = this.getProgram(programName); - let op = new ReplaceOp(this.tokens, from, to, rewrites.length, text); - rewrites.push(op); - } - delete(from, to, programName = TokenStreamRewriter.DEFAULT_PROGRAM_NAME) { - if (to === void 0) { - to = from; - } - if (typeof from === "number") { - this.replace(from, to, "", programName); - } else { - this.replace(from, to, "", programName); - } - } - getLastRewriteTokenIndex(programName = TokenStreamRewriter.DEFAULT_PROGRAM_NAME) { - let I = this.lastRewriteTokenIndexes.get(programName); - if (I == null) { - return -1; - } - return I; - } - setLastRewriteTokenIndex(programName, i) { - this.lastRewriteTokenIndexes.set(programName, i); - } - getProgram(name) { - let is = this.programs.get(name); - if (is == null) { - is = this.initializeProgram(name); - } - return is; - } - initializeProgram(name) { - let is = []; - this.programs.set(name, is); - return is; - } - 
getText(intervalOrProgram, programName = TokenStreamRewriter.DEFAULT_PROGRAM_NAME) { - let interval; - if (intervalOrProgram instanceof Interval_1.Interval) { - interval = intervalOrProgram; - } else { - interval = Interval_1.Interval.of(0, this.tokens.size - 1); - } - if (typeof intervalOrProgram === "string") { - programName = intervalOrProgram; - } - let rewrites = this.programs.get(programName); - let start = interval.a; - let stop = interval.b; - if (stop > this.tokens.size - 1) { - stop = this.tokens.size - 1; - } - if (start < 0) { - start = 0; - } - if (rewrites == null || rewrites.length === 0) { - return this.tokens.getText(interval); - } - let buf = []; - let indexToOp = this.reduceToSingleOperationPerIndex(rewrites); - let i = start; - while (i <= stop && i < this.tokens.size) { - let op = indexToOp.get(i); - indexToOp.delete(i); - let t = this.tokens.get(i); - if (op == null) { - if (t.type !== Token_1.Token.EOF) { - buf.push(String(t.text)); - } - i++; - } else { - i = op.execute(buf); - } - } - if (stop === this.tokens.size - 1) { - for (let op of indexToOp.values()) { - if (op.index >= this.tokens.size - 1) { - buf.push(op.text.toString()); - } - } - } - return buf.join(""); - } - reduceToSingleOperationPerIndex(rewrites) { - for (let i = 0; i < rewrites.length; i++) { - let op = rewrites[i]; - if (op == null) { - continue; - } - if (!(op instanceof ReplaceOp)) { - continue; - } - let rop = op; - let inserts = this.getKindOfOps(rewrites, InsertBeforeOp, i); - for (let iop of inserts) { - if (iop.index === rop.index) { - rewrites[iop.instructionIndex] = void 0; - rop.text = iop.text.toString() + (rop.text != null ? 
rop.text.toString() : ""); - } else if (iop.index > rop.index && iop.index <= rop.lastIndex) { - rewrites[iop.instructionIndex] = void 0; - } - } - let prevReplaces = this.getKindOfOps(rewrites, ReplaceOp, i); - for (let prevRop of prevReplaces) { - if (prevRop.index >= rop.index && prevRop.lastIndex <= rop.lastIndex) { - rewrites[prevRop.instructionIndex] = void 0; - continue; - } - let disjoint = prevRop.lastIndex < rop.index || prevRop.index > rop.lastIndex; - if (prevRop.text == null && rop.text == null && !disjoint) { - rewrites[prevRop.instructionIndex] = void 0; - rop.index = Math.min(prevRop.index, rop.index); - rop.lastIndex = Math.max(prevRop.lastIndex, rop.lastIndex); - } else if (!disjoint) { - throw new Error(`replace op boundaries of ${rop} overlap with previous ${prevRop}`); - } - } - } - for (let i = 0; i < rewrites.length; i++) { - let op = rewrites[i]; - if (op == null) { - continue; - } - if (!(op instanceof InsertBeforeOp)) { - continue; - } - let iop = op; - let prevInserts = this.getKindOfOps(rewrites, InsertBeforeOp, i); - for (let prevIop of prevInserts) { - if (prevIop.index === iop.index) { - if (prevIop instanceof InsertAfterOp) { - iop.text = this.catOpText(prevIop.text, iop.text); - rewrites[prevIop.instructionIndex] = void 0; - } else if (prevIop instanceof InsertBeforeOp) { - iop.text = this.catOpText(iop.text, prevIop.text); - rewrites[prevIop.instructionIndex] = void 0; - } - } - } - let prevReplaces = this.getKindOfOps(rewrites, ReplaceOp, i); - for (let rop of prevReplaces) { - if (iop.index === rop.index) { - rop.text = this.catOpText(iop.text, rop.text); - rewrites[i] = void 0; - continue; - } - if (iop.index >= rop.index && iop.index <= rop.lastIndex) { - throw new Error(`insert op ${iop} within boundaries of previous ${rop}`); - } - } - } - let m = new Map(); - for (let op of rewrites) { - if (op == null) { - continue; - } - if (m.get(op.index) != null) { - throw new Error("should only be one op per index"); - } - 
m.set(op.index, op); - } - return m; - } - catOpText(a, b) { - let x = ""; - let y = ""; - if (a != null) { - x = a.toString(); - } - if (b != null) { - y = b.toString(); - } - return x + y; - } - getKindOfOps(rewrites, kind, before) { - let ops = []; - for (let i = 0; i < before && i < rewrites.length; i++) { - let op = rewrites[i]; - if (op == null) { - continue; - } - if (op instanceof kind) { - ops.push(op); - } - } - return ops; - } - }; - exports.TokenStreamRewriter = TokenStreamRewriter; - TokenStreamRewriter.DEFAULT_PROGRAM_NAME = "default"; - TokenStreamRewriter.PROGRAM_INIT_SIZE = 100; - TokenStreamRewriter.MIN_TOKEN_INDEX = 0; - var RewriteOperation = class { - constructor(tokens2, index, instructionIndex, text) { - this.tokens = tokens2; - this.instructionIndex = instructionIndex; - this.index = index; - this.text = text === void 0 ? "" : text; - } - execute(buf) { - return this.index; - } - toString() { - let opName = this.constructor.name; - let $index = opName.indexOf("$"); - opName = opName.substring($index + 1, opName.length); - return "<" + opName + "@" + this.tokens.get(this.index) + ':"' + this.text + '">'; - } - }; - __decorate([ - Decorators_1.Override - ], RewriteOperation.prototype, "toString", null); - exports.RewriteOperation = RewriteOperation; - var InsertBeforeOp = class extends RewriteOperation { - constructor(tokens2, index, instructionIndex, text) { - super(tokens2, index, instructionIndex, text); - } - execute(buf) { - buf.push(this.text.toString()); - if (this.tokens.get(this.index).type !== Token_1.Token.EOF) { - buf.push(String(this.tokens.get(this.index).text)); - } - return this.index + 1; - } - }; - __decorate([ - Decorators_1.Override - ], InsertBeforeOp.prototype, "execute", null); - var InsertAfterOp = class extends InsertBeforeOp { - constructor(tokens2, index, instructionIndex, text) { - super(tokens2, index + 1, instructionIndex, text); - } - }; - var ReplaceOp = class extends RewriteOperation { - constructor(tokens2, 
from, to, instructionIndex, text) { - super(tokens2, from, instructionIndex, text); - this.lastIndex = to; - } - execute(buf) { - if (this.text != null) { - buf.push(this.text.toString()); - } - return this.lastIndex + 1; - } - toString() { - if (this.text == null) { - return ""; - } - return "'; - } - }; - __decorate([ - Decorators_1.Override - ], ReplaceOp.prototype, "execute", null); - __decorate([ - Decorators_1.Override - ], ReplaceOp.prototype, "toString", null); -}); - -// node_modules/antlr4ts/Vocabulary.js -var require_Vocabulary = __commonJS((exports) => { - "use strict"; - Object.defineProperty(exports, "__esModule", {value: true}); -}); - -// node_modules/antlr4ts/WritableToken.js -var require_WritableToken = __commonJS((exports) => { - "use strict"; - Object.defineProperty(exports, "__esModule", {value: true}); -}); - -// node_modules/antlr4ts/index.js -var require_antlr4ts = __commonJS((exports) => { - "use strict"; - var __createBinding = exports && exports.__createBinding || (Object.create ? 
function(o, m, k, k2) { - if (k2 === void 0) - k2 = k; - Object.defineProperty(o, k2, {enumerable: true, get: function() { - return m[k]; - }}); - } : function(o, m, k, k2) { - if (k2 === void 0) - k2 = k; - o[k2] = m[k]; - }); - var __exportStar = exports && exports.__exportStar || function(m, exports2) { - for (var p in m) - if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports2, p)) - __createBinding(exports2, m, p); - }; - Object.defineProperty(exports, "__esModule", {value: true}); - __exportStar(require_ANTLRErrorListener(), exports); - __exportStar(require_ANTLRErrorStrategy(), exports); - __exportStar(require_ANTLRInputStream(), exports); - __exportStar(require_BailErrorStrategy(), exports); - __exportStar(require_BufferedTokenStream(), exports); - __exportStar(require_CharStream(), exports); - __exportStar(require_CharStreams(), exports); - __exportStar(require_CodePointBuffer(), exports); - __exportStar(require_CodePointCharStream(), exports); - __exportStar(require_CommonToken(), exports); - __exportStar(require_CommonTokenFactory(), exports); - __exportStar(require_CommonTokenStream(), exports); - __exportStar(require_ConsoleErrorListener(), exports); - __exportStar(require_DefaultErrorStrategy(), exports); - __exportStar(require_Dependents(), exports); - __exportStar(require_DiagnosticErrorListener(), exports); - __exportStar(require_FailedPredicateException(), exports); - __exportStar(require_InputMismatchException(), exports); - __exportStar(require_InterpreterRuleContext(), exports); - __exportStar(require_IntStream(), exports); - __exportStar(require_Lexer(), exports); - __exportStar(require_LexerInterpreter(), exports); - __exportStar(require_LexerNoViableAltException(), exports); - __exportStar(require_ListTokenSource(), exports); - __exportStar(require_NoViableAltException(), exports); - __exportStar(require_Parser(), exports); - __exportStar(require_ParserErrorListener(), exports); - __exportStar(require_ParserInterpreter(), 
exports); - __exportStar(require_ParserRuleContext(), exports); - __exportStar(require_ProxyErrorListener(), exports); - __exportStar(require_ProxyParserErrorListener(), exports); - __exportStar(require_RecognitionException(), exports); - __exportStar(require_Recognizer(), exports); - __exportStar(require_RuleContext(), exports); - __exportStar(require_RuleContextWithAltNum(), exports); - __exportStar(require_RuleDependency(), exports); - __exportStar(require_RuleVersion(), exports); - __exportStar(require_Token(), exports); - __exportStar(require_TokenFactory(), exports); - __exportStar(require_TokenSource(), exports); - __exportStar(require_TokenStream(), exports); - __exportStar(require_TokenStreamRewriter(), exports); - __exportStar(require_Vocabulary(), exports); - __exportStar(require_VocabularyImpl(), exports); - __exportStar(require_WritableToken(), exports); -}); - -// node_modules/antlr4ts/tree/AbstractParseTreeVisitor.js -var require_AbstractParseTreeVisitor = __commonJS((exports) => { - "use strict"; - var __decorate = exports && exports.__decorate || function(decorators, target, key, desc) { - var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; - if (typeof Reflect === "object" && typeof Reflect.decorate === "function") - r = Reflect.decorate(decorators, target, key, desc); - else - for (var i = decorators.length - 1; i >= 0; i--) - if (d = decorators[i]) - r = (c < 3 ? d(r) : c > 3 ? 
d(target, key, r) : d(target, key)) || r; - return c > 3 && r && Object.defineProperty(target, key, r), r; - }; - var __param = exports && exports.__param || function(paramIndex, decorator) { - return function(target, key) { - decorator(target, key, paramIndex); - }; - }; - Object.defineProperty(exports, "__esModule", {value: true}); - exports.AbstractParseTreeVisitor = void 0; - var Decorators_1 = require_Decorators(); - var AbstractParseTreeVisitor2 = class { - visit(tree) { - return tree.accept(this); - } - visitChildren(node) { - let result = this.defaultResult(); - let n = node.childCount; - for (let i = 0; i < n; i++) { - if (!this.shouldVisitNextChild(node, result)) { - break; - } - let c = node.getChild(i); - let childResult = c.accept(this); - result = this.aggregateResult(result, childResult); - } - return result; - } - visitTerminal(node) { - return this.defaultResult(); - } - visitErrorNode(node) { - return this.defaultResult(); - } - aggregateResult(aggregate, nextResult) { - return nextResult; - } - shouldVisitNextChild(node, currentResult) { - return true; - } - }; - __decorate([ - Decorators_1.Override, - __param(0, Decorators_1.NotNull) - ], AbstractParseTreeVisitor2.prototype, "visit", null); - __decorate([ - Decorators_1.Override, - __param(0, Decorators_1.NotNull) - ], AbstractParseTreeVisitor2.prototype, "visitChildren", null); - __decorate([ - Decorators_1.Override, - __param(0, Decorators_1.NotNull) - ], AbstractParseTreeVisitor2.prototype, "visitTerminal", null); - __decorate([ - Decorators_1.Override, - __param(0, Decorators_1.NotNull) - ], AbstractParseTreeVisitor2.prototype, "visitErrorNode", null); - __decorate([ - __param(0, Decorators_1.NotNull) - ], AbstractParseTreeVisitor2.prototype, "shouldVisitNextChild", null); - exports.AbstractParseTreeVisitor = AbstractParseTreeVisitor2; -}); - -// node_modules/antlr4/src/antlr4/Utils.js -var require_Utils2 = __commonJS((exports, module2) => { - function arrayToString(a) { - return 
Array.isArray(a) ? "[" + a.join(", ") + "]" : "null"; - } - String.prototype.seed = String.prototype.seed || Math.round(Math.random() * Math.pow(2, 32)); - String.prototype.hashCode = function() { - const key = this.toString(); - let h1b, k1; - const remainder = key.length & 3; - const bytes = key.length - remainder; - let h1 = String.prototype.seed; - const c1 = 3432918353; - const c2 = 461845907; - let i = 0; - while (i < bytes) { - k1 = key.charCodeAt(i) & 255 | (key.charCodeAt(++i) & 255) << 8 | (key.charCodeAt(++i) & 255) << 16 | (key.charCodeAt(++i) & 255) << 24; - ++i; - k1 = (k1 & 65535) * c1 + (((k1 >>> 16) * c1 & 65535) << 16) & 4294967295; - k1 = k1 << 15 | k1 >>> 17; - k1 = (k1 & 65535) * c2 + (((k1 >>> 16) * c2 & 65535) << 16) & 4294967295; - h1 ^= k1; - h1 = h1 << 13 | h1 >>> 19; - h1b = (h1 & 65535) * 5 + (((h1 >>> 16) * 5 & 65535) << 16) & 4294967295; - h1 = (h1b & 65535) + 27492 + (((h1b >>> 16) + 58964 & 65535) << 16); - } - k1 = 0; - switch (remainder) { - case 3: - k1 ^= (key.charCodeAt(i + 2) & 255) << 16; - case 2: - k1 ^= (key.charCodeAt(i + 1) & 255) << 8; - case 1: - k1 ^= key.charCodeAt(i) & 255; - k1 = (k1 & 65535) * c1 + (((k1 >>> 16) * c1 & 65535) << 16) & 4294967295; - k1 = k1 << 15 | k1 >>> 17; - k1 = (k1 & 65535) * c2 + (((k1 >>> 16) * c2 & 65535) << 16) & 4294967295; - h1 ^= k1; - } - h1 ^= key.length; - h1 ^= h1 >>> 16; - h1 = (h1 & 65535) * 2246822507 + (((h1 >>> 16) * 2246822507 & 65535) << 16) & 4294967295; - h1 ^= h1 >>> 13; - h1 = (h1 & 65535) * 3266489909 + (((h1 >>> 16) * 3266489909 & 65535) << 16) & 4294967295; - h1 ^= h1 >>> 16; - return h1 >>> 0; - }; - function standardEqualsFunction(a, b) { - return a ? a.equals(b) : a == b; - } - function standardHashCodeFunction(a) { - return a ? 
a.hashCode() : -1; - } - var Set2 = class { - constructor(hashFunction, equalsFunction) { - this.data = {}; - this.hashFunction = hashFunction || standardHashCodeFunction; - this.equalsFunction = equalsFunction || standardEqualsFunction; - } - add(value) { - const hash = this.hashFunction(value); - const key = "hash_" + hash; - if (key in this.data) { - const values = this.data[key]; - for (let i = 0; i < values.length; i++) { - if (this.equalsFunction(value, values[i])) { - return values[i]; - } - } - values.push(value); - return value; - } else { - this.data[key] = [value]; - return value; - } - } - contains(value) { - return this.get(value) != null; - } - get(value) { - const hash = this.hashFunction(value); - const key = "hash_" + hash; - if (key in this.data) { - const values = this.data[key]; - for (let i = 0; i < values.length; i++) { - if (this.equalsFunction(value, values[i])) { - return values[i]; - } - } - } - return null; - } - values() { - let l = []; - for (const key in this.data) { - if (key.indexOf("hash_") === 0) { - l = l.concat(this.data[key]); - } - } - return l; - } - toString() { - return arrayToString(this.values()); - } - get length() { - let l = 0; - for (const key in this.data) { - if (key.indexOf("hash_") === 0) { - l = l + this.data[key].length; - } - } - return l; - } - }; - var BitSet = class { - constructor() { - this.data = []; - } - add(value) { - this.data[value] = true; - } - or(set) { - const bits = this; - Object.keys(set.data).map(function(alt) { - bits.add(alt); - }); - } - remove(value) { - delete this.data[value]; - } - contains(value) { - return this.data[value] === true; - } - values() { - return Object.keys(this.data); - } - minValue() { - return Math.min.apply(null, this.values()); - } - hashCode() { - const hash = new Hash2(); - hash.update(this.values()); - return hash.finish(); - } - equals(other) { - if (!(other instanceof BitSet)) { - return false; - } - return this.hashCode() === other.hashCode(); - } - toString() 
{ - return "{" + this.values().join(", ") + "}"; - } - get length() { - return this.values().length; - } - }; - var Map2 = class { - constructor(hashFunction, equalsFunction) { - this.data = {}; - this.hashFunction = hashFunction || standardHashCodeFunction; - this.equalsFunction = equalsFunction || standardEqualsFunction; - } - put(key, value) { - const hashKey = "hash_" + this.hashFunction(key); - if (hashKey in this.data) { - const entries = this.data[hashKey]; - for (let i = 0; i < entries.length; i++) { - const entry = entries[i]; - if (this.equalsFunction(key, entry.key)) { - const oldValue = entry.value; - entry.value = value; - return oldValue; - } - } - entries.push({key, value}); - return value; - } else { - this.data[hashKey] = [{key, value}]; - return value; - } - } - containsKey(key) { - const hashKey = "hash_" + this.hashFunction(key); - if (hashKey in this.data) { - const entries = this.data[hashKey]; - for (let i = 0; i < entries.length; i++) { - const entry = entries[i]; - if (this.equalsFunction(key, entry.key)) - return true; - } - } - return false; - } - get(key) { - const hashKey = "hash_" + this.hashFunction(key); - if (hashKey in this.data) { - const entries = this.data[hashKey]; - for (let i = 0; i < entries.length; i++) { - const entry = entries[i]; - if (this.equalsFunction(key, entry.key)) - return entry.value; - } - } - return null; - } - entries() { - let l = []; - for (const key in this.data) { - if (key.indexOf("hash_") === 0) { - l = l.concat(this.data[key]); - } - } - return l; - } - getKeys() { - return this.entries().map(function(e) { - return e.key; - }); - } - getValues() { - return this.entries().map(function(e) { - return e.value; - }); - } - toString() { - const ss = this.entries().map(function(entry) { - return "{" + entry.key + ":" + entry.value + "}"; - }); - return "[" + ss.join(", ") + "]"; - } - get length() { - let l = 0; - for (const hashKey in this.data) { - if (hashKey.indexOf("hash_") === 0) { - l = l + 
this.data[hashKey].length; - } - } - return l; - } - }; - var AltDict = class { - constructor() { - this.data = {}; - } - get(key) { - key = "k-" + key; - if (key in this.data) { - return this.data[key]; - } else { - return null; - } - } - put(key, value) { - key = "k-" + key; - this.data[key] = value; - } - values() { - const data = this.data; - const keys = Object.keys(this.data); - return keys.map(function(key) { - return data[key]; - }); - } - }; - var DoubleDict = class { - constructor(defaultMapCtor) { - this.defaultMapCtor = defaultMapCtor || Map2; - this.cacheMap = new this.defaultMapCtor(); - } - get(a, b) { - const d = this.cacheMap.get(a) || null; - return d === null ? null : d.get(b) || null; - } - set(a, b, o) { - let d = this.cacheMap.get(a) || null; - if (d === null) { - d = new this.defaultMapCtor(); - this.cacheMap.put(a, d); - } - d.put(b, o); - } - }; - var Hash2 = class { - constructor() { - this.count = 0; - this.hash = 0; - } - update() { - for (let i = 0; i < arguments.length; i++) { - const value = arguments[i]; - if (value == null) - continue; - if (Array.isArray(value)) - this.update.apply(this, value); - else { - let k = 0; - switch (typeof value) { - case "undefined": - case "function": - continue; - case "number": - case "boolean": - k = value; - break; - case "string": - k = value.hashCode(); - break; - default: - if (value.updateHashCode) - value.updateHashCode(this); - else - console.log("No updateHashCode for " + value.toString()); - continue; - } - k = k * 3432918353; - k = k << 15 | k >>> 32 - 15; - k = k * 461845907; - this.count = this.count + 1; - let hash = this.hash ^ k; - hash = hash << 13 | hash >>> 32 - 13; - hash = hash * 5 + 3864292196; - this.hash = hash; - } - } - } - finish() { - let hash = this.hash ^ this.count * 4; - hash = hash ^ hash >>> 16; - hash = hash * 2246822507; - hash = hash ^ hash >>> 13; - hash = hash * 3266489909; - hash = hash ^ hash >>> 16; - return hash; - } - }; - function hashStuff() { - const 
hash = new Hash2(); - hash.update.apply(hash, arguments); - return hash.finish(); - } - function escapeWhitespace(s, escapeSpaces) { - s = s.replace(/\t/g, "\\t").replace(/\n/g, "\\n").replace(/\r/g, "\\r"); - if (escapeSpaces) { - s = s.replace(/ /g, "\xB7"); - } - return s; - } - function titleCase(str) { - return str.replace(/\w\S*/g, function(txt) { - return txt.charAt(0).toUpperCase() + txt.substr(1); - }); - } - function equalArrays(a, b) { - if (!Array.isArray(a) || !Array.isArray(b)) - return false; - if (a === b) - return true; - if (a.length !== b.length) - return false; - for (let i = 0; i < a.length; i++) { - if (a[i] === b[i]) - continue; - if (!a[i].equals || !a[i].equals(b[i])) - return false; - } - return true; - } - module2.exports = { - Hash: Hash2, - Set: Set2, - Map: Map2, - BitSet, - AltDict, - DoubleDict, - hashStuff, - escapeWhitespace, - arrayToString, - titleCase, - equalArrays - }; -}); - -// node_modules/antlr4/src/antlr4/Token.js -var require_Token2 = __commonJS((exports, module2) => { - var Token2 = class { - constructor() { - this.source = null; - this.type = null; - this.channel = null; - this.start = null; - this.stop = null; - this.tokenIndex = null; - this.line = null; - this.column = null; - this._text = null; - } - getTokenSource() { - return this.source[0]; - } - getInputStream() { - return this.source[1]; - } - get text() { - return this._text; - } - set text(text) { - this._text = text; - } - }; - Token2.INVALID_TYPE = 0; - Token2.EPSILON = -2; - Token2.MIN_USER_TOKEN_TYPE = 1; - Token2.EOF = -1; - Token2.DEFAULT_CHANNEL = 0; - Token2.HIDDEN_CHANNEL = 1; - var CommonToken = class extends Token2 { - constructor(source, type, channel, start, stop) { - super(); - this.source = source !== void 0 ? source : CommonToken.EMPTY_SOURCE; - this.type = type !== void 0 ? type : null; - this.channel = channel !== void 0 ? channel : Token2.DEFAULT_CHANNEL; - this.start = start !== void 0 ? start : -1; - this.stop = stop !== void 0 ? 
stop : -1; - this.tokenIndex = -1; - if (this.source[0] !== null) { - this.line = source[0].line; - this.column = source[0].column; - } else { - this.column = -1; - } - } - clone() { - const t = new CommonToken(this.source, this.type, this.channel, this.start, this.stop); - t.tokenIndex = this.tokenIndex; - t.line = this.line; - t.column = this.column; - t.text = this.text; - return t; - } - toString() { - let txt = this.text; - if (txt !== null) { - txt = txt.replace(/\n/g, "\\n").replace(/\r/g, "\\r").replace(/\t/g, "\\t"); - } else { - txt = ""; - } - return "[@" + this.tokenIndex + "," + this.start + ":" + this.stop + "='" + txt + "',<" + this.type + ">" + (this.channel > 0 ? ",channel=" + this.channel : "") + "," + this.line + ":" + this.column + "]"; - } - get text() { - if (this._text !== null) { - return this._text; - } - const input = this.getInputStream(); - if (input === null) { - return null; - } - const n = input.size; - if (this.start < n && this.stop < n) { - return input.getText(this.start, this.stop); - } else { - return ""; - } - } - set text(text) { - this._text = text; - } - }; - CommonToken.EMPTY_SOURCE = [null, null]; - module2.exports = { - Token: Token2, - CommonToken - }; -}); - -// node_modules/antlr4/src/antlr4/atn/ATNState.js -var require_ATNState2 = __commonJS((exports, module2) => { - var ATNState = class { - constructor() { - this.atn = null; - this.stateNumber = ATNState.INVALID_STATE_NUMBER; - this.stateType = null; - this.ruleIndex = 0; - this.epsilonOnlyTransitions = false; - this.transitions = []; - this.nextTokenWithinRule = null; - } - toString() { - return this.stateNumber; - } - equals(other) { - if (other instanceof ATNState) { - return this.stateNumber === other.stateNumber; - } else { - return false; - } - } - isNonGreedyExitState() { - return false; - } - addTransition(trans, index) { - if (index === void 0) { - index = -1; - } - if (this.transitions.length === 0) { - this.epsilonOnlyTransitions = trans.isEpsilon; - } 
else if (this.epsilonOnlyTransitions !== trans.isEpsilon) { - this.epsilonOnlyTransitions = false; - } - if (index === -1) { - this.transitions.push(trans); - } else { - this.transitions.splice(index, 1, trans); - } - } - }; - ATNState.INVALID_TYPE = 0; - ATNState.BASIC = 1; - ATNState.RULE_START = 2; - ATNState.BLOCK_START = 3; - ATNState.PLUS_BLOCK_START = 4; - ATNState.STAR_BLOCK_START = 5; - ATNState.TOKEN_START = 6; - ATNState.RULE_STOP = 7; - ATNState.BLOCK_END = 8; - ATNState.STAR_LOOP_BACK = 9; - ATNState.STAR_LOOP_ENTRY = 10; - ATNState.PLUS_LOOP_BACK = 11; - ATNState.LOOP_END = 12; - ATNState.serializationNames = [ - "INVALID", - "BASIC", - "RULE_START", - "BLOCK_START", - "PLUS_BLOCK_START", - "STAR_BLOCK_START", - "TOKEN_START", - "RULE_STOP", - "BLOCK_END", - "STAR_LOOP_BACK", - "STAR_LOOP_ENTRY", - "PLUS_LOOP_BACK", - "LOOP_END" - ]; - ATNState.INVALID_STATE_NUMBER = -1; - var BasicState = class extends ATNState { - constructor() { - super(); - this.stateType = ATNState.BASIC; - } - }; - var DecisionState = class extends ATNState { - constructor() { - super(); - this.decision = -1; - this.nonGreedy = false; - return this; - } - }; - var BlockStartState = class extends DecisionState { - constructor() { - super(); - this.endState = null; - return this; - } - }; - var BasicBlockStartState = class extends BlockStartState { - constructor() { - super(); - this.stateType = ATNState.BLOCK_START; - return this; - } - }; - var BlockEndState = class extends ATNState { - constructor() { - super(); - this.stateType = ATNState.BLOCK_END; - this.startState = null; - return this; - } - }; - var RuleStopState = class extends ATNState { - constructor() { - super(); - this.stateType = ATNState.RULE_STOP; - return this; - } - }; - var RuleStartState = class extends ATNState { - constructor() { - super(); - this.stateType = ATNState.RULE_START; - this.stopState = null; - this.isPrecedenceRule = false; - return this; - } - }; - var PlusLoopbackState = class extends 
DecisionState { - constructor() { - super(); - this.stateType = ATNState.PLUS_LOOP_BACK; - return this; - } - }; - var PlusBlockStartState = class extends BlockStartState { - constructor() { - super(); - this.stateType = ATNState.PLUS_BLOCK_START; - this.loopBackState = null; - return this; - } - }; - var StarBlockStartState = class extends BlockStartState { - constructor() { - super(); - this.stateType = ATNState.STAR_BLOCK_START; - return this; - } - }; - var StarLoopbackState = class extends ATNState { - constructor() { - super(); - this.stateType = ATNState.STAR_LOOP_BACK; - return this; - } - }; - var StarLoopEntryState = class extends DecisionState { - constructor() { - super(); - this.stateType = ATNState.STAR_LOOP_ENTRY; - this.loopBackState = null; - this.isPrecedenceDecision = null; - return this; - } - }; - var LoopEndState = class extends ATNState { - constructor() { - super(); - this.stateType = ATNState.LOOP_END; - this.loopBackState = null; - return this; - } - }; - var TokensStartState = class extends DecisionState { - constructor() { - super(); - this.stateType = ATNState.TOKEN_START; - return this; - } - }; - module2.exports = { - ATNState, - BasicState, - DecisionState, - BlockStartState, - BlockEndState, - LoopEndState, - RuleStartState, - RuleStopState, - TokensStartState, - PlusLoopbackState, - StarLoopbackState, - StarLoopEntryState, - PlusBlockStartState, - StarBlockStartState, - BasicBlockStartState - }; -}); - -// node_modules/antlr4/src/antlr4/atn/SemanticContext.js -var require_SemanticContext2 = __commonJS((exports, module2) => { - var {Set: Set2, Hash: Hash2, equalArrays} = require_Utils2(); - var SemanticContext = class { - hashCode() { - const hash = new Hash2(); - this.updateHashCode(hash); - return hash.finish(); - } - evaluate(parser, outerContext) { - } - evalPrecedence(parser, outerContext) { - return this; - } - static andContext(a, b) { - if (a === null || a === SemanticContext.NONE) { - return b; - } - if (b === null || b === 
SemanticContext.NONE) { - return a; - } - const result = new AND(a, b); - if (result.opnds.length === 1) { - return result.opnds[0]; - } else { - return result; - } - } - static orContext(a, b) { - if (a === null) { - return b; - } - if (b === null) { - return a; - } - if (a === SemanticContext.NONE || b === SemanticContext.NONE) { - return SemanticContext.NONE; - } - const result = new OR(a, b); - if (result.opnds.length === 1) { - return result.opnds[0]; - } else { - return result; - } - } - }; - var Predicate = class extends SemanticContext { - constructor(ruleIndex, predIndex, isCtxDependent) { - super(); - this.ruleIndex = ruleIndex === void 0 ? -1 : ruleIndex; - this.predIndex = predIndex === void 0 ? -1 : predIndex; - this.isCtxDependent = isCtxDependent === void 0 ? false : isCtxDependent; - } - evaluate(parser, outerContext) { - const localctx = this.isCtxDependent ? outerContext : null; - return parser.sempred(localctx, this.ruleIndex, this.predIndex); - } - updateHashCode(hash) { - hash.update(this.ruleIndex, this.predIndex, this.isCtxDependent); - } - equals(other) { - if (this === other) { - return true; - } else if (!(other instanceof Predicate)) { - return false; - } else { - return this.ruleIndex === other.ruleIndex && this.predIndex === other.predIndex && this.isCtxDependent === other.isCtxDependent; - } - } - toString() { - return "{" + this.ruleIndex + ":" + this.predIndex + "}?"; - } - }; - SemanticContext.NONE = new Predicate(); - var PrecedencePredicate = class extends SemanticContext { - constructor(precedence) { - super(); - this.precedence = precedence === void 0 ? 
0 : precedence; - } - evaluate(parser, outerContext) { - return parser.precpred(outerContext, this.precedence); - } - evalPrecedence(parser, outerContext) { - if (parser.precpred(outerContext, this.precedence)) { - return SemanticContext.NONE; - } else { - return null; - } - } - compareTo(other) { - return this.precedence - other.precedence; - } - updateHashCode(hash) { - hash.update(this.precedence); - } - equals(other) { - if (this === other) { - return true; - } else if (!(other instanceof PrecedencePredicate)) { - return false; - } else { - return this.precedence === other.precedence; - } - } - toString() { - return "{" + this.precedence + ">=prec}?"; - } - static filterPrecedencePredicates(set) { - const result = []; - set.values().map(function(context) { - if (context instanceof PrecedencePredicate) { - result.push(context); - } - }); - return result; - } - }; - var AND = class extends SemanticContext { - constructor(a, b) { - super(); - const operands = new Set2(); - if (a instanceof AND) { - a.opnds.map(function(o) { - operands.add(o); - }); - } else { - operands.add(a); - } - if (b instanceof AND) { - b.opnds.map(function(o) { - operands.add(o); - }); - } else { - operands.add(b); - } - const precedencePredicates = PrecedencePredicate.filterPrecedencePredicates(operands); - if (precedencePredicates.length > 0) { - let reduced = null; - precedencePredicates.map(function(p) { - if (reduced === null || p.precedence < reduced.precedence) { - reduced = p; - } - }); - operands.add(reduced); - } - this.opnds = Array.from(operands.values()); - } - equals(other) { - if (this === other) { - return true; - } else if (!(other instanceof AND)) { - return false; - } else { - return equalArrays(this.opnds, other.opnds); - } - } - updateHashCode(hash) { - hash.update(this.opnds, "AND"); - } - evaluate(parser, outerContext) { - for (let i = 0; i < this.opnds.length; i++) { - if (!this.opnds[i].evaluate(parser, outerContext)) { - return false; - } - } - return true; - } - 
evalPrecedence(parser, outerContext) { - let differs = false; - const operands = []; - for (let i = 0; i < this.opnds.length; i++) { - const context = this.opnds[i]; - const evaluated = context.evalPrecedence(parser, outerContext); - differs |= evaluated !== context; - if (evaluated === null) { - return null; - } else if (evaluated !== SemanticContext.NONE) { - operands.push(evaluated); - } - } - if (!differs) { - return this; - } - if (operands.length === 0) { - return SemanticContext.NONE; - } - let result = null; - operands.map(function(o) { - result = result === null ? o : SemanticContext.andContext(result, o); - }); - return result; - } - toString() { - const s = this.opnds.map((o) => o.toString()); - return (s.length > 3 ? s.slice(3) : s).join("&&"); - } - }; - var OR = class extends SemanticContext { - constructor(a, b) { - super(); - const operands = new Set2(); - if (a instanceof OR) { - a.opnds.map(function(o) { - operands.add(o); - }); - } else { - operands.add(a); - } - if (b instanceof OR) { - b.opnds.map(function(o) { - operands.add(o); - }); - } else { - operands.add(b); - } - const precedencePredicates = PrecedencePredicate.filterPrecedencePredicates(operands); - if (precedencePredicates.length > 0) { - const s = precedencePredicates.sort(function(a2, b2) { - return a2.compareTo(b2); - }); - const reduced = s[s.length - 1]; - operands.add(reduced); - } - this.opnds = Array.from(operands.values()); - } - equals(other) { - if (this === other) { - return true; - } else if (!(other instanceof OR)) { - return false; - } else { - return equalArrays(this.opnds, other.opnds); - } - } - updateHashCode(hash) { - hash.update(this.opnds, "OR"); - } - evaluate(parser, outerContext) { - for (let i = 0; i < this.opnds.length; i++) { - if (this.opnds[i].evaluate(parser, outerContext)) { - return true; - } - } - return false; - } - evalPrecedence(parser, outerContext) { - let differs = false; - const operands = []; - for (let i = 0; i < this.opnds.length; i++) { - 
const context = this.opnds[i]; - const evaluated = context.evalPrecedence(parser, outerContext); - differs |= evaluated !== context; - if (evaluated === SemanticContext.NONE) { - return SemanticContext.NONE; - } else if (evaluated !== null) { - operands.push(evaluated); - } - } - if (!differs) { - return this; - } - if (operands.length === 0) { - return null; - } - const result = null; - operands.map(function(o) { - return result === null ? o : SemanticContext.orContext(result, o); - }); - return result; - } - toString() { - const s = this.opnds.map((o) => o.toString()); - return (s.length > 3 ? s.slice(3) : s).join("||"); - } - }; - module2.exports = { - SemanticContext, - PrecedencePredicate, - Predicate - }; -}); - -// node_modules/antlr4/src/antlr4/atn/ATNConfig.js -var require_ATNConfig2 = __commonJS((exports, module2) => { - var {DecisionState} = require_ATNState2(); - var {SemanticContext} = require_SemanticContext2(); - var {Hash: Hash2} = require_Utils2(); - function checkParams(params, isCfg) { - if (params === null) { - const result = {state: null, alt: null, context: null, semanticContext: null}; - if (isCfg) { - result.reachesIntoOuterContext = 0; - } - return result; - } else { - const props = {}; - props.state = params.state || null; - props.alt = params.alt === void 0 ? null : params.alt; - props.context = params.context || null; - props.semanticContext = params.semanticContext || null; - if (isCfg) { - props.reachesIntoOuterContext = params.reachesIntoOuterContext || 0; - props.precedenceFilterSuppressed = params.precedenceFilterSuppressed || false; - } - return props; - } - } - var ATNConfig = class { - constructor(params, config) { - this.checkContext(params, config); - params = checkParams(params); - config = checkParams(config, true); - this.state = params.state !== null ? params.state : config.state; - this.alt = params.alt !== null ? params.alt : config.alt; - this.context = params.context !== null ? 
params.context : config.context; - this.semanticContext = params.semanticContext !== null ? params.semanticContext : config.semanticContext !== null ? config.semanticContext : SemanticContext.NONE; - this.reachesIntoOuterContext = config.reachesIntoOuterContext; - this.precedenceFilterSuppressed = config.precedenceFilterSuppressed; - } - checkContext(params, config) { - if ((params.context === null || params.context === void 0) && (config === null || config.context === null || config.context === void 0)) { - this.context = null; - } - } - hashCode() { - const hash = new Hash2(); - this.updateHashCode(hash); - return hash.finish(); - } - updateHashCode(hash) { - hash.update(this.state.stateNumber, this.alt, this.context, this.semanticContext); - } - equals(other) { - if (this === other) { - return true; - } else if (!(other instanceof ATNConfig)) { - return false; - } else { - return this.state.stateNumber === other.state.stateNumber && this.alt === other.alt && (this.context === null ? other.context === null : this.context.equals(other.context)) && this.semanticContext.equals(other.semanticContext) && this.precedenceFilterSuppressed === other.precedenceFilterSuppressed; - } - } - hashCodeForConfigSet() { - const hash = new Hash2(); - hash.update(this.state.stateNumber, this.alt, this.semanticContext); - return hash.finish(); - } - equalsForConfigSet(other) { - if (this === other) { - return true; - } else if (!(other instanceof ATNConfig)) { - return false; - } else { - return this.state.stateNumber === other.state.stateNumber && this.alt === other.alt && this.semanticContext.equals(other.semanticContext); - } - } - toString() { - return "(" + this.state + "," + this.alt + (this.context !== null ? ",[" + this.context.toString() + "]" : "") + (this.semanticContext !== SemanticContext.NONE ? "," + this.semanticContext.toString() : "") + (this.reachesIntoOuterContext > 0 ? 
",up=" + this.reachesIntoOuterContext : "") + ")"; - } - }; - var LexerATNConfig = class extends ATNConfig { - constructor(params, config) { - super(params, config); - const lexerActionExecutor = params.lexerActionExecutor || null; - this.lexerActionExecutor = lexerActionExecutor || (config !== null ? config.lexerActionExecutor : null); - this.passedThroughNonGreedyDecision = config !== null ? this.checkNonGreedyDecision(config, this.state) : false; - this.hashCodeForConfigSet = LexerATNConfig.prototype.hashCode; - this.equalsForConfigSet = LexerATNConfig.prototype.equals; - return this; - } - updateHashCode(hash) { - hash.update(this.state.stateNumber, this.alt, this.context, this.semanticContext, this.passedThroughNonGreedyDecision, this.lexerActionExecutor); - } - equals(other) { - return this === other || other instanceof LexerATNConfig && this.passedThroughNonGreedyDecision === other.passedThroughNonGreedyDecision && (this.lexerActionExecutor ? this.lexerActionExecutor.equals(other.lexerActionExecutor) : !other.lexerActionExecutor) && super.equals(other); - } - checkNonGreedyDecision(source, target) { - return source.passedThroughNonGreedyDecision || target instanceof DecisionState && target.nonGreedy; - } - }; - module2.exports.ATNConfig = ATNConfig; - module2.exports.LexerATNConfig = LexerATNConfig; -}); - -// node_modules/antlr4/src/antlr4/IntervalSet.js -var require_IntervalSet2 = __commonJS((exports, module2) => { - var {Token: Token2} = require_Token2(); - var Interval = class { - constructor(start, stop) { - this.start = start; - this.stop = stop; - } - contains(item) { - return item >= this.start && item < this.stop; - } - toString() { - if (this.start === this.stop - 1) { - return this.start.toString(); - } else { - return this.start.toString() + ".." 
+ (this.stop - 1).toString(); - } - } - get length() { - return this.stop - this.start; - } - }; - var IntervalSet = class { - constructor() { - this.intervals = null; - this.readOnly = false; - } - first(v) { - if (this.intervals === null || this.intervals.length === 0) { - return Token2.INVALID_TYPE; - } else { - return this.intervals[0].start; - } - } - addOne(v) { - this.addInterval(new Interval(v, v + 1)); - } - addRange(l, h) { - this.addInterval(new Interval(l, h + 1)); - } - addInterval(toAdd) { - if (this.intervals === null) { - this.intervals = []; - this.intervals.push(toAdd); - } else { - for (let pos = 0; pos < this.intervals.length; pos++) { - const existing = this.intervals[pos]; - if (toAdd.stop < existing.start) { - this.intervals.splice(pos, 0, toAdd); - return; - } else if (toAdd.stop === existing.start) { - this.intervals[pos].start = toAdd.start; - return; - } else if (toAdd.start <= existing.stop) { - this.intervals[pos] = new Interval(Math.min(existing.start, toAdd.start), Math.max(existing.stop, toAdd.stop)); - this.reduce(pos); - return; - } - } - this.intervals.push(toAdd); - } - } - addSet(other) { - if (other.intervals !== null) { - other.intervals.forEach((toAdd) => this.addInterval(toAdd), this); - } - return this; - } - reduce(pos) { - if (pos < this.intervals.length - 1) { - const current = this.intervals[pos]; - const next = this.intervals[pos + 1]; - if (current.stop >= next.stop) { - this.intervals.splice(pos + 1, 1); - this.reduce(pos); - } else if (current.stop >= next.start) { - this.intervals[pos] = new Interval(current.start, next.stop); - this.intervals.splice(pos + 1, 1); - } - } - } - complement(start, stop) { - const result = new IntervalSet(); - result.addInterval(new Interval(start, stop + 1)); - if (this.intervals !== null) - this.intervals.forEach((toRemove) => result.removeRange(toRemove)); - return result; - } - contains(item) { - if (this.intervals === null) { - return false; - } else { - for (let k = 0; k < 
this.intervals.length; k++) { - if (this.intervals[k].contains(item)) { - return true; - } - } - return false; - } - } - removeRange(toRemove) { - if (toRemove.start === toRemove.stop - 1) { - this.removeOne(toRemove.start); - } else if (this.intervals !== null) { - let pos = 0; - for (let n = 0; n < this.intervals.length; n++) { - const existing = this.intervals[pos]; - if (toRemove.stop <= existing.start) { - return; - } else if (toRemove.start > existing.start && toRemove.stop < existing.stop) { - this.intervals[pos] = new Interval(existing.start, toRemove.start); - const x = new Interval(toRemove.stop, existing.stop); - this.intervals.splice(pos, 0, x); - return; - } else if (toRemove.start <= existing.start && toRemove.stop >= existing.stop) { - this.intervals.splice(pos, 1); - pos = pos - 1; - } else if (toRemove.start < existing.stop) { - this.intervals[pos] = new Interval(existing.start, toRemove.start); - } else if (toRemove.stop < existing.stop) { - this.intervals[pos] = new Interval(toRemove.stop, existing.stop); - } - pos += 1; - } - } - } - removeOne(value) { - if (this.intervals !== null) { - for (let i = 0; i < this.intervals.length; i++) { - const existing = this.intervals[i]; - if (value < existing.start) { - return; - } else if (value === existing.start && value === existing.stop - 1) { - this.intervals.splice(i, 1); - return; - } else if (value === existing.start) { - this.intervals[i] = new Interval(existing.start + 1, existing.stop); - return; - } else if (value === existing.stop - 1) { - this.intervals[i] = new Interval(existing.start, existing.stop - 1); - return; - } else if (value < existing.stop - 1) { - const replace = new Interval(existing.start, value); - existing.start = value + 1; - this.intervals.splice(i, 0, replace); - return; - } - } - } - } - toString(literalNames, symbolicNames, elemsAreChar) { - literalNames = literalNames || null; - symbolicNames = symbolicNames || null; - elemsAreChar = elemsAreChar || false; - if 
(this.intervals === null) { - return "{}"; - } else if (literalNames !== null || symbolicNames !== null) { - return this.toTokenString(literalNames, symbolicNames); - } else if (elemsAreChar) { - return this.toCharString(); - } else { - return this.toIndexString(); - } - } - toCharString() { - const names = []; - for (let i = 0; i < this.intervals.length; i++) { - const existing = this.intervals[i]; - if (existing.stop === existing.start + 1) { - if (existing.start === Token2.EOF) { - names.push(""); - } else { - names.push("'" + String.fromCharCode(existing.start) + "'"); - } - } else { - names.push("'" + String.fromCharCode(existing.start) + "'..'" + String.fromCharCode(existing.stop - 1) + "'"); - } - } - if (names.length > 1) { - return "{" + names.join(", ") + "}"; - } else { - return names[0]; - } - } - toIndexString() { - const names = []; - for (let i = 0; i < this.intervals.length; i++) { - const existing = this.intervals[i]; - if (existing.stop === existing.start + 1) { - if (existing.start === Token2.EOF) { - names.push(""); - } else { - names.push(existing.start.toString()); - } - } else { - names.push(existing.start.toString() + ".." 
+ (existing.stop - 1).toString()); - } - } - if (names.length > 1) { - return "{" + names.join(", ") + "}"; - } else { - return names[0]; - } - } - toTokenString(literalNames, symbolicNames) { - const names = []; - for (let i = 0; i < this.intervals.length; i++) { - const existing = this.intervals[i]; - for (let j = existing.start; j < existing.stop; j++) { - names.push(this.elementName(literalNames, symbolicNames, j)); - } - } - if (names.length > 1) { - return "{" + names.join(", ") + "}"; - } else { - return names[0]; - } - } - elementName(literalNames, symbolicNames, token) { - if (token === Token2.EOF) { - return ""; - } else if (token === Token2.EPSILON) { - return ""; - } else { - return literalNames[token] || symbolicNames[token]; - } - } - get length() { - return this.intervals.map((interval) => interval.length).reduce((acc, val) => acc + val); - } - }; - module2.exports = { - Interval, - IntervalSet - }; -}); - -// node_modules/antlr4/src/antlr4/atn/Transition.js -var require_Transition2 = __commonJS((exports, module2) => { - var {Token: Token2} = require_Token2(); - var {IntervalSet} = require_IntervalSet2(); - var {Predicate, PrecedencePredicate} = require_SemanticContext2(); - var Transition = class { - constructor(target) { - if (target === void 0 || target === null) { - throw "target cannot be null."; - } - this.target = target; - this.isEpsilon = false; - this.label = null; - } - }; - Transition.EPSILON = 1; - Transition.RANGE = 2; - Transition.RULE = 3; - Transition.PREDICATE = 4; - Transition.ATOM = 5; - Transition.ACTION = 6; - Transition.SET = 7; - Transition.NOT_SET = 8; - Transition.WILDCARD = 9; - Transition.PRECEDENCE = 10; - Transition.serializationNames = [ - "INVALID", - "EPSILON", - "RANGE", - "RULE", - "PREDICATE", - "ATOM", - "ACTION", - "SET", - "NOT_SET", - "WILDCARD", - "PRECEDENCE" - ]; - Transition.serializationTypes = { - EpsilonTransition: Transition.EPSILON, - RangeTransition: Transition.RANGE, - RuleTransition: 
Transition.RULE, - PredicateTransition: Transition.PREDICATE, - AtomTransition: Transition.ATOM, - ActionTransition: Transition.ACTION, - SetTransition: Transition.SET, - NotSetTransition: Transition.NOT_SET, - WildcardTransition: Transition.WILDCARD, - PrecedencePredicateTransition: Transition.PRECEDENCE - }; - var AtomTransition2 = class extends Transition { - constructor(target, label) { - super(target); - this.label_ = label; - this.label = this.makeLabel(); - this.serializationType = Transition.ATOM; - } - makeLabel() { - const s = new IntervalSet(); - s.addOne(this.label_); - return s; - } - matches(symbol, minVocabSymbol, maxVocabSymbol) { - return this.label_ === symbol; - } - toString() { - return this.label_; - } - }; - var RuleTransition = class extends Transition { - constructor(ruleStart, ruleIndex, precedence, followState) { - super(ruleStart); - this.ruleIndex = ruleIndex; - this.precedence = precedence; - this.followState = followState; - this.serializationType = Transition.RULE; - this.isEpsilon = true; - } - matches(symbol, minVocabSymbol, maxVocabSymbol) { - return false; - } - }; - var EpsilonTransition = class extends Transition { - constructor(target, outermostPrecedenceReturn) { - super(target); - this.serializationType = Transition.EPSILON; - this.isEpsilon = true; - this.outermostPrecedenceReturn = outermostPrecedenceReturn; - } - matches(symbol, minVocabSymbol, maxVocabSymbol) { - return false; - } - toString() { - return "epsilon"; - } - }; - var RangeTransition = class extends Transition { - constructor(target, start, stop) { - super(target); - this.serializationType = Transition.RANGE; - this.start = start; - this.stop = stop; - this.label = this.makeLabel(); - } - makeLabel() { - const s = new IntervalSet(); - s.addRange(this.start, this.stop); - return s; - } - matches(symbol, minVocabSymbol, maxVocabSymbol) { - return symbol >= this.start && symbol <= this.stop; - } - toString() { - return "'" + String.fromCharCode(this.start) + 
"'..'" + String.fromCharCode(this.stop) + "'"; - } - }; - var AbstractPredicateTransition = class extends Transition { - constructor(target) { - super(target); - } - }; - var PredicateTransition = class extends AbstractPredicateTransition { - constructor(target, ruleIndex, predIndex, isCtxDependent) { - super(target); - this.serializationType = Transition.PREDICATE; - this.ruleIndex = ruleIndex; - this.predIndex = predIndex; - this.isCtxDependent = isCtxDependent; - this.isEpsilon = true; - } - matches(symbol, minVocabSymbol, maxVocabSymbol) { - return false; - } - getPredicate() { - return new Predicate(this.ruleIndex, this.predIndex, this.isCtxDependent); - } - toString() { - return "pred_" + this.ruleIndex + ":" + this.predIndex; - } - }; - var ActionTransition = class extends Transition { - constructor(target, ruleIndex, actionIndex, isCtxDependent) { - super(target); - this.serializationType = Transition.ACTION; - this.ruleIndex = ruleIndex; - this.actionIndex = actionIndex === void 0 ? -1 : actionIndex; - this.isCtxDependent = isCtxDependent === void 0 ? 
false : isCtxDependent; - this.isEpsilon = true; - } - matches(symbol, minVocabSymbol, maxVocabSymbol) { - return false; - } - toString() { - return "action_" + this.ruleIndex + ":" + this.actionIndex; - } - }; - var SetTransition = class extends Transition { - constructor(target, set) { - super(target); - this.serializationType = Transition.SET; - if (set !== void 0 && set !== null) { - this.label = set; - } else { - this.label = new IntervalSet(); - this.label.addOne(Token2.INVALID_TYPE); - } - } - matches(symbol, minVocabSymbol, maxVocabSymbol) { - return this.label.contains(symbol); - } - toString() { - return this.label.toString(); - } - }; - var NotSetTransition = class extends SetTransition { - constructor(target, set) { - super(target, set); - this.serializationType = Transition.NOT_SET; - } - matches(symbol, minVocabSymbol, maxVocabSymbol) { - return symbol >= minVocabSymbol && symbol <= maxVocabSymbol && !super.matches(symbol, minVocabSymbol, maxVocabSymbol); - } - toString() { - return "~" + super.toString(); - } - }; - var WildcardTransition = class extends Transition { - constructor(target) { - super(target); - this.serializationType = Transition.WILDCARD; - } - matches(symbol, minVocabSymbol, maxVocabSymbol) { - return symbol >= minVocabSymbol && symbol <= maxVocabSymbol; - } - toString() { - return "."; - } - }; - var PrecedencePredicateTransition = class extends AbstractPredicateTransition { - constructor(target, precedence) { - super(target); - this.serializationType = Transition.PRECEDENCE; - this.precedence = precedence; - this.isEpsilon = true; - } - matches(symbol, minVocabSymbol, maxVocabSymbol) { - return false; - } - getPredicate() { - return new PrecedencePredicate(this.precedence); - } - toString() { - return this.precedence + " >= _p"; - } - }; - module2.exports = { - Transition, - AtomTransition: AtomTransition2, - SetTransition, - NotSetTransition, - RuleTransition, - ActionTransition, - EpsilonTransition, - RangeTransition, - 
WildcardTransition, - PredicateTransition, - PrecedencePredicateTransition, - AbstractPredicateTransition - }; -}); - -// node_modules/antlr4/src/antlr4/tree/Tree.js -var require_Tree = __commonJS((exports, module2) => { - var {Token: Token2} = require_Token2(); - var {Interval} = require_IntervalSet2(); - var INVALID_INTERVAL = new Interval(-1, -2); - var Tree = class { - }; - var SyntaxTree = class extends Tree { - constructor() { - super(); - } - }; - var ParseTree = class extends SyntaxTree { - constructor() { - super(); - } - }; - var RuleNode = class extends ParseTree { - constructor() { - super(); - } - getRuleContext() { - throw new Error("missing interface implementation"); - } - }; - var TerminalNode = class extends ParseTree { - constructor() { - super(); - } - }; - var ErrorNode2 = class extends TerminalNode { - constructor() { - super(); - } - }; - var ParseTreeVisitor = class { - visit(ctx) { - if (Array.isArray(ctx)) { - return ctx.map(function(child) { - return child.accept(this); - }, this); - } else { - return ctx.accept(this); - } - } - visitChildren(ctx) { - if (ctx.children) { - return this.visit(ctx.children); - } else { - return null; - } - } - visitTerminal(node) { - } - visitErrorNode(node) { - } - }; - var ParseTreeListener = class { - visitTerminal(node) { - } - visitErrorNode(node) { - } - enterEveryRule(node) { - } - exitEveryRule(node) { - } - }; - var TerminalNodeImpl = class extends TerminalNode { - constructor(symbol) { - super(); - this.parentCtx = null; - this.symbol = symbol; - } - getChild(i) { - return null; - } - getSymbol() { - return this.symbol; - } - getParent() { - return this.parentCtx; - } - getPayload() { - return this.symbol; - } - getSourceInterval() { - if (this.symbol === null) { - return INVALID_INTERVAL; - } - const tokenIndex = this.symbol.tokenIndex; - return new Interval(tokenIndex, tokenIndex); - } - getChildCount() { - return 0; - } - accept(visitor) { - return visitor.visitTerminal(this); - } - getText() { 
- return this.symbol.text; - } - toString() { - if (this.symbol.type === Token2.EOF) { - return ""; - } else { - return this.symbol.text; - } - } - }; - var ErrorNodeImpl = class extends TerminalNodeImpl { - constructor(token) { - super(token); - } - isErrorNode() { - return true; - } - accept(visitor) { - return visitor.visitErrorNode(this); - } - }; - var ParseTreeWalker = class { - walk(listener, t) { - const errorNode = t instanceof ErrorNode2 || t.isErrorNode !== void 0 && t.isErrorNode(); - if (errorNode) { - listener.visitErrorNode(t); - } else if (t instanceof TerminalNode) { - listener.visitTerminal(t); - } else { - this.enterRule(listener, t); - for (let i = 0; i < t.getChildCount(); i++) { - const child = t.getChild(i); - this.walk(listener, child); - } - this.exitRule(listener, t); - } - } - enterRule(listener, r) { - const ctx = r.getRuleContext(); - listener.enterEveryRule(ctx); - ctx.enterRule(listener); - } - exitRule(listener, r) { - const ctx = r.getRuleContext(); - ctx.exitRule(listener); - listener.exitEveryRule(ctx); - } - }; - ParseTreeWalker.DEFAULT = new ParseTreeWalker(); - module2.exports = { - RuleNode, - ErrorNode: ErrorNode2, - TerminalNode, - ErrorNodeImpl, - TerminalNodeImpl, - ParseTreeListener, - ParseTreeVisitor, - ParseTreeWalker, - INVALID_INTERVAL - }; -}); - -// node_modules/antlr4/src/antlr4/tree/Trees.js -var require_Trees2 = __commonJS((exports, module2) => { - var Utils3 = require_Utils2(); - var {Token: Token2} = require_Token2(); - var {ErrorNode: ErrorNode2, TerminalNode, RuleNode} = require_Tree(); - var Trees = { - toStringTree: function(tree, ruleNames, recog) { - ruleNames = ruleNames || null; - recog = recog || null; - if (recog !== null) { - ruleNames = recog.ruleNames; - } - let s = Trees.getNodeText(tree, ruleNames); - s = Utils3.escapeWhitespace(s, false); - const c = tree.getChildCount(); - if (c === 0) { - return s; - } - let res = "(" + s + " "; - if (c > 0) { - s = Trees.toStringTree(tree.getChild(0), 
ruleNames); - res = res.concat(s); - } - for (let i = 1; i < c; i++) { - s = Trees.toStringTree(tree.getChild(i), ruleNames); - res = res.concat(" " + s); - } - res = res.concat(")"); - return res; - }, - getNodeText: function(t, ruleNames, recog) { - ruleNames = ruleNames || null; - recog = recog || null; - if (recog !== null) { - ruleNames = recog.ruleNames; - } - if (ruleNames !== null) { - if (t instanceof RuleNode) { - const context = t.getRuleContext(); - const altNumber = context.getAltNumber(); - if (altNumber != 0) { - return ruleNames[t.ruleIndex] + ":" + altNumber; - } - return ruleNames[t.ruleIndex]; - } else if (t instanceof ErrorNode2) { - return t.toString(); - } else if (t instanceof TerminalNode) { - if (t.symbol !== null) { - return t.symbol.text; - } - } - } - const payload = t.getPayload(); - if (payload instanceof Token2) { - return payload.text; - } - return t.getPayload().toString(); - }, - getChildren: function(t) { - const list = []; - for (let i = 0; i < t.getChildCount(); i++) { - list.push(t.getChild(i)); - } - return list; - }, - getAncestors: function(t) { - let ancestors = []; - t = t.getParent(); - while (t !== null) { - ancestors = [t].concat(ancestors); - t = t.getParent(); - } - return ancestors; - }, - findAllTokenNodes: function(t, ttype) { - return Trees.findAllNodes(t, ttype, true); - }, - findAllRuleNodes: function(t, ruleIndex) { - return Trees.findAllNodes(t, ruleIndex, false); - }, - findAllNodes: function(t, index, findTokens) { - const nodes = []; - Trees._findAllNodes(t, index, findTokens, nodes); - return nodes; - }, - _findAllNodes: function(t, index, findTokens, nodes) { - if (findTokens && t instanceof TerminalNode) { - if (t.symbol.type === index) { - nodes.push(t); - } - } else if (!findTokens && t instanceof RuleNode) { - if (t.ruleIndex === index) { - nodes.push(t); - } - } - for (let i = 0; i < t.getChildCount(); i++) { - Trees._findAllNodes(t.getChild(i), index, findTokens, nodes); - } - }, - descendants: 
function(t) { - let nodes = [t]; - for (let i = 0; i < t.getChildCount(); i++) { - nodes = nodes.concat(Trees.descendants(t.getChild(i))); - } - return nodes; - } - }; - module2.exports = Trees; -}); - -// node_modules/antlr4/src/antlr4/RuleContext.js -var require_RuleContext2 = __commonJS((exports, module2) => { - var {RuleNode} = require_Tree(); - var {INVALID_INTERVAL} = require_Tree(); - var Trees = require_Trees2(); - var RuleContext = class extends RuleNode { - constructor(parent, invokingState) { - super(); - this.parentCtx = parent || null; - this.invokingState = invokingState || -1; - } - depth() { - let n = 0; - let p = this; - while (p !== null) { - p = p.parentCtx; - n += 1; - } - return n; - } - isEmpty() { - return this.invokingState === -1; - } - getSourceInterval() { - return INVALID_INTERVAL; - } - getRuleContext() { - return this; - } - getPayload() { - return this; - } - getText() { - if (this.getChildCount() === 0) { - return ""; - } else { - return this.children.map(function(child) { - return child.getText(); - }).join(""); - } - } - getAltNumber() { - return 0; - } - setAltNumber(altNumber) { - } - getChild(i) { - return null; - } - getChildCount() { - return 0; - } - accept(visitor) { - return visitor.visitChildren(this); - } - toStringTree(ruleNames, recog) { - return Trees.toStringTree(this, ruleNames, recog); - } - toString(ruleNames, stop) { - ruleNames = ruleNames || null; - stop = stop || null; - let p = this; - let s = "["; - while (p !== null && p !== stop) { - if (ruleNames === null) { - if (!p.isEmpty()) { - s += p.invokingState; - } - } else { - const ri = p.ruleIndex; - const ruleName = ri >= 0 && ri < ruleNames.length ? 
ruleNames[ri] : "" + ri; - s += ruleName; - } - if (p.parentCtx !== null && (ruleNames !== null || !p.parentCtx.isEmpty())) { - s += " "; - } - p = p.parentCtx; - } - s += "]"; - return s; - } - }; - module2.exports = RuleContext; -}); - -// node_modules/antlr4/src/antlr4/PredictionContext.js -var require_PredictionContext2 = __commonJS((exports, module2) => { - var RuleContext = require_RuleContext2(); - var {Hash: Hash2, Map: Map2, equalArrays} = require_Utils2(); - var PredictionContext = class { - constructor(cachedHashCode) { - this.cachedHashCode = cachedHashCode; - } - isEmpty() { - return this === PredictionContext.EMPTY; - } - hasEmptyPath() { - return this.getReturnState(this.length - 1) === PredictionContext.EMPTY_RETURN_STATE; - } - hashCode() { - return this.cachedHashCode; - } - updateHashCode(hash) { - hash.update(this.cachedHashCode); - } - }; - PredictionContext.EMPTY = null; - PredictionContext.EMPTY_RETURN_STATE = 2147483647; - PredictionContext.globalNodeCount = 1; - PredictionContext.id = PredictionContext.globalNodeCount; - var PredictionContextCache = class { - constructor() { - this.cache = new Map2(); - } - add(ctx) { - if (ctx === PredictionContext.EMPTY) { - return PredictionContext.EMPTY; - } - const existing = this.cache.get(ctx) || null; - if (existing !== null) { - return existing; - } - this.cache.put(ctx, ctx); - return ctx; - } - get(ctx) { - return this.cache.get(ctx) || null; - } - get length() { - return this.cache.length; - } - }; - var SingletonPredictionContext = class extends PredictionContext { - constructor(parent, returnState) { - let hashCode = 0; - const hash = new Hash2(); - if (parent !== null) { - hash.update(parent, returnState); - } else { - hash.update(1); - } - hashCode = hash.finish(); - super(hashCode); - this.parentCtx = parent; - this.returnState = returnState; - } - getParent(index) { - return this.parentCtx; - } - getReturnState(index) { - return this.returnState; - } - equals(other) { - if (this === other) 
{ - return true; - } else if (!(other instanceof SingletonPredictionContext)) { - return false; - } else if (this.hashCode() !== other.hashCode()) { - return false; - } else { - if (this.returnState !== other.returnState) - return false; - else if (this.parentCtx == null) - return other.parentCtx == null; - else - return this.parentCtx.equals(other.parentCtx); - } - } - toString() { - const up = this.parentCtx === null ? "" : this.parentCtx.toString(); - if (up.length === 0) { - if (this.returnState === PredictionContext.EMPTY_RETURN_STATE) { - return "$"; - } else { - return "" + this.returnState; - } - } else { - return "" + this.returnState + " " + up; - } - } - get length() { - return 1; - } - static create(parent, returnState) { - if (returnState === PredictionContext.EMPTY_RETURN_STATE && parent === null) { - return PredictionContext.EMPTY; - } else { - return new SingletonPredictionContext(parent, returnState); - } - } - }; - var EmptyPredictionContext = class extends SingletonPredictionContext { - constructor() { - super(null, PredictionContext.EMPTY_RETURN_STATE); - } - isEmpty() { - return true; - } - getParent(index) { - return null; - } - getReturnState(index) { - return this.returnState; - } - equals(other) { - return this === other; - } - toString() { - return "$"; - } - }; - PredictionContext.EMPTY = new EmptyPredictionContext(); - var ArrayPredictionContext = class extends PredictionContext { - constructor(parents, returnStates) { - const h = new Hash2(); - h.update(parents, returnStates); - const hashCode = h.finish(); - super(hashCode); - this.parents = parents; - this.returnStates = returnStates; - return this; - } - isEmpty() { - return this.returnStates[0] === PredictionContext.EMPTY_RETURN_STATE; - } - getParent(index) { - return this.parents[index]; - } - getReturnState(index) { - return this.returnStates[index]; - } - equals(other) { - if (this === other) { - return true; - } else if (!(other instanceof ArrayPredictionContext)) { - return 
false; - } else if (this.hashCode() !== other.hashCode()) { - return false; - } else { - return equalArrays(this.returnStates, other.returnStates) && equalArrays(this.parents, other.parents); - } - } - toString() { - if (this.isEmpty()) { - return "[]"; - } else { - let s = "["; - for (let i = 0; i < this.returnStates.length; i++) { - if (i > 0) { - s = s + ", "; - } - if (this.returnStates[i] === PredictionContext.EMPTY_RETURN_STATE) { - s = s + "$"; - continue; - } - s = s + this.returnStates[i]; - if (this.parents[i] !== null) { - s = s + " " + this.parents[i]; - } else { - s = s + "null"; - } - } - return s + "]"; - } - } - get length() { - return this.returnStates.length; - } - }; - function predictionContextFromRuleContext(atn, outerContext) { - if (outerContext === void 0 || outerContext === null) { - outerContext = RuleContext.EMPTY; - } - if (outerContext.parentCtx === null || outerContext === RuleContext.EMPTY) { - return PredictionContext.EMPTY; - } - const parent = predictionContextFromRuleContext(atn, outerContext.parentCtx); - const state = atn.states[outerContext.invokingState]; - const transition = state.transitions[0]; - return SingletonPredictionContext.create(parent, transition.followState.stateNumber); - } - function merge(a, b, rootIsWildcard, mergeCache) { - if (a === b) { - return a; - } - if (a instanceof SingletonPredictionContext && b instanceof SingletonPredictionContext) { - return mergeSingletons(a, b, rootIsWildcard, mergeCache); - } - if (rootIsWildcard) { - if (a instanceof EmptyPredictionContext) { - return a; - } - if (b instanceof EmptyPredictionContext) { - return b; - } - } - if (a instanceof SingletonPredictionContext) { - a = new ArrayPredictionContext([a.getParent()], [a.returnState]); - } - if (b instanceof SingletonPredictionContext) { - b = new ArrayPredictionContext([b.getParent()], [b.returnState]); - } - return mergeArrays(a, b, rootIsWildcard, mergeCache); - } - function mergeSingletons(a, b, rootIsWildcard, 
mergeCache) { - if (mergeCache !== null) { - let previous = mergeCache.get(a, b); - if (previous !== null) { - return previous; - } - previous = mergeCache.get(b, a); - if (previous !== null) { - return previous; - } - } - const rootMerge = mergeRoot(a, b, rootIsWildcard); - if (rootMerge !== null) { - if (mergeCache !== null) { - mergeCache.set(a, b, rootMerge); - } - return rootMerge; - } - if (a.returnState === b.returnState) { - const parent = merge(a.parentCtx, b.parentCtx, rootIsWildcard, mergeCache); - if (parent === a.parentCtx) { - return a; - } - if (parent === b.parentCtx) { - return b; - } - const spc = SingletonPredictionContext.create(parent, a.returnState); - if (mergeCache !== null) { - mergeCache.set(a, b, spc); - } - return spc; - } else { - let singleParent = null; - if (a === b || a.parentCtx !== null && a.parentCtx === b.parentCtx) { - singleParent = a.parentCtx; - } - if (singleParent !== null) { - const payloads2 = [a.returnState, b.returnState]; - if (a.returnState > b.returnState) { - payloads2[0] = b.returnState; - payloads2[1] = a.returnState; - } - const parents2 = [singleParent, singleParent]; - const apc = new ArrayPredictionContext(parents2, payloads2); - if (mergeCache !== null) { - mergeCache.set(a, b, apc); - } - return apc; - } - const payloads = [a.returnState, b.returnState]; - let parents = [a.parentCtx, b.parentCtx]; - if (a.returnState > b.returnState) { - payloads[0] = b.returnState; - payloads[1] = a.returnState; - parents = [b.parentCtx, a.parentCtx]; - } - const a_ = new ArrayPredictionContext(parents, payloads); - if (mergeCache !== null) { - mergeCache.set(a, b, a_); - } - return a_; - } - } - function mergeRoot(a, b, rootIsWildcard) { - if (rootIsWildcard) { - if (a === PredictionContext.EMPTY) { - return PredictionContext.EMPTY; - } - if (b === PredictionContext.EMPTY) { - return PredictionContext.EMPTY; - } - } else { - if (a === PredictionContext.EMPTY && b === PredictionContext.EMPTY) { - return 
PredictionContext.EMPTY; - } else if (a === PredictionContext.EMPTY) { - const payloads = [ - b.returnState, - PredictionContext.EMPTY_RETURN_STATE - ]; - const parents = [b.parentCtx, null]; - return new ArrayPredictionContext(parents, payloads); - } else if (b === PredictionContext.EMPTY) { - const payloads = [a.returnState, PredictionContext.EMPTY_RETURN_STATE]; - const parents = [a.parentCtx, null]; - return new ArrayPredictionContext(parents, payloads); - } - } - return null; - } - function mergeArrays(a, b, rootIsWildcard, mergeCache) { - if (mergeCache !== null) { - let previous = mergeCache.get(a, b); - if (previous !== null) { - return previous; - } - previous = mergeCache.get(b, a); - if (previous !== null) { - return previous; - } - } - let i = 0; - let j = 0; - let k = 0; - let mergedReturnStates = []; - let mergedParents = []; - while (i < a.returnStates.length && j < b.returnStates.length) { - const a_parent = a.parents[i]; - const b_parent = b.parents[j]; - if (a.returnStates[i] === b.returnStates[j]) { - const payload = a.returnStates[i]; - const bothDollars = payload === PredictionContext.EMPTY_RETURN_STATE && a_parent === null && b_parent === null; - const ax_ax = a_parent !== null && b_parent !== null && a_parent === b_parent; - if (bothDollars || ax_ax) { - mergedParents[k] = a_parent; - mergedReturnStates[k] = payload; - } else { - mergedParents[k] = merge(a_parent, b_parent, rootIsWildcard, mergeCache); - mergedReturnStates[k] = payload; - } - i += 1; - j += 1; - } else if (a.returnStates[i] < b.returnStates[j]) { - mergedParents[k] = a_parent; - mergedReturnStates[k] = a.returnStates[i]; - i += 1; - } else { - mergedParents[k] = b_parent; - mergedReturnStates[k] = b.returnStates[j]; - j += 1; - } - k += 1; - } - if (i < a.returnStates.length) { - for (let p = i; p < a.returnStates.length; p++) { - mergedParents[k] = a.parents[p]; - mergedReturnStates[k] = a.returnStates[p]; - k += 1; - } - } else { - for (let p = j; p < b.returnStates.length; 
p++) { - mergedParents[k] = b.parents[p]; - mergedReturnStates[k] = b.returnStates[p]; - k += 1; - } - } - if (k < mergedParents.length) { - if (k === 1) { - const a_ = SingletonPredictionContext.create(mergedParents[0], mergedReturnStates[0]); - if (mergeCache !== null) { - mergeCache.set(a, b, a_); - } - return a_; - } - mergedParents = mergedParents.slice(0, k); - mergedReturnStates = mergedReturnStates.slice(0, k); - } - const M = new ArrayPredictionContext(mergedParents, mergedReturnStates); - if (M === a) { - if (mergeCache !== null) { - mergeCache.set(a, b, a); - } - return a; - } - if (M === b) { - if (mergeCache !== null) { - mergeCache.set(a, b, b); - } - return b; - } - combineCommonParents(mergedParents); - if (mergeCache !== null) { - mergeCache.set(a, b, M); - } - return M; - } - function combineCommonParents(parents) { - const uniqueParents = new Map2(); - for (let p = 0; p < parents.length; p++) { - const parent = parents[p]; - if (!uniqueParents.containsKey(parent)) { - uniqueParents.put(parent, parent); - } - } - for (let q = 0; q < parents.length; q++) { - parents[q] = uniqueParents.get(parents[q]); - } - } - function getCachedPredictionContext(context, contextCache, visited) { - if (context.isEmpty()) { - return context; - } - let existing = visited.get(context) || null; - if (existing !== null) { - return existing; - } - existing = contextCache.get(context); - if (existing !== null) { - visited.put(context, existing); - return existing; - } - let changed = false; - let parents = []; - for (let i = 0; i < parents.length; i++) { - const parent = getCachedPredictionContext(context.getParent(i), contextCache, visited); - if (changed || parent !== context.getParent(i)) { - if (!changed) { - parents = []; - for (let j = 0; j < context.length; j++) { - parents[j] = context.getParent(j); - } - changed = true; - } - parents[i] = parent; - } - } - if (!changed) { - contextCache.add(context); - visited.put(context, context); - return context; - } - let 
updated = null; - if (parents.length === 0) { - updated = PredictionContext.EMPTY; - } else if (parents.length === 1) { - updated = SingletonPredictionContext.create(parents[0], context.getReturnState(0)); - } else { - updated = new ArrayPredictionContext(parents, context.returnStates); - } - contextCache.add(updated); - visited.put(updated, updated); - visited.put(context, updated); - return updated; - } - module2.exports = { - merge, - PredictionContext, - PredictionContextCache, - SingletonPredictionContext, - predictionContextFromRuleContext, - getCachedPredictionContext - }; -}); - -// node_modules/antlr4/src/antlr4/LL1Analyzer.js -var require_LL1Analyzer2 = __commonJS((exports, module2) => { - var {Set: Set2, BitSet} = require_Utils2(); - var {Token: Token2} = require_Token2(); - var {ATNConfig} = require_ATNConfig2(); - var {IntervalSet} = require_IntervalSet2(); - var {RuleStopState} = require_ATNState2(); - var {RuleTransition, NotSetTransition, WildcardTransition, AbstractPredicateTransition} = require_Transition2(); - var {predictionContextFromRuleContext, PredictionContext, SingletonPredictionContext} = require_PredictionContext2(); - var LL1Analyzer = class { - constructor(atn) { - this.atn = atn; - } - getDecisionLookahead(s) { - if (s === null) { - return null; - } - const count = s.transitions.length; - const look = []; - for (let alt = 0; alt < count; alt++) { - look[alt] = new IntervalSet(); - const lookBusy = new Set2(); - const seeThruPreds = false; - this._LOOK(s.transition(alt).target, null, PredictionContext.EMPTY, look[alt], lookBusy, new BitSet(), seeThruPreds, false); - if (look[alt].length === 0 || look[alt].contains(LL1Analyzer.HIT_PRED)) { - look[alt] = null; - } - } - return look; - } - LOOK(s, stopState, ctx) { - const r = new IntervalSet(); - const seeThruPreds = true; - ctx = ctx || null; - const lookContext = ctx !== null ? 
predictionContextFromRuleContext(s.atn, ctx) : null; - this._LOOK(s, stopState, lookContext, r, new Set2(), new BitSet(), seeThruPreds, true); - return r; - } - _LOOK(s, stopState, ctx, look, lookBusy, calledRuleStack, seeThruPreds, addEOF) { - const c = new ATNConfig({state: s, alt: 0, context: ctx}, null); - if (lookBusy.contains(c)) { - return; - } - lookBusy.add(c); - if (s === stopState) { - if (ctx === null) { - look.addOne(Token2.EPSILON); - return; - } else if (ctx.isEmpty() && addEOF) { - look.addOne(Token2.EOF); - return; - } - } - if (s instanceof RuleStopState) { - if (ctx === null) { - look.addOne(Token2.EPSILON); - return; - } else if (ctx.isEmpty() && addEOF) { - look.addOne(Token2.EOF); - return; - } - if (ctx !== PredictionContext.EMPTY) { - const removed = calledRuleStack.contains(s.ruleIndex); - try { - calledRuleStack.remove(s.ruleIndex); - for (let i = 0; i < ctx.length; i++) { - const returnState = this.atn.states[ctx.getReturnState(i)]; - this._LOOK(returnState, stopState, ctx.getParent(i), look, lookBusy, calledRuleStack, seeThruPreds, addEOF); - } - } finally { - if (removed) { - calledRuleStack.add(s.ruleIndex); - } - } - return; - } - } - for (let j = 0; j < s.transitions.length; j++) { - const t = s.transitions[j]; - if (t.constructor === RuleTransition) { - if (calledRuleStack.contains(t.target.ruleIndex)) { - continue; - } - const newContext = SingletonPredictionContext.create(ctx, t.followState.stateNumber); - try { - calledRuleStack.add(t.target.ruleIndex); - this._LOOK(t.target, stopState, newContext, look, lookBusy, calledRuleStack, seeThruPreds, addEOF); - } finally { - calledRuleStack.remove(t.target.ruleIndex); - } - } else if (t instanceof AbstractPredicateTransition) { - if (seeThruPreds) { - this._LOOK(t.target, stopState, ctx, look, lookBusy, calledRuleStack, seeThruPreds, addEOF); - } else { - look.addOne(LL1Analyzer.HIT_PRED); - } - } else if (t.isEpsilon) { - this._LOOK(t.target, stopState, ctx, look, lookBusy, 
calledRuleStack, seeThruPreds, addEOF); - } else if (t.constructor === WildcardTransition) { - look.addRange(Token2.MIN_USER_TOKEN_TYPE, this.atn.maxTokenType); - } else { - let set = t.label; - if (set !== null) { - if (t instanceof NotSetTransition) { - set = set.complement(Token2.MIN_USER_TOKEN_TYPE, this.atn.maxTokenType); - } - look.addSet(set); - } - } - } - } - }; - LL1Analyzer.HIT_PRED = Token2.INVALID_TYPE; - module2.exports = LL1Analyzer; -}); - -// node_modules/antlr4/src/antlr4/atn/ATN.js -var require_ATN2 = __commonJS((exports, module2) => { - var LL1Analyzer = require_LL1Analyzer2(); - var {IntervalSet} = require_IntervalSet2(); - var {Token: Token2} = require_Token2(); - var ATN2 = class { - constructor(grammarType, maxTokenType) { - this.grammarType = grammarType; - this.maxTokenType = maxTokenType; - this.states = []; - this.decisionToState = []; - this.ruleToStartState = []; - this.ruleToStopState = null; - this.modeNameToStartState = {}; - this.ruleToTokenType = null; - this.lexerActions = null; - this.modeToStartState = []; - } - nextTokensInContext(s, ctx) { - const anal = new LL1Analyzer(this); - return anal.LOOK(s, null, ctx); - } - nextTokensNoContext(s) { - if (s.nextTokenWithinRule !== null) { - return s.nextTokenWithinRule; - } - s.nextTokenWithinRule = this.nextTokensInContext(s, null); - s.nextTokenWithinRule.readOnly = true; - return s.nextTokenWithinRule; - } - nextTokens(s, ctx) { - if (ctx === void 0) { - return this.nextTokensNoContext(s); - } else { - return this.nextTokensInContext(s, ctx); - } - } - addState(state) { - if (state !== null) { - state.atn = this; - state.stateNumber = this.states.length; - } - this.states.push(state); - } - removeState(state) { - this.states[state.stateNumber] = null; - } - defineDecisionState(s) { - this.decisionToState.push(s); - s.decision = this.decisionToState.length - 1; - return s.decision; - } - getDecisionState(decision) { - if (this.decisionToState.length === 0) { - return null; - } else 
{ - return this.decisionToState[decision]; - } - } - getExpectedTokens(stateNumber, ctx) { - if (stateNumber < 0 || stateNumber >= this.states.length) { - throw "Invalid state number."; - } - const s = this.states[stateNumber]; - let following = this.nextTokens(s); - if (!following.contains(Token2.EPSILON)) { - return following; - } - const expected = new IntervalSet(); - expected.addSet(following); - expected.removeOne(Token2.EPSILON); - while (ctx !== null && ctx.invokingState >= 0 && following.contains(Token2.EPSILON)) { - const invokingState = this.states[ctx.invokingState]; - const rt = invokingState.transitions[0]; - following = this.nextTokens(rt.followState); - expected.addSet(following); - expected.removeOne(Token2.EPSILON); - ctx = ctx.parentCtx; - } - if (following.contains(Token2.EPSILON)) { - expected.addOne(Token2.EOF); - } - return expected; - } - }; - ATN2.INVALID_ALT_NUMBER = 0; - module2.exports = ATN2; -}); - -// node_modules/antlr4/src/antlr4/atn/ATNType.js -var require_ATNType = __commonJS((exports, module2) => { - module2.exports = { - LEXER: 0, - PARSER: 1 - }; -}); - -// node_modules/antlr4/src/antlr4/atn/ATNDeserializationOptions.js -var require_ATNDeserializationOptions2 = __commonJS((exports, module2) => { - var ATNDeserializationOptions = class { - constructor(copyFrom) { - if (copyFrom === void 0) { - copyFrom = null; - } - this.readOnly = false; - this.verifyATN = copyFrom === null ? true : copyFrom.verifyATN; - this.generateRuleBypassTransitions = copyFrom === null ? 
false : copyFrom.generateRuleBypassTransitions; - } - }; - ATNDeserializationOptions.defaultOptions = new ATNDeserializationOptions(); - ATNDeserializationOptions.defaultOptions.readOnly = true; - module2.exports = ATNDeserializationOptions; -}); - -// node_modules/antlr4/src/antlr4/atn/LexerAction.js -var require_LexerAction = __commonJS((exports, module2) => { - var LexerActionType = { - CHANNEL: 0, - CUSTOM: 1, - MODE: 2, - MORE: 3, - POP_MODE: 4, - PUSH_MODE: 5, - SKIP: 6, - TYPE: 7 - }; - var LexerAction = class { - constructor(action) { - this.actionType = action; - this.isPositionDependent = false; - } - hashCode() { - const hash = new Hash(); - this.updateHashCode(hash); - return hash.finish(); - } - updateHashCode(hash) { - hash.update(this.actionType); - } - equals(other) { - return this === other; - } - }; - var LexerSkipAction = class extends LexerAction { - constructor() { - super(LexerActionType.SKIP); - } - execute(lexer) { - lexer.skip(); - } - toString() { - return "skip"; - } - }; - LexerSkipAction.INSTANCE = new LexerSkipAction(); - var LexerTypeAction = class extends LexerAction { - constructor(type) { - super(LexerActionType.TYPE); - this.type = type; - } - execute(lexer) { - lexer.type = this.type; - } - updateHashCode(hash) { - hash.update(this.actionType, this.type); - } - equals(other) { - if (this === other) { - return true; - } else if (!(other instanceof LexerTypeAction)) { - return false; - } else { - return this.type === other.type; - } - } - toString() { - return "type(" + this.type + ")"; - } - }; - var LexerPushModeAction = class extends LexerAction { - constructor(mode) { - super(LexerActionType.PUSH_MODE); - this.mode = mode; - } - execute(lexer) { - lexer.pushMode(this.mode); - } - updateHashCode(hash) { - hash.update(this.actionType, this.mode); - } - equals(other) { - if (this === other) { - return true; - } else if (!(other instanceof LexerPushModeAction)) { - return false; - } else { - return this.mode === other.mode; - } - } 
- toString() { - return "pushMode(" + this.mode + ")"; - } - }; - var LexerPopModeAction = class extends LexerAction { - constructor() { - super(LexerActionType.POP_MODE); - } - execute(lexer) { - lexer.popMode(); - } - toString() { - return "popMode"; - } - }; - LexerPopModeAction.INSTANCE = new LexerPopModeAction(); - var LexerMoreAction = class extends LexerAction { - constructor() { - super(LexerActionType.MORE); - } - execute(lexer) { - lexer.more(); - } - toString() { - return "more"; - } - }; - LexerMoreAction.INSTANCE = new LexerMoreAction(); - var LexerModeAction = class extends LexerAction { - constructor(mode) { - super(LexerActionType.MODE); - this.mode = mode; - } - execute(lexer) { - lexer.mode(this.mode); - } - updateHashCode(hash) { - hash.update(this.actionType, this.mode); - } - equals(other) { - if (this === other) { - return true; - } else if (!(other instanceof LexerModeAction)) { - return false; - } else { - return this.mode === other.mode; - } - } - toString() { - return "mode(" + this.mode + ")"; - } - }; - var LexerCustomAction = class extends LexerAction { - constructor(ruleIndex, actionIndex) { - super(LexerActionType.CUSTOM); - this.ruleIndex = ruleIndex; - this.actionIndex = actionIndex; - this.isPositionDependent = true; - } - execute(lexer) { - lexer.action(null, this.ruleIndex, this.actionIndex); - } - updateHashCode(hash) { - hash.update(this.actionType, this.ruleIndex, this.actionIndex); - } - equals(other) { - if (this === other) { - return true; - } else if (!(other instanceof LexerCustomAction)) { - return false; - } else { - return this.ruleIndex === other.ruleIndex && this.actionIndex === other.actionIndex; - } - } - }; - var LexerChannelAction = class extends LexerAction { - constructor(channel) { - super(LexerActionType.CHANNEL); - this.channel = channel; - } - execute(lexer) { - lexer._channel = this.channel; - } - updateHashCode(hash) { - hash.update(this.actionType, this.channel); - } - equals(other) { - if (this === 
other) { - return true; - } else if (!(other instanceof LexerChannelAction)) { - return false; - } else { - return this.channel === other.channel; - } - } - toString() { - return "channel(" + this.channel + ")"; - } - }; - var LexerIndexedCustomAction = class extends LexerAction { - constructor(offset, action) { - super(action.actionType); - this.offset = offset; - this.action = action; - this.isPositionDependent = true; - } - execute(lexer) { - this.action.execute(lexer); - } - updateHashCode(hash) { - hash.update(this.actionType, this.offset, this.action); - } - equals(other) { - if (this === other) { - return true; - } else if (!(other instanceof LexerIndexedCustomAction)) { - return false; - } else { - return this.offset === other.offset && this.action === other.action; - } - } - }; - module2.exports = { - LexerActionType, - LexerSkipAction, - LexerChannelAction, - LexerCustomAction, - LexerIndexedCustomAction, - LexerMoreAction, - LexerTypeAction, - LexerPushModeAction, - LexerPopModeAction, - LexerModeAction - }; -}); - -// node_modules/antlr4/src/antlr4/atn/ATNDeserializer.js -var require_ATNDeserializer2 = __commonJS((exports, module2) => { - var {Token: Token2} = require_Token2(); - var ATN2 = require_ATN2(); - var ATNType = require_ATNType(); - var { - ATNState, - BasicState, - DecisionState, - BlockStartState, - BlockEndState, - LoopEndState, - RuleStartState, - RuleStopState, - TokensStartState, - PlusLoopbackState, - StarLoopbackState, - StarLoopEntryState, - PlusBlockStartState, - StarBlockStartState, - BasicBlockStartState - } = require_ATNState2(); - var { - Transition, - AtomTransition: AtomTransition2, - SetTransition, - NotSetTransition, - RuleTransition, - RangeTransition, - ActionTransition, - EpsilonTransition, - WildcardTransition, - PredicateTransition, - PrecedencePredicateTransition - } = require_Transition2(); - var {IntervalSet} = require_IntervalSet2(); - var ATNDeserializationOptions = require_ATNDeserializationOptions2(); - var { - 
LexerActionType, - LexerSkipAction, - LexerChannelAction, - LexerCustomAction, - LexerMoreAction, - LexerTypeAction, - LexerPushModeAction, - LexerPopModeAction, - LexerModeAction - } = require_LexerAction(); - var BASE_SERIALIZED_UUID = "AADB8D7E-AEEF-4415-AD2B-8204D6CF042E"; - var ADDED_UNICODE_SMP = "59627784-3BE5-417A-B9EB-8131A7286089"; - var SUPPORTED_UUIDS = [BASE_SERIALIZED_UUID, ADDED_UNICODE_SMP]; - var SERIALIZED_VERSION = 3; - var SERIALIZED_UUID = ADDED_UNICODE_SMP; - function initArray(length, value) { - const tmp = []; - tmp[length - 1] = value; - return tmp.map(function(i) { - return value; - }); - } - var ATNDeserializer3 = class { - constructor(options) { - if (options === void 0 || options === null) { - options = ATNDeserializationOptions.defaultOptions; - } - this.deserializationOptions = options; - this.stateFactories = null; - this.actionFactories = null; - } - isFeatureSupported(feature, actualUuid) { - const idx1 = SUPPORTED_UUIDS.indexOf(feature); - if (idx1 < 0) { - return false; - } - const idx2 = SUPPORTED_UUIDS.indexOf(actualUuid); - return idx2 >= idx1; - } - deserialize(data) { - this.reset(data); - this.checkVersion(); - this.checkUUID(); - const atn = this.readATN(); - this.readStates(atn); - this.readRules(atn); - this.readModes(atn); - const sets = []; - this.readSets(atn, sets, this.readInt.bind(this)); - if (this.isFeatureSupported(ADDED_UNICODE_SMP, this.uuid)) { - this.readSets(atn, sets, this.readInt32.bind(this)); - } - this.readEdges(atn, sets); - this.readDecisions(atn); - this.readLexerActions(atn); - this.markPrecedenceDecisions(atn); - this.verifyATN(atn); - if (this.deserializationOptions.generateRuleBypassTransitions && atn.grammarType === ATNType.PARSER) { - this.generateRuleBypassTransitions(atn); - this.verifyATN(atn); - } - return atn; - } - reset(data) { - const adjust = function(c) { - const v = c.charCodeAt(0); - return v > 1 ? 
v - 2 : v + 65534; - }; - const temp = data.split("").map(adjust); - temp[0] = data.charCodeAt(0); - this.data = temp; - this.pos = 0; - } - checkVersion() { - const version = this.readInt(); - if (version !== SERIALIZED_VERSION) { - throw "Could not deserialize ATN with version " + version + " (expected " + SERIALIZED_VERSION + ")."; - } - } - checkUUID() { - const uuid = this.readUUID(); - if (SUPPORTED_UUIDS.indexOf(uuid) < 0) { - throw "Could not deserialize ATN with UUID: " + uuid + " (expected " + SERIALIZED_UUID + " or a legacy UUID).", uuid, SERIALIZED_UUID; - } - this.uuid = uuid; - } - readATN() { - const grammarType = this.readInt(); - const maxTokenType = this.readInt(); - return new ATN2(grammarType, maxTokenType); - } - readStates(atn) { - let j, pair, stateNumber; - const loopBackStateNumbers = []; - const endStateNumbers = []; - const nstates = this.readInt(); - for (let i = 0; i < nstates; i++) { - const stype = this.readInt(); - if (stype === ATNState.INVALID_TYPE) { - atn.addState(null); - continue; - } - let ruleIndex = this.readInt(); - if (ruleIndex === 65535) { - ruleIndex = -1; - } - const s = this.stateFactory(stype, ruleIndex); - if (stype === ATNState.LOOP_END) { - const loopBackStateNumber = this.readInt(); - loopBackStateNumbers.push([s, loopBackStateNumber]); - } else if (s instanceof BlockStartState) { - const endStateNumber = this.readInt(); - endStateNumbers.push([s, endStateNumber]); - } - atn.addState(s); - } - for (j = 0; j < loopBackStateNumbers.length; j++) { - pair = loopBackStateNumbers[j]; - pair[0].loopBackState = atn.states[pair[1]]; - } - for (j = 0; j < endStateNumbers.length; j++) { - pair = endStateNumbers[j]; - pair[0].endState = atn.states[pair[1]]; - } - let numNonGreedyStates = this.readInt(); - for (j = 0; j < numNonGreedyStates; j++) { - stateNumber = this.readInt(); - atn.states[stateNumber].nonGreedy = true; - } - let numPrecedenceStates = this.readInt(); - for (j = 0; j < numPrecedenceStates; j++) { - 
stateNumber = this.readInt(); - atn.states[stateNumber].isPrecedenceRule = true; - } - } - readRules(atn) { - let i; - const nrules = this.readInt(); - if (atn.grammarType === ATNType.LEXER) { - atn.ruleToTokenType = initArray(nrules, 0); - } - atn.ruleToStartState = initArray(nrules, 0); - for (i = 0; i < nrules; i++) { - const s = this.readInt(); - atn.ruleToStartState[i] = atn.states[s]; - if (atn.grammarType === ATNType.LEXER) { - let tokenType = this.readInt(); - if (tokenType === 65535) { - tokenType = Token2.EOF; - } - atn.ruleToTokenType[i] = tokenType; - } - } - atn.ruleToStopState = initArray(nrules, 0); - for (i = 0; i < atn.states.length; i++) { - const state = atn.states[i]; - if (!(state instanceof RuleStopState)) { - continue; - } - atn.ruleToStopState[state.ruleIndex] = state; - atn.ruleToStartState[state.ruleIndex].stopState = state; - } - } - readModes(atn) { - const nmodes = this.readInt(); - for (let i = 0; i < nmodes; i++) { - let s = this.readInt(); - atn.modeToStartState.push(atn.states[s]); - } - } - readSets(atn, sets, readUnicode) { - const m = this.readInt(); - for (let i = 0; i < m; i++) { - const iset = new IntervalSet(); - sets.push(iset); - const n = this.readInt(); - const containsEof = this.readInt(); - if (containsEof !== 0) { - iset.addOne(-1); - } - for (let j = 0; j < n; j++) { - const i1 = readUnicode(); - const i2 = readUnicode(); - iset.addRange(i1, i2); - } - } - } - readEdges(atn, sets) { - let i, j, state, trans, target; - const nedges = this.readInt(); - for (i = 0; i < nedges; i++) { - const src = this.readInt(); - const trg = this.readInt(); - const ttype = this.readInt(); - const arg1 = this.readInt(); - const arg2 = this.readInt(); - const arg3 = this.readInt(); - trans = this.edgeFactory(atn, ttype, src, trg, arg1, arg2, arg3, sets); - const srcState = atn.states[src]; - srcState.addTransition(trans); - } - for (i = 0; i < atn.states.length; i++) { - state = atn.states[i]; - for (j = 0; j < state.transitions.length; 
j++) { - const t = state.transitions[j]; - if (!(t instanceof RuleTransition)) { - continue; - } - let outermostPrecedenceReturn = -1; - if (atn.ruleToStartState[t.target.ruleIndex].isPrecedenceRule) { - if (t.precedence === 0) { - outermostPrecedenceReturn = t.target.ruleIndex; - } - } - trans = new EpsilonTransition(t.followState, outermostPrecedenceReturn); - atn.ruleToStopState[t.target.ruleIndex].addTransition(trans); - } - } - for (i = 0; i < atn.states.length; i++) { - state = atn.states[i]; - if (state instanceof BlockStartState) { - if (state.endState === null) { - throw "IllegalState"; - } - if (state.endState.startState !== null) { - throw "IllegalState"; - } - state.endState.startState = state; - } - if (state instanceof PlusLoopbackState) { - for (j = 0; j < state.transitions.length; j++) { - target = state.transitions[j].target; - if (target instanceof PlusBlockStartState) { - target.loopBackState = state; - } - } - } else if (state instanceof StarLoopbackState) { - for (j = 0; j < state.transitions.length; j++) { - target = state.transitions[j].target; - if (target instanceof StarLoopEntryState) { - target.loopBackState = state; - } - } - } - } - } - readDecisions(atn) { - const ndecisions = this.readInt(); - for (let i = 0; i < ndecisions; i++) { - const s = this.readInt(); - const decState = atn.states[s]; - atn.decisionToState.push(decState); - decState.decision = i; - } - } - readLexerActions(atn) { - if (atn.grammarType === ATNType.LEXER) { - const count = this.readInt(); - atn.lexerActions = initArray(count, null); - for (let i = 0; i < count; i++) { - const actionType = this.readInt(); - let data1 = this.readInt(); - if (data1 === 65535) { - data1 = -1; - } - let data2 = this.readInt(); - if (data2 === 65535) { - data2 = -1; - } - atn.lexerActions[i] = this.lexerActionFactory(actionType, data1, data2); - } - } - } - generateRuleBypassTransitions(atn) { - let i; - const count = atn.ruleToStartState.length; - for (i = 0; i < count; i++) { - 
atn.ruleToTokenType[i] = atn.maxTokenType + i + 1; - } - for (i = 0; i < count; i++) { - this.generateRuleBypassTransition(atn, i); - } - } - generateRuleBypassTransition(atn, idx) { - let i, state; - const bypassStart = new BasicBlockStartState(); - bypassStart.ruleIndex = idx; - atn.addState(bypassStart); - const bypassStop = new BlockEndState(); - bypassStop.ruleIndex = idx; - atn.addState(bypassStop); - bypassStart.endState = bypassStop; - atn.defineDecisionState(bypassStart); - bypassStop.startState = bypassStart; - let excludeTransition = null; - let endState = null; - if (atn.ruleToStartState[idx].isPrecedenceRule) { - endState = null; - for (i = 0; i < atn.states.length; i++) { - state = atn.states[i]; - if (this.stateIsEndStateFor(state, idx)) { - endState = state; - excludeTransition = state.loopBackState.transitions[0]; - break; - } - } - if (excludeTransition === null) { - throw "Couldn't identify final state of the precedence rule prefix section."; - } - } else { - endState = atn.ruleToStopState[idx]; - } - for (i = 0; i < atn.states.length; i++) { - state = atn.states[i]; - for (let j = 0; j < state.transitions.length; j++) { - const transition = state.transitions[j]; - if (transition === excludeTransition) { - continue; - } - if (transition.target === endState) { - transition.target = bypassStop; - } - } - } - const ruleToStartState = atn.ruleToStartState[idx]; - const count = ruleToStartState.transitions.length; - while (count > 0) { - bypassStart.addTransition(ruleToStartState.transitions[count - 1]); - ruleToStartState.transitions = ruleToStartState.transitions.slice(-1); - } - atn.ruleToStartState[idx].addTransition(new EpsilonTransition(bypassStart)); - bypassStop.addTransition(new EpsilonTransition(endState)); - const matchState = new BasicState(); - atn.addState(matchState); - matchState.addTransition(new AtomTransition2(bypassStop, atn.ruleToTokenType[idx])); - bypassStart.addTransition(new EpsilonTransition(matchState)); - } - 
stateIsEndStateFor(state, idx) { - if (state.ruleIndex !== idx) { - return null; - } - if (!(state instanceof StarLoopEntryState)) { - return null; - } - const maybeLoopEndState = state.transitions[state.transitions.length - 1].target; - if (!(maybeLoopEndState instanceof LoopEndState)) { - return null; - } - if (maybeLoopEndState.epsilonOnlyTransitions && maybeLoopEndState.transitions[0].target instanceof RuleStopState) { - return state; - } else { - return null; - } - } - markPrecedenceDecisions(atn) { - for (let i = 0; i < atn.states.length; i++) { - const state = atn.states[i]; - if (!(state instanceof StarLoopEntryState)) { - continue; - } - if (atn.ruleToStartState[state.ruleIndex].isPrecedenceRule) { - const maybeLoopEndState = state.transitions[state.transitions.length - 1].target; - if (maybeLoopEndState instanceof LoopEndState) { - if (maybeLoopEndState.epsilonOnlyTransitions && maybeLoopEndState.transitions[0].target instanceof RuleStopState) { - state.isPrecedenceDecision = true; - } - } - } - } - } - verifyATN(atn) { - if (!this.deserializationOptions.verifyATN) { - return; - } - for (let i = 0; i < atn.states.length; i++) { - const state = atn.states[i]; - if (state === null) { - continue; - } - this.checkCondition(state.epsilonOnlyTransitions || state.transitions.length <= 1); - if (state instanceof PlusBlockStartState) { - this.checkCondition(state.loopBackState !== null); - } else if (state instanceof StarLoopEntryState) { - this.checkCondition(state.loopBackState !== null); - this.checkCondition(state.transitions.length === 2); - if (state.transitions[0].target instanceof StarBlockStartState) { - this.checkCondition(state.transitions[1].target instanceof LoopEndState); - this.checkCondition(!state.nonGreedy); - } else if (state.transitions[0].target instanceof LoopEndState) { - this.checkCondition(state.transitions[1].target instanceof StarBlockStartState); - this.checkCondition(state.nonGreedy); - } else { - throw "IllegalState"; - } - } else if 
(state instanceof StarLoopbackState) { - this.checkCondition(state.transitions.length === 1); - this.checkCondition(state.transitions[0].target instanceof StarLoopEntryState); - } else if (state instanceof LoopEndState) { - this.checkCondition(state.loopBackState !== null); - } else if (state instanceof RuleStartState) { - this.checkCondition(state.stopState !== null); - } else if (state instanceof BlockStartState) { - this.checkCondition(state.endState !== null); - } else if (state instanceof BlockEndState) { - this.checkCondition(state.startState !== null); - } else if (state instanceof DecisionState) { - this.checkCondition(state.transitions.length <= 1 || state.decision >= 0); - } else { - this.checkCondition(state.transitions.length <= 1 || state instanceof RuleStopState); - } - } - } - checkCondition(condition, message) { - if (!condition) { - if (message === void 0 || message === null) { - message = "IllegalState"; - } - throw message; - } - } - readInt() { - return this.data[this.pos++]; - } - readInt32() { - const low = this.readInt(); - const high = this.readInt(); - return low | high << 16; - } - readLong() { - const low = this.readInt32(); - const high = this.readInt32(); - return low & 4294967295 | high << 32; - } - readUUID() { - const bb = []; - for (let i = 7; i >= 0; i--) { - const int = this.readInt(); - bb[2 * i + 1] = int & 255; - bb[2 * i] = int >> 8 & 255; - } - return byteToHex[bb[0]] + byteToHex[bb[1]] + byteToHex[bb[2]] + byteToHex[bb[3]] + "-" + byteToHex[bb[4]] + byteToHex[bb[5]] + "-" + byteToHex[bb[6]] + byteToHex[bb[7]] + "-" + byteToHex[bb[8]] + byteToHex[bb[9]] + "-" + byteToHex[bb[10]] + byteToHex[bb[11]] + byteToHex[bb[12]] + byteToHex[bb[13]] + byteToHex[bb[14]] + byteToHex[bb[15]]; - } - edgeFactory(atn, type, src, trg, arg1, arg2, arg3, sets) { - const target = atn.states[trg]; - switch (type) { - case Transition.EPSILON: - return new EpsilonTransition(target); - case Transition.RANGE: - return arg3 !== 0 ? 
new RangeTransition(target, Token2.EOF, arg2) : new RangeTransition(target, arg1, arg2); - case Transition.RULE: - return new RuleTransition(atn.states[arg1], arg2, arg3, target); - case Transition.PREDICATE: - return new PredicateTransition(target, arg1, arg2, arg3 !== 0); - case Transition.PRECEDENCE: - return new PrecedencePredicateTransition(target, arg1); - case Transition.ATOM: - return arg3 !== 0 ? new AtomTransition2(target, Token2.EOF) : new AtomTransition2(target, arg1); - case Transition.ACTION: - return new ActionTransition(target, arg1, arg2, arg3 !== 0); - case Transition.SET: - return new SetTransition(target, sets[arg1]); - case Transition.NOT_SET: - return new NotSetTransition(target, sets[arg1]); - case Transition.WILDCARD: - return new WildcardTransition(target); - default: - throw "The specified transition type: " + type + " is not valid."; - } - } - stateFactory(type, ruleIndex) { - if (this.stateFactories === null) { - const sf = []; - sf[ATNState.INVALID_TYPE] = null; - sf[ATNState.BASIC] = () => new BasicState(); - sf[ATNState.RULE_START] = () => new RuleStartState(); - sf[ATNState.BLOCK_START] = () => new BasicBlockStartState(); - sf[ATNState.PLUS_BLOCK_START] = () => new PlusBlockStartState(); - sf[ATNState.STAR_BLOCK_START] = () => new StarBlockStartState(); - sf[ATNState.TOKEN_START] = () => new TokensStartState(); - sf[ATNState.RULE_STOP] = () => new RuleStopState(); - sf[ATNState.BLOCK_END] = () => new BlockEndState(); - sf[ATNState.STAR_LOOP_BACK] = () => new StarLoopbackState(); - sf[ATNState.STAR_LOOP_ENTRY] = () => new StarLoopEntryState(); - sf[ATNState.PLUS_LOOP_BACK] = () => new PlusLoopbackState(); - sf[ATNState.LOOP_END] = () => new LoopEndState(); - this.stateFactories = sf; - } - if (type > this.stateFactories.length || this.stateFactories[type] === null) { - throw "The specified state type " + type + " is not valid."; - } else { - const s = this.stateFactories[type](); - if (s !== null) { - s.ruleIndex = ruleIndex; - return 
s; - } - } - } - lexerActionFactory(type, data1, data2) { - if (this.actionFactories === null) { - const af = []; - af[LexerActionType.CHANNEL] = (data12, data22) => new LexerChannelAction(data12); - af[LexerActionType.CUSTOM] = (data12, data22) => new LexerCustomAction(data12, data22); - af[LexerActionType.MODE] = (data12, data22) => new LexerModeAction(data12); - af[LexerActionType.MORE] = (data12, data22) => LexerMoreAction.INSTANCE; - af[LexerActionType.POP_MODE] = (data12, data22) => LexerPopModeAction.INSTANCE; - af[LexerActionType.PUSH_MODE] = (data12, data22) => new LexerPushModeAction(data12); - af[LexerActionType.SKIP] = (data12, data22) => LexerSkipAction.INSTANCE; - af[LexerActionType.TYPE] = (data12, data22) => new LexerTypeAction(data12); - this.actionFactories = af; - } - if (type > this.actionFactories.length || this.actionFactories[type] === null) { - throw "The specified lexer action type " + type + " is not valid."; - } else { - return this.actionFactories[type](data1, data2); - } - } - }; - function createByteToHex() { - const bth = []; - for (let i = 0; i < 256; i++) { - bth[i] = (i + 256).toString(16).substr(1).toUpperCase(); - } - return bth; - } - var byteToHex = createByteToHex(); - module2.exports = ATNDeserializer3; -}); - -// node_modules/antlr4/src/antlr4/error/ErrorListener.js -var require_ErrorListener = __commonJS((exports, module2) => { - var ErrorListener2 = class { - syntaxError(recognizer, offendingSymbol, line, column, msg, e) { - } - reportAmbiguity(recognizer, dfa, startIndex, stopIndex, exact, ambigAlts, configs) { - } - reportAttemptingFullContext(recognizer, dfa, startIndex, stopIndex, conflictingAlts, configs) { - } - reportContextSensitivity(recognizer, dfa, startIndex, stopIndex, prediction, configs) { - } - }; - var ConsoleErrorListener = class extends ErrorListener2 { - constructor() { - super(); - } - syntaxError(recognizer, offendingSymbol, line, column, msg, e) { - console.error("line " + line + ":" + column + " " + 
msg); - } - }; - ConsoleErrorListener.INSTANCE = new ConsoleErrorListener(); - var ProxyErrorListener = class extends ErrorListener2 { - constructor(delegates) { - super(); - if (delegates === null) { - throw "delegates"; - } - this.delegates = delegates; - return this; - } - syntaxError(recognizer, offendingSymbol, line, column, msg, e) { - this.delegates.map((d) => d.syntaxError(recognizer, offendingSymbol, line, column, msg, e)); - } - reportAmbiguity(recognizer, dfa, startIndex, stopIndex, exact, ambigAlts, configs) { - this.delegates.map((d) => d.reportAmbiguity(recognizer, dfa, startIndex, stopIndex, exact, ambigAlts, configs)); - } - reportAttemptingFullContext(recognizer, dfa, startIndex, stopIndex, conflictingAlts, configs) { - this.delegates.map((d) => d.reportAttemptingFullContext(recognizer, dfa, startIndex, stopIndex, conflictingAlts, configs)); - } - reportContextSensitivity(recognizer, dfa, startIndex, stopIndex, prediction, configs) { - this.delegates.map((d) => d.reportContextSensitivity(recognizer, dfa, startIndex, stopIndex, prediction, configs)); - } - }; - module2.exports = {ErrorListener: ErrorListener2, ConsoleErrorListener, ProxyErrorListener}; -}); - -// node_modules/antlr4/src/antlr4/Recognizer.js -var require_Recognizer2 = __commonJS((exports, module2) => { - var {Token: Token2} = require_Token2(); - var {ConsoleErrorListener} = require_ErrorListener(); - var {ProxyErrorListener} = require_ErrorListener(); - var Recognizer = class { - constructor() { - this._listeners = [ConsoleErrorListener.INSTANCE]; - this._interp = null; - this._stateNumber = -1; - } - checkVersion(toolVersion) { - const runtimeVersion = "4.9.2"; - if (runtimeVersion !== toolVersion) { - console.log("ANTLR runtime and generated code versions disagree: " + runtimeVersion + "!=" + toolVersion); - } - } - addErrorListener(listener) { - this._listeners.push(listener); - } - removeErrorListeners() { - this._listeners = []; - } - getTokenTypeMap() { - const tokenNames = 
this.getTokenNames(); - if (tokenNames === null) { - throw "The current recognizer does not provide a list of token names."; - } - let result = this.tokenTypeMapCache[tokenNames]; - if (result === void 0) { - result = tokenNames.reduce(function(o, k, i) { - o[k] = i; - }); - result.EOF = Token2.EOF; - this.tokenTypeMapCache[tokenNames] = result; - } - return result; - } - getRuleIndexMap() { - const ruleNames = this.ruleNames; - if (ruleNames === null) { - throw "The current recognizer does not provide a list of rule names."; - } - let result = this.ruleIndexMapCache[ruleNames]; - if (result === void 0) { - result = ruleNames.reduce(function(o, k, i) { - o[k] = i; - }); - this.ruleIndexMapCache[ruleNames] = result; - } - return result; - } - getTokenType(tokenName) { - const ttype = this.getTokenTypeMap()[tokenName]; - if (ttype !== void 0) { - return ttype; - } else { - return Token2.INVALID_TYPE; - } - } - getErrorHeader(e) { - const line = e.getOffendingToken().line; - const column = e.getOffendingToken().column; - return "line " + line + ":" + column; - } - getTokenErrorDisplay(t) { - if (t === null) { - return ""; - } - let s = t.text; - if (s === null) { - if (t.type === Token2.EOF) { - s = ""; - } else { - s = "<" + t.type + ">"; - } - } - s = s.replace("\n", "\\n").replace("\r", "\\r").replace(" ", "\\t"); - return "'" + s + "'"; - } - getErrorListenerDispatch() { - return new ProxyErrorListener(this._listeners); - } - sempred(localctx, ruleIndex, actionIndex) { - return true; - } - precpred(localctx, precedence) { - return true; - } - get state() { - return this._stateNumber; - } - set state(state) { - this._stateNumber = state; - } - }; - Recognizer.tokenTypeMapCache = {}; - Recognizer.ruleIndexMapCache = {}; - module2.exports = Recognizer; -}); - -// node_modules/antlr4/src/antlr4/CommonTokenFactory.js -var require_CommonTokenFactory2 = __commonJS((exports, module2) => { - var CommonToken = require_Token2().CommonToken; - var TokenFactory = class { - }; 
- var CommonTokenFactory = class extends TokenFactory { - constructor(copyText) { - super(); - this.copyText = copyText === void 0 ? false : copyText; - } - create(source, type, text, channel, start, stop, line, column) { - const t = new CommonToken(source, type, channel, start, stop); - t.line = line; - t.column = column; - if (text !== null) { - t.text = text; - } else if (this.copyText && source[1] !== null) { - t.text = source[1].getText(start, stop); - } - return t; - } - createThin(type, text) { - const t = new CommonToken(null, type); - t.text = text; - return t; - } - }; - CommonTokenFactory.DEFAULT = new CommonTokenFactory(); - module2.exports = CommonTokenFactory; -}); - -// node_modules/antlr4/src/antlr4/error/Errors.js -var require_Errors = __commonJS((exports, module2) => { - var {PredicateTransition} = require_Transition2(); - var {Interval} = require_IntervalSet2().Interval; - var RecognitionException2 = class extends Error { - constructor(params) { - super(params.message); - if (!!Error.captureStackTrace) { - Error.captureStackTrace(this, RecognitionException2); - } else { - var stack = new Error().stack; - } - this.message = params.message; - this.recognizer = params.recognizer; - this.input = params.input; - this.ctx = params.ctx; - this.offendingToken = null; - this.offendingState = -1; - if (this.recognizer !== null) { - this.offendingState = this.recognizer.state; - } - } - getExpectedTokens() { - if (this.recognizer !== null) { - return this.recognizer.atn.getExpectedTokens(this.offendingState, this.ctx); - } else { - return null; - } - } - toString() { - return this.message; - } - }; - var LexerNoViableAltException = class extends RecognitionException2 { - constructor(lexer, input, startIndex, deadEndConfigs) { - super({message: "", recognizer: lexer, input, ctx: null}); - this.startIndex = startIndex; - this.deadEndConfigs = deadEndConfigs; - } - toString() { - let symbol = ""; - if (this.startIndex >= 0 && this.startIndex < this.input.size) 
{ - symbol = this.input.getText(new Interval(this.startIndex, this.startIndex)); - } - return "LexerNoViableAltException" + symbol; - } - }; - var NoViableAltException2 = class extends RecognitionException2 { - constructor(recognizer, input, startToken, offendingToken, deadEndConfigs, ctx) { - ctx = ctx || recognizer._ctx; - offendingToken = offendingToken || recognizer.getCurrentToken(); - startToken = startToken || recognizer.getCurrentToken(); - input = input || recognizer.getInputStream(); - super({message: "", recognizer, input, ctx}); - this.deadEndConfigs = deadEndConfigs; - this.startToken = startToken; - this.offendingToken = offendingToken; - } - }; - var InputMismatchException = class extends RecognitionException2 { - constructor(recognizer) { - super({message: "", recognizer, input: recognizer.getInputStream(), ctx: recognizer._ctx}); - this.offendingToken = recognizer.getCurrentToken(); - } - }; - function formatMessage(predicate, message) { - if (message !== null) { - return message; - } else { - return "failed predicate: {" + predicate + "}?"; - } - } - var FailedPredicateException2 = class extends RecognitionException2 { - constructor(recognizer, predicate, message) { - super({ - message: formatMessage(predicate, message || null), - recognizer, - input: recognizer.getInputStream(), - ctx: recognizer._ctx - }); - const s = recognizer._interp.atn.states[recognizer.state]; - const trans = s.transitions[0]; - if (trans instanceof PredicateTransition) { - this.ruleIndex = trans.ruleIndex; - this.predicateIndex = trans.predIndex; - } else { - this.ruleIndex = 0; - this.predicateIndex = 0; - } - this.predicate = predicate; - this.offendingToken = recognizer.getCurrentToken(); - } - }; - var ParseCancellationException = class extends Error { - constructor() { - super(); - Error.captureStackTrace(this, ParseCancellationException); - } - }; - module2.exports = { - RecognitionException: RecognitionException2, - NoViableAltException: NoViableAltException2, - 
LexerNoViableAltException, - InputMismatchException, - FailedPredicateException: FailedPredicateException2, - ParseCancellationException - }; -}); - -// node_modules/antlr4/src/antlr4/Lexer.js -var require_Lexer2 = __commonJS((exports, module2) => { - var {Token: Token2} = require_Token2(); - var Recognizer = require_Recognizer2(); - var CommonTokenFactory = require_CommonTokenFactory2(); - var {RecognitionException: RecognitionException2} = require_Errors(); - var {LexerNoViableAltException} = require_Errors(); - var Lexer2 = class extends Recognizer { - constructor(input) { - super(); - this._input = input; - this._factory = CommonTokenFactory.DEFAULT; - this._tokenFactorySourcePair = [this, input]; - this._interp = null; - this._token = null; - this._tokenStartCharIndex = -1; - this._tokenStartLine = -1; - this._tokenStartColumn = -1; - this._hitEOF = false; - this._channel = Token2.DEFAULT_CHANNEL; - this._type = Token2.INVALID_TYPE; - this._modeStack = []; - this._mode = Lexer2.DEFAULT_MODE; - this._text = null; - } - reset() { - if (this._input !== null) { - this._input.seek(0); - } - this._token = null; - this._type = Token2.INVALID_TYPE; - this._channel = Token2.DEFAULT_CHANNEL; - this._tokenStartCharIndex = -1; - this._tokenStartColumn = -1; - this._tokenStartLine = -1; - this._text = null; - this._hitEOF = false; - this._mode = Lexer2.DEFAULT_MODE; - this._modeStack = []; - this._interp.reset(); - } - nextToken() { - if (this._input === null) { - throw "nextToken requires a non-null input stream."; - } - const tokenStartMarker = this._input.mark(); - try { - while (true) { - if (this._hitEOF) { - this.emitEOF(); - return this._token; - } - this._token = null; - this._channel = Token2.DEFAULT_CHANNEL; - this._tokenStartCharIndex = this._input.index; - this._tokenStartColumn = this._interp.column; - this._tokenStartLine = this._interp.line; - this._text = null; - let continueOuter = false; - while (true) { - this._type = Token2.INVALID_TYPE; - let ttype = 
Lexer2.SKIP; - try { - ttype = this._interp.match(this._input, this._mode); - } catch (e) { - if (e instanceof RecognitionException2) { - this.notifyListeners(e); - this.recover(e); - } else { - console.log(e.stack); - throw e; - } - } - if (this._input.LA(1) === Token2.EOF) { - this._hitEOF = true; - } - if (this._type === Token2.INVALID_TYPE) { - this._type = ttype; - } - if (this._type === Lexer2.SKIP) { - continueOuter = true; - break; - } - if (this._type !== Lexer2.MORE) { - break; - } - } - if (continueOuter) { - continue; - } - if (this._token === null) { - this.emit(); - } - return this._token; - } - } finally { - this._input.release(tokenStartMarker); - } - } - skip() { - this._type = Lexer2.SKIP; - } - more() { - this._type = Lexer2.MORE; - } - mode(m) { - this._mode = m; - } - pushMode(m) { - if (this._interp.debug) { - console.log("pushMode " + m); - } - this._modeStack.push(this._mode); - this.mode(m); - } - popMode() { - if (this._modeStack.length === 0) { - throw "Empty Stack"; - } - if (this._interp.debug) { - console.log("popMode back to " + this._modeStack.slice(0, -1)); - } - this.mode(this._modeStack.pop()); - return this._mode; - } - emitToken(token) { - this._token = token; - } - emit() { - const t = this._factory.create(this._tokenFactorySourcePair, this._type, this._text, this._channel, this._tokenStartCharIndex, this.getCharIndex() - 1, this._tokenStartLine, this._tokenStartColumn); - this.emitToken(t); - return t; - } - emitEOF() { - const cpos = this.column; - const lpos = this.line; - const eof = this._factory.create(this._tokenFactorySourcePair, Token2.EOF, null, Token2.DEFAULT_CHANNEL, this._input.index, this._input.index - 1, lpos, cpos); - this.emitToken(eof); - return eof; - } - getCharIndex() { - return this._input.index; - } - getAllTokens() { - const tokens2 = []; - let t = this.nextToken(); - while (t.type !== Token2.EOF) { - tokens2.push(t); - t = this.nextToken(); - } - return tokens2; - } - notifyListeners(e) { - const start 
= this._tokenStartCharIndex; - const stop = this._input.index; - const text = this._input.getText(start, stop); - const msg = "token recognition error at: '" + this.getErrorDisplay(text) + "'"; - const listener = this.getErrorListenerDispatch(); - listener.syntaxError(this, null, this._tokenStartLine, this._tokenStartColumn, msg, e); - } - getErrorDisplay(s) { - const d = []; - for (let i = 0; i < s.length; i++) { - d.push(s[i]); - } - return d.join(""); - } - getErrorDisplayForChar(c) { - if (c.charCodeAt(0) === Token2.EOF) { - return ""; - } else if (c === "\n") { - return "\\n"; - } else if (c === " ") { - return "\\t"; - } else if (c === "\r") { - return "\\r"; - } else { - return c; - } - } - getCharErrorDisplay(c) { - return "'" + this.getErrorDisplayForChar(c) + "'"; - } - recover(re) { - if (this._input.LA(1) !== Token2.EOF) { - if (re instanceof LexerNoViableAltException) { - this._interp.consume(this._input); - } else { - this._input.consume(); - } - } - } - get inputStream() { - return this._input; - } - set inputStream(input) { - this._input = null; - this._tokenFactorySourcePair = [this, this._input]; - this.reset(); - this._input = input; - this._tokenFactorySourcePair = [this, this._input]; - } - get sourceName() { - return this._input.sourceName; - } - get type() { - return this.type; - } - set type(type) { - this._type = type; - } - get line() { - return this._interp.line; - } - set line(line) { - this._interp.line = line; - } - get column() { - return this._interp.column; - } - set column(column) { - this._interp.column = column; - } - get text() { - if (this._text !== null) { - return this._text; - } else { - return this._interp.getText(this._input); - } - } - set text(text) { - this._text = text; - } - }; - Lexer2.DEFAULT_MODE = 0; - Lexer2.MORE = -2; - Lexer2.SKIP = -3; - Lexer2.DEFAULT_TOKEN_CHANNEL = Token2.DEFAULT_CHANNEL; - Lexer2.HIDDEN = Token2.HIDDEN_CHANNEL; - Lexer2.MIN_CHAR_VALUE = 0; - Lexer2.MAX_CHAR_VALUE = 1114111; - 
module2.exports = Lexer2; -}); - -// node_modules/antlr4/src/antlr4/atn/ATNConfigSet.js -var require_ATNConfigSet2 = __commonJS((exports, module2) => { - var ATN2 = require_ATN2(); - var Utils3 = require_Utils2(); - var {SemanticContext} = require_SemanticContext2(); - var {merge} = require_PredictionContext2(); - function hashATNConfig(c) { - return c.hashCodeForConfigSet(); - } - function equalATNConfigs(a, b) { - if (a === b) { - return true; - } else if (a === null || b === null) { - return false; - } else - return a.equalsForConfigSet(b); - } - var ATNConfigSet = class { - constructor(fullCtx) { - this.configLookup = new Utils3.Set(hashATNConfig, equalATNConfigs); - this.fullCtx = fullCtx === void 0 ? true : fullCtx; - this.readOnly = false; - this.configs = []; - this.uniqueAlt = 0; - this.conflictingAlts = null; - this.hasSemanticContext = false; - this.dipsIntoOuterContext = false; - this.cachedHashCode = -1; - } - add(config, mergeCache) { - if (mergeCache === void 0) { - mergeCache = null; - } - if (this.readOnly) { - throw "This set is readonly"; - } - if (config.semanticContext !== SemanticContext.NONE) { - this.hasSemanticContext = true; - } - if (config.reachesIntoOuterContext > 0) { - this.dipsIntoOuterContext = true; - } - const existing = this.configLookup.add(config); - if (existing === config) { - this.cachedHashCode = -1; - this.configs.push(config); - return true; - } - const rootIsWildcard = !this.fullCtx; - const merged = merge(existing.context, config.context, rootIsWildcard, mergeCache); - existing.reachesIntoOuterContext = Math.max(existing.reachesIntoOuterContext, config.reachesIntoOuterContext); - if (config.precedenceFilterSuppressed) { - existing.precedenceFilterSuppressed = true; - } - existing.context = merged; - return true; - } - getStates() { - const states = new Utils3.Set(); - for (let i = 0; i < this.configs.length; i++) { - states.add(this.configs[i].state); - } - return states; - } - getPredicates() { - const preds = []; - 
for (let i = 0; i < this.configs.length; i++) { - const c = this.configs[i].semanticContext; - if (c !== SemanticContext.NONE) { - preds.push(c.semanticContext); - } - } - return preds; - } - optimizeConfigs(interpreter) { - if (this.readOnly) { - throw "This set is readonly"; - } - if (this.configLookup.length === 0) { - return; - } - for (let i = 0; i < this.configs.length; i++) { - const config = this.configs[i]; - config.context = interpreter.getCachedContext(config.context); - } - } - addAll(coll) { - for (let i = 0; i < coll.length; i++) { - this.add(coll[i]); - } - return false; - } - equals(other) { - return this === other || other instanceof ATNConfigSet && Utils3.equalArrays(this.configs, other.configs) && this.fullCtx === other.fullCtx && this.uniqueAlt === other.uniqueAlt && this.conflictingAlts === other.conflictingAlts && this.hasSemanticContext === other.hasSemanticContext && this.dipsIntoOuterContext === other.dipsIntoOuterContext; - } - hashCode() { - const hash = new Utils3.Hash(); - hash.update(this.configs); - return hash.finish(); - } - updateHashCode(hash) { - if (this.readOnly) { - if (this.cachedHashCode === -1) { - this.cachedHashCode = this.hashCode(); - } - hash.update(this.cachedHashCode); - } else { - hash.update(this.hashCode()); - } - } - isEmpty() { - return this.configs.length === 0; - } - contains(item) { - if (this.configLookup === null) { - throw "This method is not implemented for readonly sets."; - } - return this.configLookup.contains(item); - } - containsFast(item) { - if (this.configLookup === null) { - throw "This method is not implemented for readonly sets."; - } - return this.configLookup.containsFast(item); - } - clear() { - if (this.readOnly) { - throw "This set is readonly"; - } - this.configs = []; - this.cachedHashCode = -1; - this.configLookup = new Utils3.Set(); - } - setReadonly(readOnly) { - this.readOnly = readOnly; - if (readOnly) { - this.configLookup = null; - } - } - toString() { - return 
Utils3.arrayToString(this.configs) + (this.hasSemanticContext ? ",hasSemanticContext=" + this.hasSemanticContext : "") + (this.uniqueAlt !== ATN2.INVALID_ALT_NUMBER ? ",uniqueAlt=" + this.uniqueAlt : "") + (this.conflictingAlts !== null ? ",conflictingAlts=" + this.conflictingAlts : "") + (this.dipsIntoOuterContext ? ",dipsIntoOuterContext" : ""); - } - get items() { - return this.configs; - } - get length() { - return this.configs.length; - } - }; - var OrderedATNConfigSet = class extends ATNConfigSet { - constructor() { - super(); - this.configLookup = new Utils3.Set(); - } - }; - module2.exports = { - ATNConfigSet, - OrderedATNConfigSet - }; -}); - -// node_modules/antlr4/src/antlr4/dfa/DFAState.js -var require_DFAState2 = __commonJS((exports, module2) => { - var {ATNConfigSet} = require_ATNConfigSet2(); - var {Hash: Hash2, Set: Set2} = require_Utils2(); - var PredPrediction = class { - constructor(pred, alt) { - this.alt = alt; - this.pred = pred; - } - toString() { - return "(" + this.pred + ", " + this.alt + ")"; - } - }; - var DFAState = class { - constructor(stateNumber, configs) { - if (stateNumber === null) { - stateNumber = -1; - } - if (configs === null) { - configs = new ATNConfigSet(); - } - this.stateNumber = stateNumber; - this.configs = configs; - this.edges = null; - this.isAcceptState = false; - this.prediction = 0; - this.lexerActionExecutor = null; - this.requiresFullContext = false; - this.predicates = null; - return this; - } - getAltSet() { - const alts = new Set2(); - if (this.configs !== null) { - for (let i = 0; i < this.configs.length; i++) { - const c = this.configs[i]; - alts.add(c.alt); - } - } - if (alts.length === 0) { - return null; - } else { - return alts; - } - } - equals(other) { - return this === other || other instanceof DFAState && this.configs.equals(other.configs); - } - toString() { - let s = "" + this.stateNumber + ":" + this.configs; - if (this.isAcceptState) { - s = s + "=>"; - if (this.predicates !== null) - s = s + 
this.predicates; - else - s = s + this.prediction; - } - return s; - } - hashCode() { - const hash = new Hash2(); - hash.update(this.configs); - return hash.finish(); - } - }; - module2.exports = {DFAState, PredPrediction}; -}); - -// node_modules/antlr4/src/antlr4/atn/ATNSimulator.js -var require_ATNSimulator2 = __commonJS((exports, module2) => { - var {DFAState} = require_DFAState2(); - var {ATNConfigSet} = require_ATNConfigSet2(); - var {getCachedPredictionContext} = require_PredictionContext2(); - var {Map: Map2} = require_Utils2(); - var ATNSimulator = class { - constructor(atn, sharedContextCache) { - this.atn = atn; - this.sharedContextCache = sharedContextCache; - return this; - } - getCachedContext(context) { - if (this.sharedContextCache === null) { - return context; - } - const visited = new Map2(); - return getCachedPredictionContext(context, this.sharedContextCache, visited); - } - }; - ATNSimulator.ERROR = new DFAState(2147483647, new ATNConfigSet()); - module2.exports = ATNSimulator; -}); - -// node_modules/antlr4/src/antlr4/atn/LexerActionExecutor.js -var require_LexerActionExecutor2 = __commonJS((exports, module2) => { - var {hashStuff} = require_Utils2(); - var {LexerIndexedCustomAction} = require_LexerAction(); - var LexerActionExecutor = class { - constructor(lexerActions) { - this.lexerActions = lexerActions === null ? 
[] : lexerActions; - this.cachedHashCode = hashStuff(lexerActions); - return this; - } - fixOffsetBeforeMatch(offset) { - let updatedLexerActions = null; - for (let i = 0; i < this.lexerActions.length; i++) { - if (this.lexerActions[i].isPositionDependent && !(this.lexerActions[i] instanceof LexerIndexedCustomAction)) { - if (updatedLexerActions === null) { - updatedLexerActions = this.lexerActions.concat([]); - } - updatedLexerActions[i] = new LexerIndexedCustomAction(offset, this.lexerActions[i]); - } - } - if (updatedLexerActions === null) { - return this; - } else { - return new LexerActionExecutor(updatedLexerActions); - } - } - execute(lexer, input, startIndex) { - let requiresSeek = false; - const stopIndex = input.index; - try { - for (let i = 0; i < this.lexerActions.length; i++) { - let lexerAction = this.lexerActions[i]; - if (lexerAction instanceof LexerIndexedCustomAction) { - const offset = lexerAction.offset; - input.seek(startIndex + offset); - lexerAction = lexerAction.action; - requiresSeek = startIndex + offset !== stopIndex; - } else if (lexerAction.isPositionDependent) { - input.seek(stopIndex); - requiresSeek = false; - } - lexerAction.execute(lexer); - } - } finally { - if (requiresSeek) { - input.seek(stopIndex); - } - } - } - hashCode() { - return this.cachedHashCode; - } - updateHashCode(hash) { - hash.update(this.cachedHashCode); - } - equals(other) { - if (this === other) { - return true; - } else if (!(other instanceof LexerActionExecutor)) { - return false; - } else if (this.cachedHashCode != other.cachedHashCode) { - return false; - } else if (this.lexerActions.length != other.lexerActions.length) { - return false; - } else { - const numActions = this.lexerActions.length; - for (let idx = 0; idx < numActions; ++idx) { - if (!this.lexerActions[idx].equals(other.lexerActions[idx])) { - return false; - } - } - return true; - } - } - static append(lexerActionExecutor, lexerAction) { - if (lexerActionExecutor === null) { - return new 
LexerActionExecutor([lexerAction]); - } - const lexerActions = lexerActionExecutor.lexerActions.concat([lexerAction]); - return new LexerActionExecutor(lexerActions); - } - }; - module2.exports = LexerActionExecutor; -}); - -// node_modules/antlr4/src/antlr4/atn/LexerATNSimulator.js -var require_LexerATNSimulator2 = __commonJS((exports, module2) => { - var {Token: Token2} = require_Token2(); - var Lexer2 = require_Lexer2(); - var ATN2 = require_ATN2(); - var ATNSimulator = require_ATNSimulator2(); - var {DFAState} = require_DFAState2(); - var {OrderedATNConfigSet} = require_ATNConfigSet2(); - var {PredictionContext} = require_PredictionContext2(); - var {SingletonPredictionContext} = require_PredictionContext2(); - var {RuleStopState} = require_ATNState2(); - var {LexerATNConfig} = require_ATNConfig2(); - var {Transition} = require_Transition2(); - var LexerActionExecutor = require_LexerActionExecutor2(); - var {LexerNoViableAltException} = require_Errors(); - function resetSimState(sim) { - sim.index = -1; - sim.line = 0; - sim.column = -1; - sim.dfaState = null; - } - var SimState = class { - constructor() { - resetSimState(this); - } - reset() { - resetSimState(this); - } - }; - var LexerATNSimulator2 = class extends ATNSimulator { - constructor(recog, atn, decisionToDFA, sharedContextCache) { - super(atn, sharedContextCache); - this.decisionToDFA = decisionToDFA; - this.recog = recog; - this.startIndex = -1; - this.line = 1; - this.column = 0; - this.mode = Lexer2.DEFAULT_MODE; - this.prevAccept = new SimState(); - } - copyState(simulator) { - this.column = simulator.column; - this.line = simulator.line; - this.mode = simulator.mode; - this.startIndex = simulator.startIndex; - } - match(input, mode) { - this.match_calls += 1; - this.mode = mode; - const mark = input.mark(); - try { - this.startIndex = input.index; - this.prevAccept.reset(); - const dfa = this.decisionToDFA[mode]; - if (dfa.s0 === null) { - return this.matchATN(input); - } else { - return 
this.execATN(input, dfa.s0); - } - } finally { - input.release(mark); - } - } - reset() { - this.prevAccept.reset(); - this.startIndex = -1; - this.line = 1; - this.column = 0; - this.mode = Lexer2.DEFAULT_MODE; - } - matchATN(input) { - const startState = this.atn.modeToStartState[this.mode]; - if (LexerATNSimulator2.debug) { - console.log("matchATN mode " + this.mode + " start: " + startState); - } - const old_mode = this.mode; - const s0_closure = this.computeStartState(input, startState); - const suppressEdge = s0_closure.hasSemanticContext; - s0_closure.hasSemanticContext = false; - const next = this.addDFAState(s0_closure); - if (!suppressEdge) { - this.decisionToDFA[this.mode].s0 = next; - } - const predict = this.execATN(input, next); - if (LexerATNSimulator2.debug) { - console.log("DFA after matchATN: " + this.decisionToDFA[old_mode].toLexerString()); - } - return predict; - } - execATN(input, ds0) { - if (LexerATNSimulator2.debug) { - console.log("start state closure=" + ds0.configs); - } - if (ds0.isAcceptState) { - this.captureSimState(this.prevAccept, input, ds0); - } - let t = input.LA(1); - let s = ds0; - while (true) { - if (LexerATNSimulator2.debug) { - console.log("execATN loop starting closure: " + s.configs); - } - let target = this.getExistingTargetState(s, t); - if (target === null) { - target = this.computeTargetState(input, s, t); - } - if (target === ATNSimulator.ERROR) { - break; - } - if (t !== Token2.EOF) { - this.consume(input); - } - if (target.isAcceptState) { - this.captureSimState(this.prevAccept, input, target); - if (t === Token2.EOF) { - break; - } - } - t = input.LA(1); - s = target; - } - return this.failOrAccept(this.prevAccept, input, s.configs, t); - } - getExistingTargetState(s, t) { - if (s.edges === null || t < LexerATNSimulator2.MIN_DFA_EDGE || t > LexerATNSimulator2.MAX_DFA_EDGE) { - return null; - } - let target = s.edges[t - LexerATNSimulator2.MIN_DFA_EDGE]; - if (target === void 0) { - target = null; - } - if 
(LexerATNSimulator2.debug && target !== null) { - console.log("reuse state " + s.stateNumber + " edge to " + target.stateNumber); - } - return target; - } - computeTargetState(input, s, t) { - const reach = new OrderedATNConfigSet(); - this.getReachableConfigSet(input, s.configs, reach, t); - if (reach.items.length === 0) { - if (!reach.hasSemanticContext) { - this.addDFAEdge(s, t, ATNSimulator.ERROR); - } - return ATNSimulator.ERROR; - } - return this.addDFAEdge(s, t, null, reach); - } - failOrAccept(prevAccept, input, reach, t) { - if (this.prevAccept.dfaState !== null) { - const lexerActionExecutor = prevAccept.dfaState.lexerActionExecutor; - this.accept(input, lexerActionExecutor, this.startIndex, prevAccept.index, prevAccept.line, prevAccept.column); - return prevAccept.dfaState.prediction; - } else { - if (t === Token2.EOF && input.index === this.startIndex) { - return Token2.EOF; - } - throw new LexerNoViableAltException(this.recog, input, this.startIndex, reach); - } - } - getReachableConfigSet(input, closure, reach, t) { - let skipAlt = ATN2.INVALID_ALT_NUMBER; - for (let i = 0; i < closure.items.length; i++) { - const cfg = closure.items[i]; - const currentAltReachedAcceptState = cfg.alt === skipAlt; - if (currentAltReachedAcceptState && cfg.passedThroughNonGreedyDecision) { - continue; - } - if (LexerATNSimulator2.debug) { - console.log("testing %s at %s\n", this.getTokenName(t), cfg.toString(this.recog, true)); - } - for (let j = 0; j < cfg.state.transitions.length; j++) { - const trans = cfg.state.transitions[j]; - const target = this.getReachableTarget(trans, t); - if (target !== null) { - let lexerActionExecutor = cfg.lexerActionExecutor; - if (lexerActionExecutor !== null) { - lexerActionExecutor = lexerActionExecutor.fixOffsetBeforeMatch(input.index - this.startIndex); - } - const treatEofAsEpsilon = t === Token2.EOF; - const config = new LexerATNConfig({state: target, lexerActionExecutor}, cfg); - if (this.closure(input, config, reach, 
currentAltReachedAcceptState, true, treatEofAsEpsilon)) { - skipAlt = cfg.alt; - } - } - } - } - } - accept(input, lexerActionExecutor, startIndex, index, line, charPos) { - if (LexerATNSimulator2.debug) { - console.log("ACTION %s\n", lexerActionExecutor); - } - input.seek(index); - this.line = line; - this.column = charPos; - if (lexerActionExecutor !== null && this.recog !== null) { - lexerActionExecutor.execute(this.recog, input, startIndex); - } - } - getReachableTarget(trans, t) { - if (trans.matches(t, 0, Lexer2.MAX_CHAR_VALUE)) { - return trans.target; - } else { - return null; - } - } - computeStartState(input, p) { - const initialContext = PredictionContext.EMPTY; - const configs = new OrderedATNConfigSet(); - for (let i = 0; i < p.transitions.length; i++) { - const target = p.transitions[i].target; - const cfg = new LexerATNConfig({state: target, alt: i + 1, context: initialContext}, null); - this.closure(input, cfg, configs, false, false, false); - } - return configs; - } - closure(input, config, configs, currentAltReachedAcceptState, speculative, treatEofAsEpsilon) { - let cfg = null; - if (LexerATNSimulator2.debug) { - console.log("closure(" + config.toString(this.recog, true) + ")"); - } - if (config.state instanceof RuleStopState) { - if (LexerATNSimulator2.debug) { - if (this.recog !== null) { - console.log("closure at %s rule stop %s\n", this.recog.ruleNames[config.state.ruleIndex], config); - } else { - console.log("closure at rule stop %s\n", config); - } - } - if (config.context === null || config.context.hasEmptyPath()) { - if (config.context === null || config.context.isEmpty()) { - configs.add(config); - return true; - } else { - configs.add(new LexerATNConfig({state: config.state, context: PredictionContext.EMPTY}, config)); - currentAltReachedAcceptState = true; - } - } - if (config.context !== null && !config.context.isEmpty()) { - for (let i = 0; i < config.context.length; i++) { - if (config.context.getReturnState(i) !== 
PredictionContext.EMPTY_RETURN_STATE) { - const newContext = config.context.getParent(i); - const returnState = this.atn.states[config.context.getReturnState(i)]; - cfg = new LexerATNConfig({state: returnState, context: newContext}, config); - currentAltReachedAcceptState = this.closure(input, cfg, configs, currentAltReachedAcceptState, speculative, treatEofAsEpsilon); - } - } - } - return currentAltReachedAcceptState; - } - if (!config.state.epsilonOnlyTransitions) { - if (!currentAltReachedAcceptState || !config.passedThroughNonGreedyDecision) { - configs.add(config); - } - } - for (let j = 0; j < config.state.transitions.length; j++) { - const trans = config.state.transitions[j]; - cfg = this.getEpsilonTarget(input, config, trans, configs, speculative, treatEofAsEpsilon); - if (cfg !== null) { - currentAltReachedAcceptState = this.closure(input, cfg, configs, currentAltReachedAcceptState, speculative, treatEofAsEpsilon); - } - } - return currentAltReachedAcceptState; - } - getEpsilonTarget(input, config, trans, configs, speculative, treatEofAsEpsilon) { - let cfg = null; - if (trans.serializationType === Transition.RULE) { - const newContext = SingletonPredictionContext.create(config.context, trans.followState.stateNumber); - cfg = new LexerATNConfig({state: trans.target, context: newContext}, config); - } else if (trans.serializationType === Transition.PRECEDENCE) { - throw "Precedence predicates are not supported in lexers."; - } else if (trans.serializationType === Transition.PREDICATE) { - if (LexerATNSimulator2.debug) { - console.log("EVAL rule " + trans.ruleIndex + ":" + trans.predIndex); - } - configs.hasSemanticContext = true; - if (this.evaluatePredicate(input, trans.ruleIndex, trans.predIndex, speculative)) { - cfg = new LexerATNConfig({state: trans.target}, config); - } - } else if (trans.serializationType === Transition.ACTION) { - if (config.context === null || config.context.hasEmptyPath()) { - const lexerActionExecutor = 
LexerActionExecutor.append(config.lexerActionExecutor, this.atn.lexerActions[trans.actionIndex]); - cfg = new LexerATNConfig({state: trans.target, lexerActionExecutor}, config); - } else { - cfg = new LexerATNConfig({state: trans.target}, config); - } - } else if (trans.serializationType === Transition.EPSILON) { - cfg = new LexerATNConfig({state: trans.target}, config); - } else if (trans.serializationType === Transition.ATOM || trans.serializationType === Transition.RANGE || trans.serializationType === Transition.SET) { - if (treatEofAsEpsilon) { - if (trans.matches(Token2.EOF, 0, Lexer2.MAX_CHAR_VALUE)) { - cfg = new LexerATNConfig({state: trans.target}, config); - } - } - } - return cfg; - } - evaluatePredicate(input, ruleIndex, predIndex, speculative) { - if (this.recog === null) { - return true; - } - if (!speculative) { - return this.recog.sempred(null, ruleIndex, predIndex); - } - const savedcolumn = this.column; - const savedLine = this.line; - const index = input.index; - const marker = input.mark(); - try { - this.consume(input); - return this.recog.sempred(null, ruleIndex, predIndex); - } finally { - this.column = savedcolumn; - this.line = savedLine; - input.seek(index); - input.release(marker); - } - } - captureSimState(settings, input, dfaState) { - settings.index = input.index; - settings.line = this.line; - settings.column = this.column; - settings.dfaState = dfaState; - } - addDFAEdge(from_, tk, to, cfgs) { - if (to === void 0) { - to = null; - } - if (cfgs === void 0) { - cfgs = null; - } - if (to === null && cfgs !== null) { - const suppressEdge = cfgs.hasSemanticContext; - cfgs.hasSemanticContext = false; - to = this.addDFAState(cfgs); - if (suppressEdge) { - return to; - } - } - if (tk < LexerATNSimulator2.MIN_DFA_EDGE || tk > LexerATNSimulator2.MAX_DFA_EDGE) { - return to; - } - if (LexerATNSimulator2.debug) { - console.log("EDGE " + from_ + " -> " + to + " upon " + tk); - } - if (from_.edges === null) { - from_.edges = []; - } - 
from_.edges[tk - LexerATNSimulator2.MIN_DFA_EDGE] = to; - return to; - } - addDFAState(configs) { - const proposed = new DFAState(null, configs); - let firstConfigWithRuleStopState = null; - for (let i = 0; i < configs.items.length; i++) { - const cfg = configs.items[i]; - if (cfg.state instanceof RuleStopState) { - firstConfigWithRuleStopState = cfg; - break; - } - } - if (firstConfigWithRuleStopState !== null) { - proposed.isAcceptState = true; - proposed.lexerActionExecutor = firstConfigWithRuleStopState.lexerActionExecutor; - proposed.prediction = this.atn.ruleToTokenType[firstConfigWithRuleStopState.state.ruleIndex]; - } - const dfa = this.decisionToDFA[this.mode]; - const existing = dfa.states.get(proposed); - if (existing !== null) { - return existing; - } - const newState = proposed; - newState.stateNumber = dfa.states.length; - configs.setReadonly(true); - newState.configs = configs; - dfa.states.add(newState); - return newState; - } - getDFA(mode) { - return this.decisionToDFA[mode]; - } - getText(input) { - return input.getText(this.startIndex, input.index - 1); - } - consume(input) { - const curChar = input.LA(1); - if (curChar === "\n".charCodeAt(0)) { - this.line += 1; - this.column = 0; - } else { - this.column += 1; - } - input.consume(); - } - getTokenName(tt) { - if (tt === -1) { - return "EOF"; - } else { - return "'" + String.fromCharCode(tt) + "'"; - } - } - }; - LexerATNSimulator2.debug = false; - LexerATNSimulator2.dfa_debug = false; - LexerATNSimulator2.MIN_DFA_EDGE = 0; - LexerATNSimulator2.MAX_DFA_EDGE = 127; - LexerATNSimulator2.match_calls = 0; - module2.exports = LexerATNSimulator2; -}); - -// node_modules/antlr4/src/antlr4/atn/PredictionMode.js -var require_PredictionMode2 = __commonJS((exports, module2) => { - var {Map: Map2, BitSet, AltDict, hashStuff} = require_Utils2(); - var ATN2 = require_ATN2(); - var {RuleStopState} = require_ATNState2(); - var {ATNConfigSet} = require_ATNConfigSet2(); - var {ATNConfig} = require_ATNConfig2(); 
- var {SemanticContext} = require_SemanticContext2(); - var PredictionMode = { - SLL: 0, - LL: 1, - LL_EXACT_AMBIG_DETECTION: 2, - hasSLLConflictTerminatingPrediction: function(mode, configs) { - if (PredictionMode.allConfigsInRuleStopStates(configs)) { - return true; - } - if (mode === PredictionMode.SLL) { - if (configs.hasSemanticContext) { - const dup = new ATNConfigSet(); - for (let i = 0; i < configs.items.length; i++) { - let c = configs.items[i]; - c = new ATNConfig({semanticContext: SemanticContext.NONE}, c); - dup.add(c); - } - configs = dup; - } - } - const altsets = PredictionMode.getConflictingAltSubsets(configs); - return PredictionMode.hasConflictingAltSet(altsets) && !PredictionMode.hasStateAssociatedWithOneAlt(configs); - }, - hasConfigInRuleStopState: function(configs) { - for (let i = 0; i < configs.items.length; i++) { - const c = configs.items[i]; - if (c.state instanceof RuleStopState) { - return true; - } - } - return false; - }, - allConfigsInRuleStopStates: function(configs) { - for (let i = 0; i < configs.items.length; i++) { - const c = configs.items[i]; - if (!(c.state instanceof RuleStopState)) { - return false; - } - } - return true; - }, - resolvesToJustOneViableAlt: function(altsets) { - return PredictionMode.getSingleViableAlt(altsets); - }, - allSubsetsConflict: function(altsets) { - return !PredictionMode.hasNonConflictingAltSet(altsets); - }, - hasNonConflictingAltSet: function(altsets) { - for (let i = 0; i < altsets.length; i++) { - const alts = altsets[i]; - if (alts.length === 1) { - return true; - } - } - return false; - }, - hasConflictingAltSet: function(altsets) { - for (let i = 0; i < altsets.length; i++) { - const alts = altsets[i]; - if (alts.length > 1) { - return true; - } - } - return false; - }, - allSubsetsEqual: function(altsets) { - let first = null; - for (let i = 0; i < altsets.length; i++) { - const alts = altsets[i]; - if (first === null) { - first = alts; - } else if (alts !== first) { - return false; - } - 
} - return true; - }, - getUniqueAlt: function(altsets) { - const all = PredictionMode.getAlts(altsets); - if (all.length === 1) { - return all.minValue(); - } else { - return ATN2.INVALID_ALT_NUMBER; - } - }, - getAlts: function(altsets) { - const all = new BitSet(); - altsets.map(function(alts) { - all.or(alts); - }); - return all; - }, - getConflictingAltSubsets: function(configs) { - const configToAlts = new Map2(); - configToAlts.hashFunction = function(cfg) { - hashStuff(cfg.state.stateNumber, cfg.context); - }; - configToAlts.equalsFunction = function(c1, c2) { - return c1.state.stateNumber === c2.state.stateNumber && c1.context.equals(c2.context); - }; - configs.items.map(function(cfg) { - let alts = configToAlts.get(cfg); - if (alts === null) { - alts = new BitSet(); - configToAlts.put(cfg, alts); - } - alts.add(cfg.alt); - }); - return configToAlts.getValues(); - }, - getStateToAltMap: function(configs) { - const m = new AltDict(); - configs.items.map(function(c) { - let alts = m.get(c.state); - if (alts === null) { - alts = new BitSet(); - m.put(c.state, alts); - } - alts.add(c.alt); - }); - return m; - }, - hasStateAssociatedWithOneAlt: function(configs) { - const values = PredictionMode.getStateToAltMap(configs).values(); - for (let i = 0; i < values.length; i++) { - if (values[i].length === 1) { - return true; - } - } - return false; - }, - getSingleViableAlt: function(altsets) { - let result = null; - for (let i = 0; i < altsets.length; i++) { - const alts = altsets[i]; - const minAlt = alts.minValue(); - if (result === null) { - result = minAlt; - } else if (result !== minAlt) { - return ATN2.INVALID_ALT_NUMBER; - } - } - return result; - } - }; - module2.exports = PredictionMode; -}); - -// node_modules/antlr4/src/antlr4/ParserRuleContext.js -var require_ParserRuleContext2 = __commonJS((exports, module2) => { - var RuleContext = require_RuleContext2(); - var Tree = require_Tree(); - var INVALID_INTERVAL = Tree.INVALID_INTERVAL; - var TerminalNode = 
Tree.TerminalNode; - var TerminalNodeImpl = Tree.TerminalNodeImpl; - var ErrorNodeImpl = Tree.ErrorNodeImpl; - var Interval = require_IntervalSet2().Interval; - var ParserRuleContext2 = class extends RuleContext { - constructor(parent, invokingStateNumber) { - parent = parent || null; - invokingStateNumber = invokingStateNumber || null; - super(parent, invokingStateNumber); - this.ruleIndex = -1; - this.children = null; - this.start = null; - this.stop = null; - this.exception = null; - } - copyFrom(ctx) { - this.parentCtx = ctx.parentCtx; - this.invokingState = ctx.invokingState; - this.children = null; - this.start = ctx.start; - this.stop = ctx.stop; - if (ctx.children) { - this.children = []; - ctx.children.map(function(child) { - if (child instanceof ErrorNodeImpl) { - this.children.push(child); - child.parentCtx = this; - } - }, this); - } - } - enterRule(listener) { - } - exitRule(listener) { - } - addChild(child) { - if (this.children === null) { - this.children = []; - } - this.children.push(child); - return child; - } - removeLastChild() { - if (this.children !== null) { - this.children.pop(); - } - } - addTokenNode(token) { - const node = new TerminalNodeImpl(token); - this.addChild(node); - node.parentCtx = this; - return node; - } - addErrorNode(badToken) { - const node = new ErrorNodeImpl(badToken); - this.addChild(node); - node.parentCtx = this; - return node; - } - getChild(i, type) { - type = type || null; - if (this.children === null || i < 0 || i >= this.children.length) { - return null; - } - if (type === null) { - return this.children[i]; - } else { - for (let j = 0; j < this.children.length; j++) { - const child = this.children[j]; - if (child instanceof type) { - if (i === 0) { - return child; - } else { - i -= 1; - } - } - } - return null; - } - } - getToken(ttype, i) { - if (this.children === null || i < 0 || i >= this.children.length) { - return null; - } - for (let j = 0; j < this.children.length; j++) { - const child = this.children[j]; 
- if (child instanceof TerminalNode) { - if (child.symbol.type === ttype) { - if (i === 0) { - return child; - } else { - i -= 1; - } - } - } - } - return null; - } - getTokens(ttype) { - if (this.children === null) { - return []; - } else { - const tokens2 = []; - for (let j = 0; j < this.children.length; j++) { - const child = this.children[j]; - if (child instanceof TerminalNode) { - if (child.symbol.type === ttype) { - tokens2.push(child); - } - } - } - return tokens2; - } - } - getTypedRuleContext(ctxType, i) { - return this.getChild(i, ctxType); - } - getTypedRuleContexts(ctxType) { - if (this.children === null) { - return []; - } else { - const contexts = []; - for (let j = 0; j < this.children.length; j++) { - const child = this.children[j]; - if (child instanceof ctxType) { - contexts.push(child); - } - } - return contexts; - } - } - getChildCount() { - if (this.children === null) { - return 0; - } else { - return this.children.length; - } - } - getSourceInterval() { - if (this.start === null || this.stop === null) { - return INVALID_INTERVAL; - } else { - return new Interval(this.start.tokenIndex, this.stop.tokenIndex); - } - } - }; - RuleContext.EMPTY = new ParserRuleContext2(); - module2.exports = ParserRuleContext2; -}); - -// node_modules/antlr4/src/antlr4/atn/ParserATNSimulator.js -var require_ParserATNSimulator2 = __commonJS((exports, module2) => { - var Utils3 = require_Utils2(); - var {Set: Set2, BitSet, DoubleDict} = Utils3; - var ATN2 = require_ATN2(); - var {ATNState, RuleStopState} = require_ATNState2(); - var {ATNConfig} = require_ATNConfig2(); - var {ATNConfigSet} = require_ATNConfigSet2(); - var {Token: Token2} = require_Token2(); - var {DFAState, PredPrediction} = require_DFAState2(); - var ATNSimulator = require_ATNSimulator2(); - var PredictionMode = require_PredictionMode2(); - var RuleContext = require_RuleContext2(); - var ParserRuleContext2 = require_ParserRuleContext2(); - var {SemanticContext} = require_SemanticContext2(); - var 
{PredictionContext} = require_PredictionContext2(); - var {Interval} = require_IntervalSet2(); - var {Transition, SetTransition, NotSetTransition, RuleTransition, ActionTransition} = require_Transition2(); - var {NoViableAltException: NoViableAltException2} = require_Errors(); - var {SingletonPredictionContext, predictionContextFromRuleContext} = require_PredictionContext2(); - var ParserATNSimulator2 = class extends ATNSimulator { - constructor(parser, atn, decisionToDFA, sharedContextCache) { - super(atn, sharedContextCache); - this.parser = parser; - this.decisionToDFA = decisionToDFA; - this.predictionMode = PredictionMode.LL; - this._input = null; - this._startIndex = 0; - this._outerContext = null; - this._dfa = null; - this.mergeCache = null; - this.debug = false; - this.debug_closure = false; - this.debug_add = false; - this.debug_list_atn_decisions = false; - this.dfa_debug = false; - this.retry_debug = false; - } - reset() { - } - adaptivePredict(input, decision, outerContext) { - if (this.debug || this.debug_list_atn_decisions) { - console.log("adaptivePredict decision " + decision + " exec LA(1)==" + this.getLookaheadName(input) + " line " + input.LT(1).line + ":" + input.LT(1).column); - } - this._input = input; - this._startIndex = input.index; - this._outerContext = outerContext; - const dfa = this.decisionToDFA[decision]; - this._dfa = dfa; - const m = input.mark(); - const index = input.index; - try { - let s0; - if (dfa.precedenceDfa) { - s0 = dfa.getPrecedenceStartState(this.parser.getPrecedence()); - } else { - s0 = dfa.s0; - } - if (s0 === null) { - if (outerContext === null) { - outerContext = RuleContext.EMPTY; - } - if (this.debug || this.debug_list_atn_decisions) { - console.log("predictATN decision " + dfa.decision + " exec LA(1)==" + this.getLookaheadName(input) + ", outerContext=" + outerContext.toString(this.parser.ruleNames)); - } - const fullCtx = false; - let s0_closure = this.computeStartState(dfa.atnStartState, RuleContext.EMPTY, 
fullCtx); - if (dfa.precedenceDfa) { - dfa.s0.configs = s0_closure; - s0_closure = this.applyPrecedenceFilter(s0_closure); - s0 = this.addDFAState(dfa, new DFAState(null, s0_closure)); - dfa.setPrecedenceStartState(this.parser.getPrecedence(), s0); - } else { - s0 = this.addDFAState(dfa, new DFAState(null, s0_closure)); - dfa.s0 = s0; - } - } - const alt = this.execATN(dfa, s0, input, index, outerContext); - if (this.debug) { - console.log("DFA after predictATN: " + dfa.toString(this.parser.literalNames)); - } - return alt; - } finally { - this._dfa = null; - this.mergeCache = null; - input.seek(index); - input.release(m); - } - } - execATN(dfa, s0, input, startIndex, outerContext) { - if (this.debug || this.debug_list_atn_decisions) { - console.log("execATN decision " + dfa.decision + " exec LA(1)==" + this.getLookaheadName(input) + " line " + input.LT(1).line + ":" + input.LT(1).column); - } - let alt; - let previousD = s0; - if (this.debug) { - console.log("s0 = " + s0); - } - let t = input.LA(1); - while (true) { - let D = this.getExistingTargetState(previousD, t); - if (D === null) { - D = this.computeTargetState(dfa, previousD, t); - } - if (D === ATNSimulator.ERROR) { - const e = this.noViableAlt(input, outerContext, previousD.configs, startIndex); - input.seek(startIndex); - alt = this.getSynValidOrSemInvalidAltThatFinishedDecisionEntryRule(previousD.configs, outerContext); - if (alt !== ATN2.INVALID_ALT_NUMBER) { - return alt; - } else { - throw e; - } - } - if (D.requiresFullContext && this.predictionMode !== PredictionMode.SLL) { - let conflictingAlts = null; - if (D.predicates !== null) { - if (this.debug) { - console.log("DFA state has preds in DFA sim LL failover"); - } - const conflictIndex = input.index; - if (conflictIndex !== startIndex) { - input.seek(startIndex); - } - conflictingAlts = this.evalSemanticContext(D.predicates, outerContext, true); - if (conflictingAlts.length === 1) { - if (this.debug) { - console.log("Full LL avoided"); - } - 
return conflictingAlts.minValue(); - } - if (conflictIndex !== startIndex) { - input.seek(conflictIndex); - } - } - if (this.dfa_debug) { - console.log("ctx sensitive state " + outerContext + " in " + D); - } - const fullCtx = true; - const s0_closure = this.computeStartState(dfa.atnStartState, outerContext, fullCtx); - this.reportAttemptingFullContext(dfa, conflictingAlts, D.configs, startIndex, input.index); - alt = this.execATNWithFullContext(dfa, D, s0_closure, input, startIndex, outerContext); - return alt; - } - if (D.isAcceptState) { - if (D.predicates === null) { - return D.prediction; - } - const stopIndex = input.index; - input.seek(startIndex); - const alts = this.evalSemanticContext(D.predicates, outerContext, true); - if (alts.length === 0) { - throw this.noViableAlt(input, outerContext, D.configs, startIndex); - } else if (alts.length === 1) { - return alts.minValue(); - } else { - this.reportAmbiguity(dfa, D, startIndex, stopIndex, false, alts, D.configs); - return alts.minValue(); - } - } - previousD = D; - if (t !== Token2.EOF) { - input.consume(); - t = input.LA(1); - } - } - } - getExistingTargetState(previousD, t) { - const edges = previousD.edges; - if (edges === null) { - return null; - } else { - return edges[t + 1] || null; - } - } - computeTargetState(dfa, previousD, t) { - const reach = this.computeReachSet(previousD.configs, t, false); - if (reach === null) { - this.addDFAEdge(dfa, previousD, t, ATNSimulator.ERROR); - return ATNSimulator.ERROR; - } - let D = new DFAState(null, reach); - const predictedAlt = this.getUniqueAlt(reach); - if (this.debug) { - const altSubSets = PredictionMode.getConflictingAltSubsets(reach); - console.log("SLL altSubSets=" + Utils3.arrayToString(altSubSets) + ", previous=" + previousD.configs + ", configs=" + reach + ", predict=" + predictedAlt + ", allSubsetsConflict=" + PredictionMode.allSubsetsConflict(altSubSets) + ", conflictingAlts=" + this.getConflictingAlts(reach)); - } - if (predictedAlt !== 
ATN2.INVALID_ALT_NUMBER) { - D.isAcceptState = true; - D.configs.uniqueAlt = predictedAlt; - D.prediction = predictedAlt; - } else if (PredictionMode.hasSLLConflictTerminatingPrediction(this.predictionMode, reach)) { - D.configs.conflictingAlts = this.getConflictingAlts(reach); - D.requiresFullContext = true; - D.isAcceptState = true; - D.prediction = D.configs.conflictingAlts.minValue(); - } - if (D.isAcceptState && D.configs.hasSemanticContext) { - this.predicateDFAState(D, this.atn.getDecisionState(dfa.decision)); - if (D.predicates !== null) { - D.prediction = ATN2.INVALID_ALT_NUMBER; - } - } - D = this.addDFAEdge(dfa, previousD, t, D); - return D; - } - predicateDFAState(dfaState, decisionState) { - const nalts = decisionState.transitions.length; - const altsToCollectPredsFrom = this.getConflictingAltsOrUniqueAlt(dfaState.configs); - const altToPred = this.getPredsForAmbigAlts(altsToCollectPredsFrom, dfaState.configs, nalts); - if (altToPred !== null) { - dfaState.predicates = this.getPredicatePredictions(altsToCollectPredsFrom, altToPred); - dfaState.prediction = ATN2.INVALID_ALT_NUMBER; - } else { - dfaState.prediction = altsToCollectPredsFrom.minValue(); - } - } - execATNWithFullContext(dfa, D, s0, input, startIndex, outerContext) { - if (this.debug || this.debug_list_atn_decisions) { - console.log("execATNWithFullContext " + s0); - } - const fullCtx = true; - let foundExactAmbig = false; - let reach; - let previous = s0; - input.seek(startIndex); - let t = input.LA(1); - let predictedAlt = -1; - while (true) { - reach = this.computeReachSet(previous, t, fullCtx); - if (reach === null) { - const e = this.noViableAlt(input, outerContext, previous, startIndex); - input.seek(startIndex); - const alt = this.getSynValidOrSemInvalidAltThatFinishedDecisionEntryRule(previous, outerContext); - if (alt !== ATN2.INVALID_ALT_NUMBER) { - return alt; - } else { - throw e; - } - } - const altSubSets = PredictionMode.getConflictingAltSubsets(reach); - if (this.debug) { - 
console.log("LL altSubSets=" + altSubSets + ", predict=" + PredictionMode.getUniqueAlt(altSubSets) + ", resolvesToJustOneViableAlt=" + PredictionMode.resolvesToJustOneViableAlt(altSubSets)); - } - reach.uniqueAlt = this.getUniqueAlt(reach); - if (reach.uniqueAlt !== ATN2.INVALID_ALT_NUMBER) { - predictedAlt = reach.uniqueAlt; - break; - } else if (this.predictionMode !== PredictionMode.LL_EXACT_AMBIG_DETECTION) { - predictedAlt = PredictionMode.resolvesToJustOneViableAlt(altSubSets); - if (predictedAlt !== ATN2.INVALID_ALT_NUMBER) { - break; - } - } else { - if (PredictionMode.allSubsetsConflict(altSubSets) && PredictionMode.allSubsetsEqual(altSubSets)) { - foundExactAmbig = true; - predictedAlt = PredictionMode.getSingleViableAlt(altSubSets); - break; - } - } - previous = reach; - if (t !== Token2.EOF) { - input.consume(); - t = input.LA(1); - } - } - if (reach.uniqueAlt !== ATN2.INVALID_ALT_NUMBER) { - this.reportContextSensitivity(dfa, predictedAlt, reach, startIndex, input.index); - return predictedAlt; - } - this.reportAmbiguity(dfa, D, startIndex, input.index, foundExactAmbig, null, reach); - return predictedAlt; - } - computeReachSet(closure, t, fullCtx) { - if (this.debug) { - console.log("in computeReachSet, starting closure: " + closure); - } - if (this.mergeCache === null) { - this.mergeCache = new DoubleDict(); - } - const intermediate = new ATNConfigSet(fullCtx); - let skippedStopStates = null; - for (let i = 0; i < closure.items.length; i++) { - const c = closure.items[i]; - if (this.debug) { - console.log("testing " + this.getTokenName(t) + " at " + c); - } - if (c.state instanceof RuleStopState) { - if (fullCtx || t === Token2.EOF) { - if (skippedStopStates === null) { - skippedStopStates = []; - } - skippedStopStates.push(c); - if (this.debug_add) { - console.log("added " + c + " to skippedStopStates"); - } - } - continue; - } - for (let j = 0; j < c.state.transitions.length; j++) { - const trans = c.state.transitions[j]; - const target = 
this.getReachableTarget(trans, t); - if (target !== null) { - const cfg = new ATNConfig({state: target}, c); - intermediate.add(cfg, this.mergeCache); - if (this.debug_add) { - console.log("added " + cfg + " to intermediate"); - } - } - } - } - let reach = null; - if (skippedStopStates === null && t !== Token2.EOF) { - if (intermediate.items.length === 1) { - reach = intermediate; - } else if (this.getUniqueAlt(intermediate) !== ATN2.INVALID_ALT_NUMBER) { - reach = intermediate; - } - } - if (reach === null) { - reach = new ATNConfigSet(fullCtx); - const closureBusy = new Set2(); - const treatEofAsEpsilon = t === Token2.EOF; - for (let k = 0; k < intermediate.items.length; k++) { - this.closure(intermediate.items[k], reach, closureBusy, false, fullCtx, treatEofAsEpsilon); - } - } - if (t === Token2.EOF) { - reach = this.removeAllConfigsNotInRuleStopState(reach, reach === intermediate); - } - if (skippedStopStates !== null && (!fullCtx || !PredictionMode.hasConfigInRuleStopState(reach))) { - for (let l = 0; l < skippedStopStates.length; l++) { - reach.add(skippedStopStates[l], this.mergeCache); - } - } - if (reach.items.length === 0) { - return null; - } else { - return reach; - } - } - removeAllConfigsNotInRuleStopState(configs, lookToEndOfRule) { - if (PredictionMode.allConfigsInRuleStopStates(configs)) { - return configs; - } - const result = new ATNConfigSet(configs.fullCtx); - for (let i = 0; i < configs.items.length; i++) { - const config = configs.items[i]; - if (config.state instanceof RuleStopState) { - result.add(config, this.mergeCache); - continue; - } - if (lookToEndOfRule && config.state.epsilonOnlyTransitions) { - const nextTokens = this.atn.nextTokens(config.state); - if (nextTokens.contains(Token2.EPSILON)) { - const endOfRuleState = this.atn.ruleToStopState[config.state.ruleIndex]; - result.add(new ATNConfig({state: endOfRuleState}, config), this.mergeCache); - } - } - } - return result; - } - computeStartState(p, ctx, fullCtx) { - const 
initialContext = predictionContextFromRuleContext(this.atn, ctx); - const configs = new ATNConfigSet(fullCtx); - for (let i = 0; i < p.transitions.length; i++) { - const target = p.transitions[i].target; - const c = new ATNConfig({state: target, alt: i + 1, context: initialContext}, null); - const closureBusy = new Set2(); - this.closure(c, configs, closureBusy, true, fullCtx, false); - } - return configs; - } - applyPrecedenceFilter(configs) { - let config; - const statesFromAlt1 = []; - const configSet = new ATNConfigSet(configs.fullCtx); - for (let i = 0; i < configs.items.length; i++) { - config = configs.items[i]; - if (config.alt !== 1) { - continue; - } - const updatedContext = config.semanticContext.evalPrecedence(this.parser, this._outerContext); - if (updatedContext === null) { - continue; - } - statesFromAlt1[config.state.stateNumber] = config.context; - if (updatedContext !== config.semanticContext) { - configSet.add(new ATNConfig({semanticContext: updatedContext}, config), this.mergeCache); - } else { - configSet.add(config, this.mergeCache); - } - } - for (let i = 0; i < configs.items.length; i++) { - config = configs.items[i]; - if (config.alt === 1) { - continue; - } - if (!config.precedenceFilterSuppressed) { - const context = statesFromAlt1[config.state.stateNumber] || null; - if (context !== null && context.equals(config.context)) { - continue; - } - } - configSet.add(config, this.mergeCache); - } - return configSet; - } - getReachableTarget(trans, ttype) { - if (trans.matches(ttype, 0, this.atn.maxTokenType)) { - return trans.target; - } else { - return null; - } - } - getPredsForAmbigAlts(ambigAlts, configs, nalts) { - let altToPred = []; - for (let i = 0; i < configs.items.length; i++) { - const c = configs.items[i]; - if (ambigAlts.contains(c.alt)) { - altToPred[c.alt] = SemanticContext.orContext(altToPred[c.alt] || null, c.semanticContext); - } - } - let nPredAlts = 0; - for (let i = 1; i < nalts + 1; i++) { - const pred = altToPred[i] || 
null; - if (pred === null) { - altToPred[i] = SemanticContext.NONE; - } else if (pred !== SemanticContext.NONE) { - nPredAlts += 1; - } - } - if (nPredAlts === 0) { - altToPred = null; - } - if (this.debug) { - console.log("getPredsForAmbigAlts result " + Utils3.arrayToString(altToPred)); - } - return altToPred; - } - getPredicatePredictions(ambigAlts, altToPred) { - const pairs = []; - let containsPredicate = false; - for (let i = 1; i < altToPred.length; i++) { - const pred = altToPred[i]; - if (ambigAlts !== null && ambigAlts.contains(i)) { - pairs.push(new PredPrediction(pred, i)); - } - if (pred !== SemanticContext.NONE) { - containsPredicate = true; - } - } - if (!containsPredicate) { - return null; - } - return pairs; - } - getSynValidOrSemInvalidAltThatFinishedDecisionEntryRule(configs, outerContext) { - const cfgs = this.splitAccordingToSemanticValidity(configs, outerContext); - const semValidConfigs = cfgs[0]; - const semInvalidConfigs = cfgs[1]; - let alt = this.getAltThatFinishedDecisionEntryRule(semValidConfigs); - if (alt !== ATN2.INVALID_ALT_NUMBER) { - return alt; - } - if (semInvalidConfigs.items.length > 0) { - alt = this.getAltThatFinishedDecisionEntryRule(semInvalidConfigs); - if (alt !== ATN2.INVALID_ALT_NUMBER) { - return alt; - } - } - return ATN2.INVALID_ALT_NUMBER; - } - getAltThatFinishedDecisionEntryRule(configs) { - const alts = []; - for (let i = 0; i < configs.items.length; i++) { - const c = configs.items[i]; - if (c.reachesIntoOuterContext > 0 || c.state instanceof RuleStopState && c.context.hasEmptyPath()) { - if (alts.indexOf(c.alt) < 0) { - alts.push(c.alt); - } - } - } - if (alts.length === 0) { - return ATN2.INVALID_ALT_NUMBER; - } else { - return Math.min.apply(null, alts); - } - } - splitAccordingToSemanticValidity(configs, outerContext) { - const succeeded = new ATNConfigSet(configs.fullCtx); - const failed = new ATNConfigSet(configs.fullCtx); - for (let i = 0; i < configs.items.length; i++) { - const c = configs.items[i]; - 
if (c.semanticContext !== SemanticContext.NONE) { - const predicateEvaluationResult = c.semanticContext.evaluate(this.parser, outerContext); - if (predicateEvaluationResult) { - succeeded.add(c); - } else { - failed.add(c); - } - } else { - succeeded.add(c); - } - } - return [succeeded, failed]; - } - evalSemanticContext(predPredictions, outerContext, complete) { - const predictions = new BitSet(); - for (let i = 0; i < predPredictions.length; i++) { - const pair = predPredictions[i]; - if (pair.pred === SemanticContext.NONE) { - predictions.add(pair.alt); - if (!complete) { - break; - } - continue; - } - const predicateEvaluationResult = pair.pred.evaluate(this.parser, outerContext); - if (this.debug || this.dfa_debug) { - console.log("eval pred " + pair + "=" + predicateEvaluationResult); - } - if (predicateEvaluationResult) { - if (this.debug || this.dfa_debug) { - console.log("PREDICT " + pair.alt); - } - predictions.add(pair.alt); - if (!complete) { - break; - } - } - } - return predictions; - } - closure(config, configs, closureBusy, collectPredicates, fullCtx, treatEofAsEpsilon) { - const initialDepth = 0; - this.closureCheckingStopState(config, configs, closureBusy, collectPredicates, fullCtx, initialDepth, treatEofAsEpsilon); - } - closureCheckingStopState(config, configs, closureBusy, collectPredicates, fullCtx, depth, treatEofAsEpsilon) { - if (this.debug || this.debug_closure) { - console.log("closure(" + config.toString(this.parser, true) + ")"); - if (config.reachesIntoOuterContext > 50) { - throw "problem"; - } - } - if (config.state instanceof RuleStopState) { - if (!config.context.isEmpty()) { - for (let i = 0; i < config.context.length; i++) { - if (config.context.getReturnState(i) === PredictionContext.EMPTY_RETURN_STATE) { - if (fullCtx) { - configs.add(new ATNConfig({state: config.state, context: PredictionContext.EMPTY}, config), this.mergeCache); - continue; - } else { - if (this.debug) { - console.log("FALLING off rule " + 
this.getRuleName(config.state.ruleIndex)); - } - this.closure_(config, configs, closureBusy, collectPredicates, fullCtx, depth, treatEofAsEpsilon); - } - continue; - } - const returnState = this.atn.states[config.context.getReturnState(i)]; - const newContext = config.context.getParent(i); - const parms = {state: returnState, alt: config.alt, context: newContext, semanticContext: config.semanticContext}; - const c = new ATNConfig(parms, null); - c.reachesIntoOuterContext = config.reachesIntoOuterContext; - this.closureCheckingStopState(c, configs, closureBusy, collectPredicates, fullCtx, depth - 1, treatEofAsEpsilon); - } - return; - } else if (fullCtx) { - configs.add(config, this.mergeCache); - return; - } else { - if (this.debug) { - console.log("FALLING off rule " + this.getRuleName(config.state.ruleIndex)); - } - } - } - this.closure_(config, configs, closureBusy, collectPredicates, fullCtx, depth, treatEofAsEpsilon); - } - closure_(config, configs, closureBusy, collectPredicates, fullCtx, depth, treatEofAsEpsilon) { - const p = config.state; - if (!p.epsilonOnlyTransitions) { - configs.add(config, this.mergeCache); - } - for (let i = 0; i < p.transitions.length; i++) { - if (i === 0 && this.canDropLoopEntryEdgeInLeftRecursiveRule(config)) - continue; - const t = p.transitions[i]; - const continueCollecting = collectPredicates && !(t instanceof ActionTransition); - const c = this.getEpsilonTarget(config, t, continueCollecting, depth === 0, fullCtx, treatEofAsEpsilon); - if (c !== null) { - let newDepth = depth; - if (config.state instanceof RuleStopState) { - if (this._dfa !== null && this._dfa.precedenceDfa) { - if (t.outermostPrecedenceReturn === this._dfa.atnStartState.ruleIndex) { - c.precedenceFilterSuppressed = true; - } - } - c.reachesIntoOuterContext += 1; - if (closureBusy.add(c) !== c) { - continue; - } - configs.dipsIntoOuterContext = true; - newDepth -= 1; - if (this.debug) { - console.log("dips into outer ctx: " + c); - } - } else { - if 
(!t.isEpsilon && closureBusy.add(c) !== c) { - continue; - } - if (t instanceof RuleTransition) { - if (newDepth >= 0) { - newDepth += 1; - } - } - } - this.closureCheckingStopState(c, configs, closureBusy, continueCollecting, fullCtx, newDepth, treatEofAsEpsilon); - } - } - } - canDropLoopEntryEdgeInLeftRecursiveRule(config) { - const p = config.state; - if (p.stateType !== ATNState.STAR_LOOP_ENTRY) - return false; - if (p.stateType !== ATNState.STAR_LOOP_ENTRY || !p.isPrecedenceDecision || config.context.isEmpty() || config.context.hasEmptyPath()) - return false; - const numCtxs = config.context.length; - for (let i = 0; i < numCtxs; i++) { - const returnState = this.atn.states[config.context.getReturnState(i)]; - if (returnState.ruleIndex !== p.ruleIndex) - return false; - } - const decisionStartState = p.transitions[0].target; - const blockEndStateNum = decisionStartState.endState.stateNumber; - const blockEndState = this.atn.states[blockEndStateNum]; - for (let i = 0; i < numCtxs; i++) { - const returnStateNumber = config.context.getReturnState(i); - const returnState = this.atn.states[returnStateNumber]; - if (returnState.transitions.length !== 1 || !returnState.transitions[0].isEpsilon) - return false; - const returnStateTarget = returnState.transitions[0].target; - if (returnState.stateType === ATNState.BLOCK_END && returnStateTarget === p) - continue; - if (returnState === blockEndState) - continue; - if (returnStateTarget === blockEndState) - continue; - if (returnStateTarget.stateType === ATNState.BLOCK_END && returnStateTarget.transitions.length === 1 && returnStateTarget.transitions[0].isEpsilon && returnStateTarget.transitions[0].target === p) - continue; - return false; - } - return true; - } - getRuleName(index) { - if (this.parser !== null && index >= 0) { - return this.parser.ruleNames[index]; - } else { - return ""; - } - } - getEpsilonTarget(config, t, collectPredicates, inContext, fullCtx, treatEofAsEpsilon) { - switch (t.serializationType) { - 
case Transition.RULE: - return this.ruleTransition(config, t); - case Transition.PRECEDENCE: - return this.precedenceTransition(config, t, collectPredicates, inContext, fullCtx); - case Transition.PREDICATE: - return this.predTransition(config, t, collectPredicates, inContext, fullCtx); - case Transition.ACTION: - return this.actionTransition(config, t); - case Transition.EPSILON: - return new ATNConfig({state: t.target}, config); - case Transition.ATOM: - case Transition.RANGE: - case Transition.SET: - if (treatEofAsEpsilon) { - if (t.matches(Token2.EOF, 0, 1)) { - return new ATNConfig({state: t.target}, config); - } - } - return null; - default: - return null; - } - } - actionTransition(config, t) { - if (this.debug) { - const index = t.actionIndex === -1 ? 65535 : t.actionIndex; - console.log("ACTION edge " + t.ruleIndex + ":" + index); - } - return new ATNConfig({state: t.target}, config); - } - precedenceTransition(config, pt, collectPredicates, inContext, fullCtx) { - if (this.debug) { - console.log("PRED (collectPredicates=" + collectPredicates + ") " + pt.precedence + ">=_p, ctx dependent=true"); - if (this.parser !== null) { - console.log("context surrounding pred is " + Utils3.arrayToString(this.parser.getRuleInvocationStack())); - } - } - let c = null; - if (collectPredicates && inContext) { - if (fullCtx) { - const currentPosition = this._input.index; - this._input.seek(this._startIndex); - const predSucceeds = pt.getPredicate().evaluate(this.parser, this._outerContext); - this._input.seek(currentPosition); - if (predSucceeds) { - c = new ATNConfig({state: pt.target}, config); - } - } else { - const newSemCtx = SemanticContext.andContext(config.semanticContext, pt.getPredicate()); - c = new ATNConfig({state: pt.target, semanticContext: newSemCtx}, config); - } - } else { - c = new ATNConfig({state: pt.target}, config); - } - if (this.debug) { - console.log("config from pred transition=" + c); - } - return c; - } - predTransition(config, pt, 
collectPredicates, inContext, fullCtx) { - if (this.debug) { - console.log("PRED (collectPredicates=" + collectPredicates + ") " + pt.ruleIndex + ":" + pt.predIndex + ", ctx dependent=" + pt.isCtxDependent); - if (this.parser !== null) { - console.log("context surrounding pred is " + Utils3.arrayToString(this.parser.getRuleInvocationStack())); - } - } - let c = null; - if (collectPredicates && (pt.isCtxDependent && inContext || !pt.isCtxDependent)) { - if (fullCtx) { - const currentPosition = this._input.index; - this._input.seek(this._startIndex); - const predSucceeds = pt.getPredicate().evaluate(this.parser, this._outerContext); - this._input.seek(currentPosition); - if (predSucceeds) { - c = new ATNConfig({state: pt.target}, config); - } - } else { - const newSemCtx = SemanticContext.andContext(config.semanticContext, pt.getPredicate()); - c = new ATNConfig({state: pt.target, semanticContext: newSemCtx}, config); - } - } else { - c = new ATNConfig({state: pt.target}, config); - } - if (this.debug) { - console.log("config from pred transition=" + c); - } - return c; - } - ruleTransition(config, t) { - if (this.debug) { - console.log("CALL rule " + this.getRuleName(t.target.ruleIndex) + ", ctx=" + config.context); - } - const returnState = t.followState; - const newContext = SingletonPredictionContext.create(config.context, returnState.stateNumber); - return new ATNConfig({state: t.target, context: newContext}, config); - } - getConflictingAlts(configs) { - const altsets = PredictionMode.getConflictingAltSubsets(configs); - return PredictionMode.getAlts(altsets); - } - getConflictingAltsOrUniqueAlt(configs) { - let conflictingAlts = null; - if (configs.uniqueAlt !== ATN2.INVALID_ALT_NUMBER) { - conflictingAlts = new BitSet(); - conflictingAlts.add(configs.uniqueAlt); - } else { - conflictingAlts = configs.conflictingAlts; - } - return conflictingAlts; - } - getTokenName(t) { - if (t === Token2.EOF) { - return "EOF"; - } - if (this.parser !== null && 
this.parser.literalNames !== null) { - if (t >= this.parser.literalNames.length && t >= this.parser.symbolicNames.length) { - console.log("" + t + " ttype out of range: " + this.parser.literalNames); - console.log("" + this.parser.getInputStream().getTokens()); - } else { - const name = this.parser.literalNames[t] || this.parser.symbolicNames[t]; - return name + "<" + t + ">"; - } - } - return "" + t; - } - getLookaheadName(input) { - return this.getTokenName(input.LA(1)); - } - dumpDeadEndConfigs(nvae) { - console.log("dead end configs: "); - const decs = nvae.getDeadEndConfigs(); - for (let i = 0; i < decs.length; i++) { - const c = decs[i]; - let trans = "no edges"; - if (c.state.transitions.length > 0) { - const t = c.state.transitions[0]; - if (t instanceof AtomTransition) { - trans = "Atom " + this.getTokenName(t.label); - } else if (t instanceof SetTransition) { - const neg = t instanceof NotSetTransition; - trans = (neg ? "~" : "") + "Set " + t.set; - } - } - console.error(c.toString(this.parser, true) + ":" + trans); - } - } - noViableAlt(input, outerContext, configs, startIndex) { - return new NoViableAltException2(this.parser, input, input.get(startIndex), input.LT(1), configs, outerContext); - } - getUniqueAlt(configs) { - let alt = ATN2.INVALID_ALT_NUMBER; - for (let i = 0; i < configs.items.length; i++) { - const c = configs.items[i]; - if (alt === ATN2.INVALID_ALT_NUMBER) { - alt = c.alt; - } else if (c.alt !== alt) { - return ATN2.INVALID_ALT_NUMBER; - } - } - return alt; - } - addDFAEdge(dfa, from_, t, to) { - if (this.debug) { - console.log("EDGE " + from_ + " -> " + to + " upon " + this.getTokenName(t)); - } - if (to === null) { - return null; - } - to = this.addDFAState(dfa, to); - if (from_ === null || t < -1 || t > this.atn.maxTokenType) { - return to; - } - if (from_.edges === null) { - from_.edges = []; - } - from_.edges[t + 1] = to; - if (this.debug) { - const literalNames = this.parser === null ? 
null : this.parser.literalNames; - const symbolicNames = this.parser === null ? null : this.parser.symbolicNames; - console.log("DFA=\n" + dfa.toString(literalNames, symbolicNames)); - } - return to; - } - addDFAState(dfa, D) { - if (D === ATNSimulator.ERROR) { - return D; - } - const existing = dfa.states.get(D); - if (existing !== null) { - return existing; - } - D.stateNumber = dfa.states.length; - if (!D.configs.readOnly) { - D.configs.optimizeConfigs(this); - D.configs.setReadonly(true); - } - dfa.states.add(D); - if (this.debug) { - console.log("adding new DFA state: " + D); - } - return D; - } - reportAttemptingFullContext(dfa, conflictingAlts, configs, startIndex, stopIndex) { - if (this.debug || this.retry_debug) { - const interval = new Interval(startIndex, stopIndex + 1); - console.log("reportAttemptingFullContext decision=" + dfa.decision + ":" + configs + ", input=" + this.parser.getTokenStream().getText(interval)); - } - if (this.parser !== null) { - this.parser.getErrorListenerDispatch().reportAttemptingFullContext(this.parser, dfa, startIndex, stopIndex, conflictingAlts, configs); - } - } - reportContextSensitivity(dfa, prediction, configs, startIndex, stopIndex) { - if (this.debug || this.retry_debug) { - const interval = new Interval(startIndex, stopIndex + 1); - console.log("reportContextSensitivity decision=" + dfa.decision + ":" + configs + ", input=" + this.parser.getTokenStream().getText(interval)); - } - if (this.parser !== null) { - this.parser.getErrorListenerDispatch().reportContextSensitivity(this.parser, dfa, startIndex, stopIndex, prediction, configs); - } - } - reportAmbiguity(dfa, D, startIndex, stopIndex, exact, ambigAlts, configs) { - if (this.debug || this.retry_debug) { - const interval = new Interval(startIndex, stopIndex + 1); - console.log("reportAmbiguity " + ambigAlts + ":" + configs + ", input=" + this.parser.getTokenStream().getText(interval)); - } - if (this.parser !== null) { - 
this.parser.getErrorListenerDispatch().reportAmbiguity(this.parser, dfa, startIndex, stopIndex, exact, ambigAlts, configs); - } - } - }; - module2.exports = ParserATNSimulator2; -}); - -// node_modules/antlr4/src/antlr4/atn/index.js -var require_atn = __commonJS((exports) => { - exports.ATN = require_ATN2(); - exports.ATNDeserializer = require_ATNDeserializer2(); - exports.LexerATNSimulator = require_LexerATNSimulator2(); - exports.ParserATNSimulator = require_ParserATNSimulator2(); - exports.PredictionMode = require_PredictionMode2(); -}); - -// node_modules/antlr4/src/antlr4/polyfills/codepointat.js -var require_codepointat = __commonJS(() => { - if (!String.prototype.codePointAt) { - (function() { - "use strict"; - var defineProperty = function() { - let result; - try { - const object = {}; - const $defineProperty = Object.defineProperty; - result = $defineProperty(object, object, object) && $defineProperty; - } catch (error) { - } - return result; - }(); - const codePointAt = function(position) { - if (this == null) { - throw TypeError(); - } - const string = String(this); - const size = string.length; - let index = position ? 
Number(position) : 0; - if (index !== index) { - index = 0; - } - if (index < 0 || index >= size) { - return void 0; - } - const first = string.charCodeAt(index); - let second; - if (first >= 55296 && first <= 56319 && size > index + 1) { - second = string.charCodeAt(index + 1); - if (second >= 56320 && second <= 57343) { - return (first - 55296) * 1024 + second - 56320 + 65536; - } - } - return first; - }; - if (defineProperty) { - defineProperty(String.prototype, "codePointAt", { - value: codePointAt, - configurable: true, - writable: true - }); - } else { - String.prototype.codePointAt = codePointAt; - } - })(); - } -}); - -// node_modules/antlr4/src/antlr4/dfa/DFASerializer.js -var require_DFASerializer2 = __commonJS((exports, module2) => { - var DFASerializer = class { - constructor(dfa, literalNames, symbolicNames) { - this.dfa = dfa; - this.literalNames = literalNames || []; - this.symbolicNames = symbolicNames || []; - } - toString() { - if (this.dfa.s0 === null) { - return null; - } - let buf = ""; - const states = this.dfa.sortedStates(); - for (let i = 0; i < states.length; i++) { - const s = states[i]; - if (s.edges !== null) { - const n = s.edges.length; - for (let j = 0; j < n; j++) { - const t = s.edges[j] || null; - if (t !== null && t.stateNumber !== 2147483647) { - buf = buf.concat(this.getStateString(s)); - buf = buf.concat("-"); - buf = buf.concat(this.getEdgeLabel(j)); - buf = buf.concat("->"); - buf = buf.concat(this.getStateString(t)); - buf = buf.concat("\n"); - } - } - } - } - return buf.length === 0 ? null : buf; - } - getEdgeLabel(i) { - if (i === 0) { - return "EOF"; - } else if (this.literalNames !== null || this.symbolicNames !== null) { - return this.literalNames[i - 1] || this.symbolicNames[i - 1]; - } else { - return String.fromCharCode(i - 1); - } - } - getStateString(s) { - const baseStateStr = (s.isAcceptState ? ":" : "") + "s" + s.stateNumber + (s.requiresFullContext ? 
"^" : ""); - if (s.isAcceptState) { - if (s.predicates !== null) { - return baseStateStr + "=>" + s.predicates.toString(); - } else { - return baseStateStr + "=>" + s.prediction.toString(); - } - } else { - return baseStateStr; - } - } - }; - var LexerDFASerializer = class extends DFASerializer { - constructor(dfa) { - super(dfa, null); - } - getEdgeLabel(i) { - return "'" + String.fromCharCode(i) + "'"; - } - }; - module2.exports = {DFASerializer, LexerDFASerializer}; -}); - -// node_modules/antlr4/src/antlr4/dfa/DFA.js -var require_DFA2 = __commonJS((exports, module2) => { - var {Set: Set2} = require_Utils2(); - var {DFAState} = require_DFAState2(); - var {StarLoopEntryState} = require_ATNState2(); - var {ATNConfigSet} = require_ATNConfigSet2(); - var {DFASerializer} = require_DFASerializer2(); - var {LexerDFASerializer} = require_DFASerializer2(); - var DFA = class { - constructor(atnStartState, decision) { - if (decision === void 0) { - decision = 0; - } - this.atnStartState = atnStartState; - this.decision = decision; - this._states = new Set2(); - this.s0 = null; - this.precedenceDfa = false; - if (atnStartState instanceof StarLoopEntryState) { - if (atnStartState.isPrecedenceDecision) { - this.precedenceDfa = true; - const precedenceState = new DFAState(null, new ATNConfigSet()); - precedenceState.edges = []; - precedenceState.isAcceptState = false; - precedenceState.requiresFullContext = false; - this.s0 = precedenceState; - } - } - } - getPrecedenceStartState(precedence) { - if (!this.precedenceDfa) { - throw "Only precedence DFAs may contain a precedence start state."; - } - if (precedence < 0 || precedence >= this.s0.edges.length) { - return null; - } - return this.s0.edges[precedence] || null; - } - setPrecedenceStartState(precedence, startState) { - if (!this.precedenceDfa) { - throw "Only precedence DFAs may contain a precedence start state."; - } - if (precedence < 0) { - return; - } - this.s0.edges[precedence] = startState; - } - 
setPrecedenceDfa(precedenceDfa) { - if (this.precedenceDfa !== precedenceDfa) { - this._states = new Set2(); - if (precedenceDfa) { - const precedenceState = new DFAState(null, new ATNConfigSet()); - precedenceState.edges = []; - precedenceState.isAcceptState = false; - precedenceState.requiresFullContext = false; - this.s0 = precedenceState; - } else { - this.s0 = null; - } - this.precedenceDfa = precedenceDfa; - } - } - sortedStates() { - const list = this._states.values(); - return list.sort(function(a, b) { - return a.stateNumber - b.stateNumber; - }); - } - toString(literalNames, symbolicNames) { - literalNames = literalNames || null; - symbolicNames = symbolicNames || null; - if (this.s0 === null) { - return ""; - } - const serializer = new DFASerializer(this, literalNames, symbolicNames); - return serializer.toString(); - } - toLexerString() { - if (this.s0 === null) { - return ""; - } - const serializer = new LexerDFASerializer(this); - return serializer.toString(); - } - get states() { - return this._states; - } - }; - module2.exports = DFA; -}); - -// node_modules/antlr4/src/antlr4/dfa/index.js -var require_dfa = __commonJS((exports) => { - exports.DFA = require_DFA2(); - exports.DFASerializer = require_DFASerializer2().DFASerializer; - exports.LexerDFASerializer = require_DFASerializer2().LexerDFASerializer; - exports.PredPrediction = require_DFAState2().PredPrediction; -}); - -// node_modules/antlr4/src/antlr4/polyfills/fromcodepoint.js -var require_fromcodepoint = __commonJS(() => { - if (!String.fromCodePoint) { - (function() { - const defineProperty = function() { - let result; - try { - const object = {}; - const $defineProperty = Object.defineProperty; - result = $defineProperty(object, object, object) && $defineProperty; - } catch (error) { - } - return result; - }(); - const stringFromCharCode = String.fromCharCode; - const floor = Math.floor; - const fromCodePoint = function(_) { - const MAX_SIZE = 16384; - const codeUnits = []; - let 
highSurrogate; - let lowSurrogate; - let index = -1; - const length = arguments.length; - if (!length) { - return ""; - } - let result = ""; - while (++index < length) { - let codePoint = Number(arguments[index]); - if (!isFinite(codePoint) || codePoint < 0 || codePoint > 1114111 || floor(codePoint) !== codePoint) { - throw RangeError("Invalid code point: " + codePoint); - } - if (codePoint <= 65535) { - codeUnits.push(codePoint); - } else { - codePoint -= 65536; - highSurrogate = (codePoint >> 10) + 55296; - lowSurrogate = codePoint % 1024 + 56320; - codeUnits.push(highSurrogate, lowSurrogate); - } - if (index + 1 === length || codeUnits.length > MAX_SIZE) { - result += stringFromCharCode.apply(null, codeUnits); - codeUnits.length = 0; - } - } - return result; - }; - if (defineProperty) { - defineProperty(String, "fromCodePoint", { - value: fromCodePoint, - configurable: true, - writable: true - }); - } else { - String.fromCodePoint = fromCodePoint; - } - })(); - } -}); - -// node_modules/antlr4/src/antlr4/tree/index.js -var require_tree = __commonJS((exports, module2) => { - var Tree = require_Tree(); - var Trees = require_Trees2(); - module2.exports = __objSpread(__objSpread({}, Tree), {Trees}); -}); - -// node_modules/antlr4/src/antlr4/error/DiagnosticErrorListener.js -var require_DiagnosticErrorListener2 = __commonJS((exports, module2) => { - var {BitSet} = require_Utils2(); - var {ErrorListener: ErrorListener2} = require_ErrorListener(); - var {Interval} = require_IntervalSet2(); - var DiagnosticErrorListener = class extends ErrorListener2 { - constructor(exactOnly) { - super(); - exactOnly = exactOnly || true; - this.exactOnly = exactOnly; - } - reportAmbiguity(recognizer, dfa, startIndex, stopIndex, exact, ambigAlts, configs) { - if (this.exactOnly && !exact) { - return; - } - const msg = "reportAmbiguity d=" + this.getDecisionDescription(recognizer, dfa) + ": ambigAlts=" + this.getConflictingAlts(ambigAlts, configs) + ", input='" + 
recognizer.getTokenStream().getText(new Interval(startIndex, stopIndex)) + "'"; - recognizer.notifyErrorListeners(msg); - } - reportAttemptingFullContext(recognizer, dfa, startIndex, stopIndex, conflictingAlts, configs) { - const msg = "reportAttemptingFullContext d=" + this.getDecisionDescription(recognizer, dfa) + ", input='" + recognizer.getTokenStream().getText(new Interval(startIndex, stopIndex)) + "'"; - recognizer.notifyErrorListeners(msg); - } - reportContextSensitivity(recognizer, dfa, startIndex, stopIndex, prediction, configs) { - const msg = "reportContextSensitivity d=" + this.getDecisionDescription(recognizer, dfa) + ", input='" + recognizer.getTokenStream().getText(new Interval(startIndex, stopIndex)) + "'"; - recognizer.notifyErrorListeners(msg); - } - getDecisionDescription(recognizer, dfa) { - const decision = dfa.decision; - const ruleIndex = dfa.atnStartState.ruleIndex; - const ruleNames = recognizer.ruleNames; - if (ruleIndex < 0 || ruleIndex >= ruleNames.length) { - return "" + decision; - } - const ruleName = ruleNames[ruleIndex] || null; - if (ruleName === null || ruleName.length === 0) { - return "" + decision; - } - return `${decision} (${ruleName})`; - } - getConflictingAlts(reportedAlts, configs) { - if (reportedAlts !== null) { - return reportedAlts; - } - const result = new BitSet(); - for (let i = 0; i < configs.items.length; i++) { - result.add(configs.items[i].alt); - } - return `{${result.values().join(", ")}}`; - } - }; - module2.exports = DiagnosticErrorListener; -}); - -// node_modules/antlr4/src/antlr4/error/ErrorStrategy.js -var require_ErrorStrategy = __commonJS((exports, module2) => { - var {Token: Token2} = require_Token2(); - var {NoViableAltException: NoViableAltException2, InputMismatchException, FailedPredicateException: FailedPredicateException2, ParseCancellationException} = require_Errors(); - var {ATNState} = require_ATNState2(); - var {Interval, IntervalSet} = require_IntervalSet2(); - var ErrorStrategy = class { - 
reset(recognizer) { - } - recoverInline(recognizer) { - } - recover(recognizer, e) { - } - sync(recognizer) { - } - inErrorRecoveryMode(recognizer) { - } - reportError(recognizer) { - } - }; - var DefaultErrorStrategy = class extends ErrorStrategy { - constructor() { - super(); - this.errorRecoveryMode = false; - this.lastErrorIndex = -1; - this.lastErrorStates = null; - this.nextTokensContext = null; - this.nextTokenState = 0; - } - reset(recognizer) { - this.endErrorCondition(recognizer); - } - beginErrorCondition(recognizer) { - this.errorRecoveryMode = true; - } - inErrorRecoveryMode(recognizer) { - return this.errorRecoveryMode; - } - endErrorCondition(recognizer) { - this.errorRecoveryMode = false; - this.lastErrorStates = null; - this.lastErrorIndex = -1; - } - reportMatch(recognizer) { - this.endErrorCondition(recognizer); - } - reportError(recognizer, e) { - if (this.inErrorRecoveryMode(recognizer)) { - return; - } - this.beginErrorCondition(recognizer); - if (e instanceof NoViableAltException2) { - this.reportNoViableAlternative(recognizer, e); - } else if (e instanceof InputMismatchException) { - this.reportInputMismatch(recognizer, e); - } else if (e instanceof FailedPredicateException2) { - this.reportFailedPredicate(recognizer, e); - } else { - console.log("unknown recognition error type: " + e.constructor.name); - console.log(e.stack); - recognizer.notifyErrorListeners(e.getOffendingToken(), e.getMessage(), e); - } - } - recover(recognizer, e) { - if (this.lastErrorIndex === recognizer.getInputStream().index && this.lastErrorStates !== null && this.lastErrorStates.indexOf(recognizer.state) >= 0) { - recognizer.consume(); - } - this.lastErrorIndex = recognizer._input.index; - if (this.lastErrorStates === null) { - this.lastErrorStates = []; - } - this.lastErrorStates.push(recognizer.state); - const followSet = this.getErrorRecoverySet(recognizer); - this.consumeUntil(recognizer, followSet); - } - sync(recognizer) { - if 
(this.inErrorRecoveryMode(recognizer)) { - return; - } - const s = recognizer._interp.atn.states[recognizer.state]; - const la = recognizer.getTokenStream().LA(1); - const nextTokens = recognizer.atn.nextTokens(s); - if (nextTokens.contains(la)) { - this.nextTokensContext = null; - this.nextTokenState = ATNState.INVALID_STATE_NUMBER; - return; - } else if (nextTokens.contains(Token2.EPSILON)) { - if (this.nextTokensContext === null) { - this.nextTokensContext = recognizer._ctx; - this.nextTokensState = recognizer._stateNumber; - } - return; - } - switch (s.stateType) { - case ATNState.BLOCK_START: - case ATNState.STAR_BLOCK_START: - case ATNState.PLUS_BLOCK_START: - case ATNState.STAR_LOOP_ENTRY: - if (this.singleTokenDeletion(recognizer) !== null) { - return; - } else { - throw new InputMismatchException(recognizer); - } - case ATNState.PLUS_LOOP_BACK: - case ATNState.STAR_LOOP_BACK: - this.reportUnwantedToken(recognizer); - const expecting = new IntervalSet(); - expecting.addSet(recognizer.getExpectedTokens()); - const whatFollowsLoopIterationOrRule = expecting.addSet(this.getErrorRecoverySet(recognizer)); - this.consumeUntil(recognizer, whatFollowsLoopIterationOrRule); - break; - default: - } - } - reportNoViableAlternative(recognizer, e) { - const tokens2 = recognizer.getTokenStream(); - let input; - if (tokens2 !== null) { - if (e.startToken.type === Token2.EOF) { - input = ""; - } else { - input = tokens2.getText(new Interval(e.startToken.tokenIndex, e.offendingToken.tokenIndex)); - } - } else { - input = ""; - } - const msg = "no viable alternative at input " + this.escapeWSAndQuote(input); - recognizer.notifyErrorListeners(msg, e.offendingToken, e); - } - reportInputMismatch(recognizer, e) { - const msg = "mismatched input " + this.getTokenErrorDisplay(e.offendingToken) + " expecting " + e.getExpectedTokens().toString(recognizer.literalNames, recognizer.symbolicNames); - recognizer.notifyErrorListeners(msg, e.offendingToken, e); - } - 
reportFailedPredicate(recognizer, e) { - const ruleName = recognizer.ruleNames[recognizer._ctx.ruleIndex]; - const msg = "rule " + ruleName + " " + e.message; - recognizer.notifyErrorListeners(msg, e.offendingToken, e); - } - reportUnwantedToken(recognizer) { - if (this.inErrorRecoveryMode(recognizer)) { - return; - } - this.beginErrorCondition(recognizer); - const t = recognizer.getCurrentToken(); - const tokenName = this.getTokenErrorDisplay(t); - const expecting = this.getExpectedTokens(recognizer); - const msg = "extraneous input " + tokenName + " expecting " + expecting.toString(recognizer.literalNames, recognizer.symbolicNames); - recognizer.notifyErrorListeners(msg, t, null); - } - reportMissingToken(recognizer) { - if (this.inErrorRecoveryMode(recognizer)) { - return; - } - this.beginErrorCondition(recognizer); - const t = recognizer.getCurrentToken(); - const expecting = this.getExpectedTokens(recognizer); - const msg = "missing " + expecting.toString(recognizer.literalNames, recognizer.symbolicNames) + " at " + this.getTokenErrorDisplay(t); - recognizer.notifyErrorListeners(msg, t, null); - } - recoverInline(recognizer) { - const matchedSymbol = this.singleTokenDeletion(recognizer); - if (matchedSymbol !== null) { - recognizer.consume(); - return matchedSymbol; - } - if (this.singleTokenInsertion(recognizer)) { - return this.getMissingSymbol(recognizer); - } - throw new InputMismatchException(recognizer); - } - singleTokenInsertion(recognizer) { - const currentSymbolType = recognizer.getTokenStream().LA(1); - const atn = recognizer._interp.atn; - const currentState = atn.states[recognizer.state]; - const next = currentState.transitions[0].target; - const expectingAtLL2 = atn.nextTokens(next, recognizer._ctx); - if (expectingAtLL2.contains(currentSymbolType)) { - this.reportMissingToken(recognizer); - return true; - } else { - return false; - } - } - singleTokenDeletion(recognizer) { - const nextTokenType = recognizer.getTokenStream().LA(2); - const 
expecting = this.getExpectedTokens(recognizer); - if (expecting.contains(nextTokenType)) { - this.reportUnwantedToken(recognizer); - recognizer.consume(); - const matchedSymbol = recognizer.getCurrentToken(); - this.reportMatch(recognizer); - return matchedSymbol; - } else { - return null; - } - } - getMissingSymbol(recognizer) { - const currentSymbol = recognizer.getCurrentToken(); - const expecting = this.getExpectedTokens(recognizer); - const expectedTokenType = expecting.first(); - let tokenText; - if (expectedTokenType === Token2.EOF) { - tokenText = ""; - } else { - tokenText = ""; - } - let current = currentSymbol; - const lookback = recognizer.getTokenStream().LT(-1); - if (current.type === Token2.EOF && lookback !== null) { - current = lookback; - } - return recognizer.getTokenFactory().create(current.source, expectedTokenType, tokenText, Token2.DEFAULT_CHANNEL, -1, -1, current.line, current.column); - } - getExpectedTokens(recognizer) { - return recognizer.getExpectedTokens(); - } - getTokenErrorDisplay(t) { - if (t === null) { - return ""; - } - let s = t.text; - if (s === null) { - if (t.type === Token2.EOF) { - s = ""; - } else { - s = "<" + t.type + ">"; - } - } - return this.escapeWSAndQuote(s); - } - escapeWSAndQuote(s) { - s = s.replace(/\n/g, "\\n"); - s = s.replace(/\r/g, "\\r"); - s = s.replace(/\t/g, "\\t"); - return "'" + s + "'"; - } - getErrorRecoverySet(recognizer) { - const atn = recognizer._interp.atn; - let ctx = recognizer._ctx; - const recoverSet = new IntervalSet(); - while (ctx !== null && ctx.invokingState >= 0) { - const invokingState = atn.states[ctx.invokingState]; - const rt = invokingState.transitions[0]; - const follow = atn.nextTokens(rt.followState); - recoverSet.addSet(follow); - ctx = ctx.parentCtx; - } - recoverSet.removeOne(Token2.EPSILON); - return recoverSet; - } - consumeUntil(recognizer, set) { - let ttype = recognizer.getTokenStream().LA(1); - while (ttype !== Token2.EOF && !set.contains(ttype)) { - 
recognizer.consume(); - ttype = recognizer.getTokenStream().LA(1); - } - } - }; - var BailErrorStrategy = class extends DefaultErrorStrategy { - constructor() { - super(); - } - recover(recognizer, e) { - let context = recognizer._ctx; - while (context !== null) { - context.exception = e; - context = context.parentCtx; - } - throw new ParseCancellationException(e); - } - recoverInline(recognizer) { - this.recover(recognizer, new InputMismatchException(recognizer)); - } - sync(recognizer) { - } - }; - module2.exports = {BailErrorStrategy, DefaultErrorStrategy}; -}); - -// node_modules/antlr4/src/antlr4/error/index.js -var require_error = __commonJS((exports, module2) => { - module2.exports.RecognitionException = require_Errors().RecognitionException; - module2.exports.NoViableAltException = require_Errors().NoViableAltException; - module2.exports.LexerNoViableAltException = require_Errors().LexerNoViableAltException; - module2.exports.InputMismatchException = require_Errors().InputMismatchException; - module2.exports.FailedPredicateException = require_Errors().FailedPredicateException; - module2.exports.DiagnosticErrorListener = require_DiagnosticErrorListener2(); - module2.exports.BailErrorStrategy = require_ErrorStrategy().BailErrorStrategy; - module2.exports.DefaultErrorStrategy = require_ErrorStrategy().DefaultErrorStrategy; - module2.exports.ErrorListener = require_ErrorListener().ErrorListener; -}); - -// node_modules/antlr4/src/antlr4/InputStream.js -var require_InputStream = __commonJS((exports, module2) => { - var {Token: Token2} = require_Token2(); - require_codepointat(); - require_fromcodepoint(); - var InputStream = class { - constructor(data, decodeToUnicodeCodePoints) { - this.name = ""; - this.strdata = data; - this.decodeToUnicodeCodePoints = decodeToUnicodeCodePoints || false; - this._index = 0; - this.data = []; - if (this.decodeToUnicodeCodePoints) { - for (let i = 0; i < this.strdata.length; ) { - const codePoint = this.strdata.codePointAt(i); - 
this.data.push(codePoint); - i += codePoint <= 65535 ? 1 : 2; - } - } else { - for (let i = 0; i < this.strdata.length; i++) { - const codeUnit = this.strdata.charCodeAt(i); - this.data.push(codeUnit); - } - } - this._size = this.data.length; - } - reset() { - this._index = 0; - } - consume() { - if (this._index >= this._size) { - throw "cannot consume EOF"; - } - this._index += 1; - } - LA(offset) { - if (offset === 0) { - return 0; - } - if (offset < 0) { - offset += 1; - } - const pos = this._index + offset - 1; - if (pos < 0 || pos >= this._size) { - return Token2.EOF; - } - return this.data[pos]; - } - LT(offset) { - return this.LA(offset); - } - mark() { - return -1; - } - release(marker) { - } - seek(_index) { - if (_index <= this._index) { - this._index = _index; - return; - } - this._index = Math.min(_index, this._size); - } - getText(start, stop) { - if (stop >= this._size) { - stop = this._size - 1; - } - if (start >= this._size) { - return ""; - } else { - if (this.decodeToUnicodeCodePoints) { - let result = ""; - for (let i = start; i <= stop; i++) { - result += String.fromCodePoint(this.data[i]); - } - return result; - } else { - return this.strdata.slice(start, stop + 1); - } - } - } - toString() { - return this.strdata; - } - get index() { - return this._index; - } - get size() { - return this._size; - } - }; - module2.exports = InputStream; -}); - -// node_modules/antlr4/src/antlr4/CharStreams.js -var require_CharStreams2 = __commonJS((exports, module2) => { - var InputStream = require_InputStream(); - var fs = require("fs"); - var CharStreams = { - fromString: function(str) { - return new InputStream(str, true); - }, - fromBlob: function(blob, encoding, onLoad, onError) { - const reader = new window.FileReader(); - reader.onload = function(e) { - const is = new InputStream(e.target.result, true); - onLoad(is); - }; - reader.onerror = onError; - reader.readAsText(blob, encoding); - }, - fromBuffer: function(buffer, encoding) { - return new 
InputStream(buffer.toString(encoding), true); - }, - fromPath: function(path, encoding, callback) { - fs.readFile(path, encoding, function(err, data) { - let is = null; - if (data !== null) { - is = new InputStream(data, true); - } - callback(err, is); - }); - }, - fromPathSync: function(path, encoding) { - const data = fs.readFileSync(path, encoding); - return new InputStream(data, true); - } - }; - module2.exports = CharStreams; -}); - -// node_modules/antlr4/src/antlr4/FileStream.js -var require_FileStream = __commonJS((exports, module2) => { - var InputStream = require_InputStream(); - var fs = require("fs"); - var FileStream = class extends InputStream { - constructor(fileName, decodeToUnicodeCodePoints) { - const data = fs.readFileSync(fileName, "utf8"); - super(data, decodeToUnicodeCodePoints); - this.fileName = fileName; - } - }; - module2.exports = FileStream; -}); - -// node_modules/antlr4/src/antlr4/BufferedTokenStream.js -var require_BufferedTokenStream2 = __commonJS((exports, module2) => { - var {Token: Token2} = require_Token2(); - var Lexer2 = require_Lexer2(); - var {Interval} = require_IntervalSet2(); - var TokenStream = class { - }; - var BufferedTokenStream = class extends TokenStream { - constructor(tokenSource) { - super(); - this.tokenSource = tokenSource; - this.tokens = []; - this.index = -1; - this.fetchedEOF = false; - } - mark() { - return 0; - } - release(marker) { - } - reset() { - this.seek(0); - } - seek(index) { - this.lazyInit(); - this.index = this.adjustSeekIndex(index); - } - get(index) { - this.lazyInit(); - return this.tokens[index]; - } - consume() { - let skipEofCheck = false; - if (this.index >= 0) { - if (this.fetchedEOF) { - skipEofCheck = this.index < this.tokens.length - 1; - } else { - skipEofCheck = this.index < this.tokens.length; - } - } else { - skipEofCheck = false; - } - if (!skipEofCheck && this.LA(1) === Token2.EOF) { - throw "cannot consume EOF"; - } - if (this.sync(this.index + 1)) { - this.index = 
this.adjustSeekIndex(this.index + 1); - } - } - sync(i) { - const n = i - this.tokens.length + 1; - if (n > 0) { - const fetched = this.fetch(n); - return fetched >= n; - } - return true; - } - fetch(n) { - if (this.fetchedEOF) { - return 0; - } - for (let i = 0; i < n; i++) { - const t = this.tokenSource.nextToken(); - t.tokenIndex = this.tokens.length; - this.tokens.push(t); - if (t.type === Token2.EOF) { - this.fetchedEOF = true; - return i + 1; - } - } - return n; - } - getTokens(start, stop, types) { - if (types === void 0) { - types = null; - } - if (start < 0 || stop < 0) { - return null; - } - this.lazyInit(); - const subset = []; - if (stop >= this.tokens.length) { - stop = this.tokens.length - 1; - } - for (let i = start; i < stop; i++) { - const t = this.tokens[i]; - if (t.type === Token2.EOF) { - break; - } - if (types === null || types.contains(t.type)) { - subset.push(t); - } - } - return subset; - } - LA(i) { - return this.LT(i).type; - } - LB(k) { - if (this.index - k < 0) { - return null; - } - return this.tokens[this.index - k]; - } - LT(k) { - this.lazyInit(); - if (k === 0) { - return null; - } - if (k < 0) { - return this.LB(-k); - } - const i = this.index + k - 1; - this.sync(i); - if (i >= this.tokens.length) { - return this.tokens[this.tokens.length - 1]; - } - return this.tokens[i]; - } - adjustSeekIndex(i) { - return i; - } - lazyInit() { - if (this.index === -1) { - this.setup(); - } - } - setup() { - this.sync(0); - this.index = this.adjustSeekIndex(0); - } - setTokenSource(tokenSource) { - this.tokenSource = tokenSource; - this.tokens = []; - this.index = -1; - this.fetchedEOF = false; - } - nextTokenOnChannel(i, channel) { - this.sync(i); - if (i >= this.tokens.length) { - return -1; - } - let token = this.tokens[i]; - while (token.channel !== this.channel) { - if (token.type === Token2.EOF) { - return -1; - } - i += 1; - this.sync(i); - token = this.tokens[i]; - } - return i; - } - previousTokenOnChannel(i, channel) { - while (i >= 0 
&& this.tokens[i].channel !== channel) { - i -= 1; - } - return i; - } - getHiddenTokensToRight(tokenIndex, channel) { - if (channel === void 0) { - channel = -1; - } - this.lazyInit(); - if (tokenIndex < 0 || tokenIndex >= this.tokens.length) { - throw "" + tokenIndex + " not in 0.." + this.tokens.length - 1; - } - const nextOnChannel = this.nextTokenOnChannel(tokenIndex + 1, Lexer2.DEFAULT_TOKEN_CHANNEL); - const from_ = tokenIndex + 1; - const to = nextOnChannel === -1 ? this.tokens.length - 1 : nextOnChannel; - return this.filterForChannel(from_, to, channel); - } - getHiddenTokensToLeft(tokenIndex, channel) { - if (channel === void 0) { - channel = -1; - } - this.lazyInit(); - if (tokenIndex < 0 || tokenIndex >= this.tokens.length) { - throw "" + tokenIndex + " not in 0.." + this.tokens.length - 1; - } - const prevOnChannel = this.previousTokenOnChannel(tokenIndex - 1, Lexer2.DEFAULT_TOKEN_CHANNEL); - if (prevOnChannel === tokenIndex - 1) { - return null; - } - const from_ = prevOnChannel + 1; - const to = tokenIndex - 1; - return this.filterForChannel(from_, to, channel); - } - filterForChannel(left, right, channel) { - const hidden = []; - for (let i = left; i < right + 1; i++) { - const t = this.tokens[i]; - if (channel === -1) { - if (t.channel !== Lexer2.DEFAULT_TOKEN_CHANNEL) { - hidden.push(t); - } - } else if (t.channel === channel) { - hidden.push(t); - } - } - if (hidden.length === 0) { - return null; - } - return hidden; - } - getSourceName() { - return this.tokenSource.getSourceName(); - } - getText(interval) { - this.lazyInit(); - this.fill(); - if (interval === void 0 || interval === null) { - interval = new Interval(0, this.tokens.length - 1); - } - let start = interval.start; - if (start instanceof Token2) { - start = start.tokenIndex; - } - let stop = interval.stop; - if (stop instanceof Token2) { - stop = stop.tokenIndex; - } - if (start === null || stop === null || start < 0 || stop < 0) { - return ""; - } - if (stop >= this.tokens.length) { 
- stop = this.tokens.length - 1; - } - let s = ""; - for (let i = start; i < stop + 1; i++) { - const t = this.tokens[i]; - if (t.type === Token2.EOF) { - break; - } - s = s + t.text; - } - return s; - } - fill() { - this.lazyInit(); - while (this.fetch(1e3) === 1e3) { - continue; - } - } - }; - module2.exports = BufferedTokenStream; -}); - -// node_modules/antlr4/src/antlr4/CommonTokenStream.js -var require_CommonTokenStream2 = __commonJS((exports, module2) => { - var Token2 = require_Token2().Token; - var BufferedTokenStream = require_BufferedTokenStream2(); - var CommonTokenStream2 = class extends BufferedTokenStream { - constructor(lexer, channel) { - super(lexer); - this.channel = channel === void 0 ? Token2.DEFAULT_CHANNEL : channel; - } - adjustSeekIndex(i) { - return this.nextTokenOnChannel(i, this.channel); - } - LB(k) { - if (k === 0 || this.index - k < 0) { - return null; - } - let i = this.index; - let n = 1; - while (n <= k) { - i = this.previousTokenOnChannel(i - 1, this.channel); - n += 1; - } - if (i < 0) { - return null; - } - return this.tokens[i]; - } - LT(k) { - this.lazyInit(); - if (k === 0) { - return null; - } - if (k < 0) { - return this.LB(-k); - } - let i = this.index; - let n = 1; - while (n < k) { - if (this.sync(i + 1)) { - i = this.nextTokenOnChannel(i + 1, this.channel); - } - n += 1; - } - return this.tokens[i]; - } - getNumberOfOnChannelTokens() { - let n = 0; - this.fill(); - for (let i = 0; i < this.tokens.length; i++) { - const t = this.tokens[i]; - if (t.channel === this.channel) { - n += 1; - } - if (t.type === Token2.EOF) { - break; - } - } - return n; - } - }; - module2.exports = CommonTokenStream2; -}); - -// node_modules/antlr4/src/antlr4/Parser.js -var require_Parser2 = __commonJS((exports, module2) => { - var {Token: Token2} = require_Token2(); - var {ParseTreeListener, TerminalNode, ErrorNode: ErrorNode2} = require_Tree(); - var Recognizer = require_Recognizer2(); - var {DefaultErrorStrategy} = require_ErrorStrategy(); 
- var ATNDeserializer3 = require_ATNDeserializer2(); - var ATNDeserializationOptions = require_ATNDeserializationOptions2(); - var Lexer2 = require_Lexer2(); - var TraceListener = class extends ParseTreeListener { - constructor(parser) { - super(); - this.parser = parser; - } - enterEveryRule(ctx) { - console.log("enter " + this.parser.ruleNames[ctx.ruleIndex] + ", LT(1)=" + this.parser._input.LT(1).text); - } - visitTerminal(node) { - console.log("consume " + node.symbol + " rule " + this.parser.ruleNames[this.parser._ctx.ruleIndex]); - } - exitEveryRule(ctx) { - console.log("exit " + this.parser.ruleNames[ctx.ruleIndex] + ", LT(1)=" + this.parser._input.LT(1).text); - } - }; - var Parser2 = class extends Recognizer { - constructor(input) { - super(); - this._input = null; - this._errHandler = new DefaultErrorStrategy(); - this._precedenceStack = []; - this._precedenceStack.push(0); - this._ctx = null; - this.buildParseTrees = true; - this._tracer = null; - this._parseListeners = null; - this._syntaxErrors = 0; - this.setInputStream(input); - } - reset() { - if (this._input !== null) { - this._input.seek(0); - } - this._errHandler.reset(this); - this._ctx = null; - this._syntaxErrors = 0; - this.setTrace(false); - this._precedenceStack = []; - this._precedenceStack.push(0); - if (this._interp !== null) { - this._interp.reset(); - } - } - match(ttype) { - let t = this.getCurrentToken(); - if (t.type === ttype) { - this._errHandler.reportMatch(this); - this.consume(); - } else { - t = this._errHandler.recoverInline(this); - if (this.buildParseTrees && t.tokenIndex === -1) { - this._ctx.addErrorNode(t); - } - } - return t; - } - matchWildcard() { - let t = this.getCurrentToken(); - if (t.type > 0) { - this._errHandler.reportMatch(this); - this.consume(); - } else { - t = this._errHandler.recoverInline(this); - if (this._buildParseTrees && t.tokenIndex === -1) { - this._ctx.addErrorNode(t); - } - } - return t; - } - getParseListeners() { - return this._parseListeners 
|| []; - } - addParseListener(listener) { - if (listener === null) { - throw "listener"; - } - if (this._parseListeners === null) { - this._parseListeners = []; - } - this._parseListeners.push(listener); - } - removeParseListener(listener) { - if (this._parseListeners !== null) { - const idx = this._parseListeners.indexOf(listener); - if (idx >= 0) { - this._parseListeners.splice(idx, 1); - } - if (this._parseListeners.length === 0) { - this._parseListeners = null; - } - } - } - removeParseListeners() { - this._parseListeners = null; - } - triggerEnterRuleEvent() { - if (this._parseListeners !== null) { - const ctx = this._ctx; - this._parseListeners.map(function(listener) { - listener.enterEveryRule(ctx); - ctx.enterRule(listener); - }); - } - } - triggerExitRuleEvent() { - if (this._parseListeners !== null) { - const ctx = this._ctx; - this._parseListeners.slice(0).reverse().map(function(listener) { - ctx.exitRule(listener); - listener.exitEveryRule(ctx); - }); - } - } - getTokenFactory() { - return this._input.tokenSource._factory; - } - setTokenFactory(factory) { - this._input.tokenSource._factory = factory; - } - getATNWithBypassAlts() { - const serializedAtn = this.getSerializedATN(); - if (serializedAtn === null) { - throw "The current parser does not support an ATN with bypass alternatives."; - } - let result = this.bypassAltsAtnCache[serializedAtn]; - if (result === null) { - const deserializationOptions = new ATNDeserializationOptions(); - deserializationOptions.generateRuleBypassTransitions = true; - result = new ATNDeserializer3(deserializationOptions).deserialize(serializedAtn); - this.bypassAltsAtnCache[serializedAtn] = result; - } - return result; - } - compileParseTreePattern(pattern, patternRuleIndex, lexer) { - lexer = lexer || null; - if (lexer === null) { - if (this.getTokenStream() !== null) { - const tokenSource = this.getTokenStream().tokenSource; - if (tokenSource instanceof Lexer2) { - lexer = tokenSource; - } - } - } - if (lexer === null) 
{ - throw "Parser can't discover a lexer to use"; - } - const m = new ParseTreePatternMatcher(lexer, this); - return m.compile(pattern, patternRuleIndex); - } - getInputStream() { - return this.getTokenStream(); - } - setInputStream(input) { - this.setTokenStream(input); - } - getTokenStream() { - return this._input; - } - setTokenStream(input) { - this._input = null; - this.reset(); - this._input = input; - } - getCurrentToken() { - return this._input.LT(1); - } - notifyErrorListeners(msg, offendingToken, err) { - offendingToken = offendingToken || null; - err = err || null; - if (offendingToken === null) { - offendingToken = this.getCurrentToken(); - } - this._syntaxErrors += 1; - const line = offendingToken.line; - const column = offendingToken.column; - const listener = this.getErrorListenerDispatch(); - listener.syntaxError(this, offendingToken, line, column, msg, err); - } - consume() { - const o = this.getCurrentToken(); - if (o.type !== Token2.EOF) { - this.getInputStream().consume(); - } - const hasListener = this._parseListeners !== null && this._parseListeners.length > 0; - if (this.buildParseTrees || hasListener) { - let node; - if (this._errHandler.inErrorRecoveryMode(this)) { - node = this._ctx.addErrorNode(o); - } else { - node = this._ctx.addTokenNode(o); - } - node.invokingState = this.state; - if (hasListener) { - this._parseListeners.map(function(listener) { - if (node instanceof ErrorNode2 || node.isErrorNode !== void 0 && node.isErrorNode()) { - listener.visitErrorNode(node); - } else if (node instanceof TerminalNode) { - listener.visitTerminal(node); - } - }); - } - } - return o; - } - addContextToParseTree() { - if (this._ctx.parentCtx !== null) { - this._ctx.parentCtx.addChild(this._ctx); - } - } - enterRule(localctx, state, ruleIndex) { - this.state = state; - this._ctx = localctx; - this._ctx.start = this._input.LT(1); - if (this.buildParseTrees) { - this.addContextToParseTree(); - } - if (this._parseListeners !== null) { - 
this.triggerEnterRuleEvent(); - } - } - exitRule() { - this._ctx.stop = this._input.LT(-1); - if (this._parseListeners !== null) { - this.triggerExitRuleEvent(); - } - this.state = this._ctx.invokingState; - this._ctx = this._ctx.parentCtx; - } - enterOuterAlt(localctx, altNum) { - localctx.setAltNumber(altNum); - if (this.buildParseTrees && this._ctx !== localctx) { - if (this._ctx.parentCtx !== null) { - this._ctx.parentCtx.removeLastChild(); - this._ctx.parentCtx.addChild(localctx); - } - } - this._ctx = localctx; - } - getPrecedence() { - if (this._precedenceStack.length === 0) { - return -1; - } else { - return this._precedenceStack[this._precedenceStack.length - 1]; - } - } - enterRecursionRule(localctx, state, ruleIndex, precedence) { - this.state = state; - this._precedenceStack.push(precedence); - this._ctx = localctx; - this._ctx.start = this._input.LT(1); - if (this._parseListeners !== null) { - this.triggerEnterRuleEvent(); - } - } - pushNewRecursionContext(localctx, state, ruleIndex) { - const previous = this._ctx; - previous.parentCtx = localctx; - previous.invokingState = state; - previous.stop = this._input.LT(-1); - this._ctx = localctx; - this._ctx.start = previous.start; - if (this.buildParseTrees) { - this._ctx.addChild(previous); - } - if (this._parseListeners !== null) { - this.triggerEnterRuleEvent(); - } - } - unrollRecursionContexts(parentCtx) { - this._precedenceStack.pop(); - this._ctx.stop = this._input.LT(-1); - const retCtx = this._ctx; - if (this._parseListeners !== null) { - while (this._ctx !== parentCtx) { - this.triggerExitRuleEvent(); - this._ctx = this._ctx.parentCtx; - } - } else { - this._ctx = parentCtx; - } - retCtx.parentCtx = parentCtx; - if (this.buildParseTrees && parentCtx !== null) { - parentCtx.addChild(retCtx); - } - } - getInvokingContext(ruleIndex) { - let ctx = this._ctx; - while (ctx !== null) { - if (ctx.ruleIndex === ruleIndex) { - return ctx; - } - ctx = ctx.parentCtx; - } - return null; - } - 
precpred(localctx, precedence) { - return precedence >= this._precedenceStack[this._precedenceStack.length - 1]; - } - inContext(context) { - return false; - } - isExpectedToken(symbol) { - const atn = this._interp.atn; - let ctx = this._ctx; - const s = atn.states[this.state]; - let following = atn.nextTokens(s); - if (following.contains(symbol)) { - return true; - } - if (!following.contains(Token2.EPSILON)) { - return false; - } - while (ctx !== null && ctx.invokingState >= 0 && following.contains(Token2.EPSILON)) { - const invokingState = atn.states[ctx.invokingState]; - const rt = invokingState.transitions[0]; - following = atn.nextTokens(rt.followState); - if (following.contains(symbol)) { - return true; - } - ctx = ctx.parentCtx; - } - if (following.contains(Token2.EPSILON) && symbol === Token2.EOF) { - return true; - } else { - return false; - } - } - getExpectedTokens() { - return this._interp.atn.getExpectedTokens(this.state, this._ctx); - } - getExpectedTokensWithinCurrentRule() { - const atn = this._interp.atn; - const s = atn.states[this.state]; - return atn.nextTokens(s); - } - getRuleIndex(ruleName) { - const ruleIndex = this.getRuleIndexMap()[ruleName]; - if (ruleIndex !== null) { - return ruleIndex; - } else { - return -1; - } - } - getRuleInvocationStack(p) { - p = p || null; - if (p === null) { - p = this._ctx; - } - const stack = []; - while (p !== null) { - const ruleIndex = p.ruleIndex; - if (ruleIndex < 0) { - stack.push("n/a"); - } else { - stack.push(this.ruleNames[ruleIndex]); - } - p = p.parentCtx; - } - return stack; - } - getDFAStrings() { - return this._interp.decisionToDFA.toString(); - } - dumpDFA() { - let seenOne = false; - for (let i = 0; i < this._interp.decisionToDFA.length; i++) { - const dfa = this._interp.decisionToDFA[i]; - if (dfa.states.length > 0) { - if (seenOne) { - console.log(); - } - this.printer.println("Decision " + dfa.decision + ":"); - this.printer.print(dfa.toString(this.literalNames, this.symbolicNames)); - 
seenOne = true; - } - } - } - getSourceName() { - return this._input.sourceName; - } - setTrace(trace) { - if (!trace) { - this.removeParseListener(this._tracer); - this._tracer = null; - } else { - if (this._tracer !== null) { - this.removeParseListener(this._tracer); - } - this._tracer = new TraceListener(this); - this.addParseListener(this._tracer); - } - } - }; - Parser2.bypassAltsAtnCache = {}; - module2.exports = Parser2; -}); - -// node_modules/antlr4/src/antlr4/index.js -var require_antlr4 = __commonJS((exports) => { - exports.atn = require_atn(); - exports.codepointat = require_codepointat(); - exports.dfa = require_dfa(); - exports.fromcodepoint = require_fromcodepoint(); - exports.tree = require_tree(); - exports.error = require_error(); - exports.Token = require_Token2().Token; - exports.CharStreams = require_CharStreams2(); - exports.CommonToken = require_Token2().CommonToken; - exports.InputStream = require_InputStream(); - exports.FileStream = require_FileStream(); - exports.CommonTokenStream = require_CommonTokenStream2(); - exports.Lexer = require_Lexer2(); - exports.Parser = require_Parser2(); - var pc = require_PredictionContext2(); - exports.PredictionContextCache = pc.PredictionContextCache; - exports.ParserRuleContext = require_ParserRuleContext2(); - exports.Interval = require_IntervalSet2().Interval; - exports.IntervalSet = require_IntervalSet2().IntervalSet; - exports.Utils = require_Utils2(); - exports.LL1Analyzer = require_LL1Analyzer2().LL1Analyzer; -}); - -// src/antlr/Solidity.tokens -var require_Solidity = __commonJS((exports, module2) => { - module2.exports = "./Solidity-JSLPOCIO.tokens"; -}); - -// src/tokens-string.js -var require_tokens_string = __commonJS((exports, module2) => { - if (typeof BROWSER !== "undefined") { - module2.exports = require_Solidity(); - } else { - module2.exports = require("fs").readFileSync(require("path").join(__dirname, "./antlr/Solidity.tokens")).toString(); - } -}); - -// src/index.ts 
-__markAsModule(exports); -__export(exports, { - ParserError: () => ParserError, - parse: () => parse, - tokenize: () => tokenize, - visit: () => visit -}); - -// src/parser.ts -var import_antlr4ts = __toModule(require_antlr4ts()); - -// src/antlr/SolidityLexer.ts -var import_ATNDeserializer = __toModule(require_ATNDeserializer()); -var import_Lexer = __toModule(require_Lexer()); -var import_LexerATNSimulator = __toModule(require_LexerATNSimulator()); -var import_VocabularyImpl = __toModule(require_VocabularyImpl()); -var Utils = __toModule(require_Utils()); -var _SolidityLexer = class extends import_Lexer.Lexer { - get vocabulary() { - return _SolidityLexer.VOCABULARY; - } - constructor(input) { - super(input); - this._interp = new import_LexerATNSimulator.LexerATNSimulator(_SolidityLexer._ATN, this); - } - get grammarFileName() { - return "Solidity.g4"; - } - get ruleNames() { - return _SolidityLexer.ruleNames; - } - get serializedATN() { - return _SolidityLexer._serializedATN; - } - get channelNames() { - return _SolidityLexer.channelNames; - } - get modeNames() { - return _SolidityLexer.modeNames; - } - static get _ATN() { - if (!_SolidityLexer.__ATN) { - _SolidityLexer.__ATN = new import_ATNDeserializer.ATNDeserializer().deserialize(Utils.toCharArray(_SolidityLexer._serializedATN)); - } - return _SolidityLexer.__ATN; - } -}; -var SolidityLexer = _SolidityLexer; -SolidityLexer.T__0 = 1; -SolidityLexer.T__1 = 2; -SolidityLexer.T__2 = 3; -SolidityLexer.T__3 = 4; -SolidityLexer.T__4 = 5; -SolidityLexer.T__5 = 6; -SolidityLexer.T__6 = 7; -SolidityLexer.T__7 = 8; -SolidityLexer.T__8 = 9; -SolidityLexer.T__9 = 10; -SolidityLexer.T__10 = 11; -SolidityLexer.T__11 = 12; -SolidityLexer.T__12 = 13; -SolidityLexer.T__13 = 14; -SolidityLexer.T__14 = 15; -SolidityLexer.T__15 = 16; -SolidityLexer.T__16 = 17; -SolidityLexer.T__17 = 18; -SolidityLexer.T__18 = 19; -SolidityLexer.T__19 = 20; -SolidityLexer.T__20 = 21; -SolidityLexer.T__21 = 22; -SolidityLexer.T__22 = 23; 
-SolidityLexer.T__23 = 24; -SolidityLexer.T__24 = 25; -SolidityLexer.T__25 = 26; -SolidityLexer.T__26 = 27; -SolidityLexer.T__27 = 28; -SolidityLexer.T__28 = 29; -SolidityLexer.T__29 = 30; -SolidityLexer.T__30 = 31; -SolidityLexer.T__31 = 32; -SolidityLexer.T__32 = 33; -SolidityLexer.T__33 = 34; -SolidityLexer.T__34 = 35; -SolidityLexer.T__35 = 36; -SolidityLexer.T__36 = 37; -SolidityLexer.T__37 = 38; -SolidityLexer.T__38 = 39; -SolidityLexer.T__39 = 40; -SolidityLexer.T__40 = 41; -SolidityLexer.T__41 = 42; -SolidityLexer.T__42 = 43; -SolidityLexer.T__43 = 44; -SolidityLexer.T__44 = 45; -SolidityLexer.T__45 = 46; -SolidityLexer.T__46 = 47; -SolidityLexer.T__47 = 48; -SolidityLexer.T__48 = 49; -SolidityLexer.T__49 = 50; -SolidityLexer.T__50 = 51; -SolidityLexer.T__51 = 52; -SolidityLexer.T__52 = 53; -SolidityLexer.T__53 = 54; -SolidityLexer.T__54 = 55; -SolidityLexer.T__55 = 56; -SolidityLexer.T__56 = 57; -SolidityLexer.T__57 = 58; -SolidityLexer.T__58 = 59; -SolidityLexer.T__59 = 60; -SolidityLexer.T__60 = 61; -SolidityLexer.T__61 = 62; -SolidityLexer.T__62 = 63; -SolidityLexer.T__63 = 64; -SolidityLexer.T__64 = 65; -SolidityLexer.T__65 = 66; -SolidityLexer.T__66 = 67; -SolidityLexer.T__67 = 68; -SolidityLexer.T__68 = 69; -SolidityLexer.T__69 = 70; -SolidityLexer.T__70 = 71; -SolidityLexer.T__71 = 72; -SolidityLexer.T__72 = 73; -SolidityLexer.T__73 = 74; -SolidityLexer.T__74 = 75; -SolidityLexer.T__75 = 76; -SolidityLexer.T__76 = 77; -SolidityLexer.T__77 = 78; -SolidityLexer.T__78 = 79; -SolidityLexer.T__79 = 80; -SolidityLexer.T__80 = 81; -SolidityLexer.T__81 = 82; -SolidityLexer.T__82 = 83; -SolidityLexer.T__83 = 84; -SolidityLexer.T__84 = 85; -SolidityLexer.T__85 = 86; -SolidityLexer.T__86 = 87; -SolidityLexer.T__87 = 88; -SolidityLexer.T__88 = 89; -SolidityLexer.T__89 = 90; -SolidityLexer.T__90 = 91; -SolidityLexer.T__91 = 92; -SolidityLexer.T__92 = 93; -SolidityLexer.T__93 = 94; -SolidityLexer.T__94 = 95; -SolidityLexer.T__95 = 96; -SolidityLexer.T__96 = 97; 
-SolidityLexer.Int = 98; -SolidityLexer.Uint = 99; -SolidityLexer.Byte = 100; -SolidityLexer.Fixed = 101; -SolidityLexer.Ufixed = 102; -SolidityLexer.BooleanLiteral = 103; -SolidityLexer.DecimalNumber = 104; -SolidityLexer.HexNumber = 105; -SolidityLexer.NumberUnit = 106; -SolidityLexer.HexLiteralFragment = 107; -SolidityLexer.ReservedKeyword = 108; -SolidityLexer.AnonymousKeyword = 109; -SolidityLexer.BreakKeyword = 110; -SolidityLexer.ConstantKeyword = 111; -SolidityLexer.ImmutableKeyword = 112; -SolidityLexer.ContinueKeyword = 113; -SolidityLexer.LeaveKeyword = 114; -SolidityLexer.ExternalKeyword = 115; -SolidityLexer.IndexedKeyword = 116; -SolidityLexer.InternalKeyword = 117; -SolidityLexer.PayableKeyword = 118; -SolidityLexer.PrivateKeyword = 119; -SolidityLexer.PublicKeyword = 120; -SolidityLexer.VirtualKeyword = 121; -SolidityLexer.PureKeyword = 122; -SolidityLexer.TypeKeyword = 123; -SolidityLexer.ViewKeyword = 124; -SolidityLexer.ConstructorKeyword = 125; -SolidityLexer.FallbackKeyword = 126; -SolidityLexer.ReceiveKeyword = 127; -SolidityLexer.Identifier = 128; -SolidityLexer.StringLiteralFragment = 129; -SolidityLexer.VersionLiteral = 130; -SolidityLexer.WS = 131; -SolidityLexer.COMMENT = 132; -SolidityLexer.LINE_COMMENT = 133; -SolidityLexer.channelNames = [ - "DEFAULT_TOKEN_CHANNEL", - "HIDDEN" -]; -SolidityLexer.modeNames = [ - "DEFAULT_MODE" -]; -SolidityLexer.ruleNames = [ - "T__0", - "T__1", - "T__2", - "T__3", - "T__4", - "T__5", - "T__6", - "T__7", - "T__8", - "T__9", - "T__10", - "T__11", - "T__12", - "T__13", - "T__14", - "T__15", - "T__16", - "T__17", - "T__18", - "T__19", - "T__20", - "T__21", - "T__22", - "T__23", - "T__24", - "T__25", - "T__26", - "T__27", - "T__28", - "T__29", - "T__30", - "T__31", - "T__32", - "T__33", - "T__34", - "T__35", - "T__36", - "T__37", - "T__38", - "T__39", - "T__40", - "T__41", - "T__42", - "T__43", - "T__44", - "T__45", - "T__46", - "T__47", - "T__48", - "T__49", - "T__50", - "T__51", - "T__52", - "T__53", - 
"T__54", - "T__55", - "T__56", - "T__57", - "T__58", - "T__59", - "T__60", - "T__61", - "T__62", - "T__63", - "T__64", - "T__65", - "T__66", - "T__67", - "T__68", - "T__69", - "T__70", - "T__71", - "T__72", - "T__73", - "T__74", - "T__75", - "T__76", - "T__77", - "T__78", - "T__79", - "T__80", - "T__81", - "T__82", - "T__83", - "T__84", - "T__85", - "T__86", - "T__87", - "T__88", - "T__89", - "T__90", - "T__91", - "T__92", - "T__93", - "T__94", - "T__95", - "T__96", - "Int", - "Uint", - "Byte", - "Fixed", - "Ufixed", - "BooleanLiteral", - "DecimalNumber", - "DecimalDigits", - "HexNumber", - "HexDigits", - "NumberUnit", - "HexLiteralFragment", - "HexPair", - "HexCharacter", - "ReservedKeyword", - "AnonymousKeyword", - "BreakKeyword", - "ConstantKeyword", - "ImmutableKeyword", - "ContinueKeyword", - "LeaveKeyword", - "ExternalKeyword", - "IndexedKeyword", - "InternalKeyword", - "PayableKeyword", - "PrivateKeyword", - "PublicKeyword", - "VirtualKeyword", - "PureKeyword", - "TypeKeyword", - "ViewKeyword", - "ConstructorKeyword", - "FallbackKeyword", - "ReceiveKeyword", - "Identifier", - "IdentifierStart", - "IdentifierPart", - "StringLiteralFragment", - "DoubleQuotedStringCharacter", - "SingleQuotedStringCharacter", - "VersionLiteral", - "WS", - "COMMENT", - "LINE_COMMENT" -]; -SolidityLexer._LITERAL_NAMES = [ - void 0, - "'pragma'", - "';'", - "'||'", - "'^'", - "'~'", - "'>='", - "'>'", - "'<'", - "'<='", - "'='", - "'as'", - "'import'", - "'*'", - "'from'", - "'{'", - "','", - "'}'", - "'abstract'", - "'contract'", - "'interface'", - "'library'", - "'is'", - "'('", - "')'", - "'error'", - "'using'", - "'for'", - "'struct'", - "'modifier'", - "'function'", - "'returns'", - "'event'", - "'enum'", - "'['", - "']'", - "'address'", - "'.'", - "'mapping'", - "'=>'", - "'memory'", - "'storage'", - "'calldata'", - "'if'", - "'else'", - "'try'", - "'catch'", - "'while'", - "'unchecked'", - "'assembly'", - "'do'", - "'return'", - "'throw'", - "'emit'", - "'revert'", - 
"'var'", - "'bool'", - "'string'", - "'byte'", - "'++'", - "'--'", - "'new'", - "':'", - "'+'", - "'-'", - "'after'", - "'delete'", - "'!'", - "'**'", - "'/'", - "'%'", - "'<<'", - "'>>'", - "'&'", - "'|'", - "'=='", - "'!='", - "'&&'", - "'?'", - "'|='", - "'^='", - "'&='", - "'<<='", - "'>>='", - "'+='", - "'-='", - "'*='", - "'/='", - "'%='", - "'let'", - "':='", - "'=:'", - "'switch'", - "'case'", - "'default'", - "'->'", - "'callback'", - "'override'", - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - "'anonymous'", - "'break'", - "'constant'", - "'immutable'", - "'continue'", - "'leave'", - "'external'", - "'indexed'", - "'internal'", - "'payable'", - "'private'", - "'public'", - "'virtual'", - "'pure'", - "'type'", - "'view'", - "'constructor'", - "'fallback'", - "'receive'" -]; -SolidityLexer._SYMBOLIC_NAMES = [ - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - "Int", - "Uint", - "Byte", - "Fixed", - "Ufixed", - "BooleanLiteral", - "DecimalNumber", - "HexNumber", - "NumberUnit", - 
"HexLiteralFragment", - "ReservedKeyword", - "AnonymousKeyword", - "BreakKeyword", - "ConstantKeyword", - "ImmutableKeyword", - "ContinueKeyword", - "LeaveKeyword", - "ExternalKeyword", - "IndexedKeyword", - "InternalKeyword", - "PayableKeyword", - "PrivateKeyword", - "PublicKeyword", - "VirtualKeyword", - "PureKeyword", - "TypeKeyword", - "ViewKeyword", - "ConstructorKeyword", - "FallbackKeyword", - "ReceiveKeyword", - "Identifier", - "StringLiteralFragment", - "VersionLiteral", - "WS", - "COMMENT", - "LINE_COMMENT" -]; -SolidityLexer.VOCABULARY = new import_VocabularyImpl.VocabularyImpl(_SolidityLexer._LITERAL_NAMES, _SolidityLexer._SYMBOLIC_NAMES, []); -SolidityLexer._serializedATNSegments = 4; -SolidityLexer._serializedATNSegment0 = `\uC91D\uCABA\u058D\uAFBA\u4F53\u0607\uEA8B\uC241\x87\u074E\b     \x07 \x07\b \b  - -\v \v\f \f\r \r                   ! !" "# #$ $% %& &' '( () )* *+ +, ,- -. ./ /0 01 12 23 34 45 56 67 78 89 9: :; ;< <= => >? ?@ @A AB BC CD DE EF FG GH HI IJ JK KL LM MN NO OP PQ QR RS ST TU UV VW WX XY YZ Z[ [\\ \\] ]^ ^_ _\` \`a ab bc cd de ef fg gh hi ij jk kl lm mn no op pq qr rs st tu uv vw wx xy yz z{ {| |} }~ ~\x7F \x7F\x80 \x80\x81 \x81\x82 \x82\x83 \x83\x84 \x84\x85 \x85\x86 \x86\x87 \x87\x88 \x88\x89 \x89\x8A \x8A\x8B \x8B\x8C \x8C\x8D \x8D\x8E \x8E\x07\x07\x07\b\b   - - -\v\v\f\f\f\r\r\r\r\r\r\r        !!!!!!"""""##$$%%%%%%%%&&''''''''((()))))))********+++++++++,,,-----....//////00000011111111112222222223334444444555555666667777777888899999:::::::;;;;;<<<===>>>>??@@AABBBBBBCCCCCCCDDEEEFFGGHHHIIIJJKKLLLMMMNNNOOPPPQQQRRRSSSSTTTTUUUVVVWWWXXXYYYZZZZ[[[\\\\\\]]]]]]]^^^^^________\`\`\`aaaaaaaaabbbbbbbbbccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccccc\u0389 
-cdddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddddd\u0462 -deeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee\u0540 -efffffffffffff\u054E -f\rff\u054Ffff\u0554 -f\rff\u0555f\u0558 -fggggggggggggggg\u0568 -g\rgg\u0569ggg\u056E -g\rgg\u056Fg\u0572 -ghhhhhhhhhh\u057D -hiii\u0581 -iiii\u0585 -iiii\u0589 -ijjj\u058D -jj\x07j\u0590 -j\fjj\u0593\vjkkkklll\u059B -ll\x07l\u059E -l\fll\u05A1\vlmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmmm\u05DB -mnnnnnnn\u05E3 -nnnnn\u05E8 -nnn\u05EB -noooppqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqqq\u064A -qrrrrrrrrrrsssssstttttttttuuuuuuuuuuvvvvvvvvvwwwwwwxxxxxxxxxyyyyyyyyzzzzzzzzz{{{{{{{{||||||||}}}}}}}~~~~~~~~\x7F\x7F\x7F\x7F\x7F\x80\x80\x80\x80\x80\x81\x81\x81\x81\x81\x82\x82\x82\x82\x82\x82\x82\x82\x82\x82\x82\x82\x83\x83\x83\x83\x83\x83\x83\x83\x83\x84\x84\x84\x84\x84\x84\x84\x84\x85\x85\x07\x85\u06E5 -\x85\f\x85\x85\u06E8\v\x85\x86\x86\x87\x87\x88\x88\x88\x88\x88\x88\x88\x88\u06F5 -\x88\x88\x88\x07\x88\u06F9 -\x88\f\x88\x88\u06FC\v\x88\x88\x88\x88\x88\x88\x88\x88\x88\x88\u0706 -\x88\x88\x88\x07\x88\u070A -\x88\f\x88\x88\u070D\v\x88\x88\x88\u0710 -\x88\x89\x89\x89\x89\u0715 -\x89\x8A\x8A\x8A\x8A\u071A -\x8A\x8B\x8B\u071D -\x8B\r\x8B\x8B\u071E\x8B\x8B\x8B\u0723 -\x8B\r\x8B\x8B\u0724\x8B\x8B\x8B\u0729 -\x8B\r\x8B\x8B\u072A\x8B\u072D -\x8B\x8C\x8C\u0730 -\x8C\r\x8C\x8C\u0731\x8C\x8C\x8D\x8D\x8D\x8D\x07\x8D\u073A -\x8D\f\x8D\x8D\u073D\v\x8D\x8D\x8D\x8D\x8D\x8D\x8E\x8E\x8E\x8E\x07\x8E\u0748 -\x8E\f\x8E\x8E\u074B\v\x8E\x8E\x8E\u073B\x8F\x07 \v\x07\r\b  -\v\f\r!#%')+-/13579;= 
?!A"C#E$G%I&K'M(O)Q*S+U,W-Y.[/]0_1a2c3e4g5i6k7m8o9q:s;u{?}@\x7FA\x81B\x83C\x85D\x87E\x89F\x8BG\x8DH\x8FI\x91J\x93K\x95L\x97M\x99N\x9BO\x9DP\x9FQ\xA1R\xA3S\xA5T\xA7U\xA9V\xABW\xADX\xAFY\xB1Z\xB3[\xB5\\\xB7]\xB9^\xBB_\xBD\`\xBFa\xC1b\xC3c\xC5d\xC7e\xC9f\xCBg\xCDh\xCFi\xD1j\xD3\xD5k\xD7\xD9l\xDBm\xDD\xDF\xE1n\xE3o\xE5p\xE7q\xE9r\xEBs\xEDt\xEFu\xF1v\xF3w\xF5x\xF7y\xF9z\xFB{\xFD|\xFF}\u0101~\u0103\x7F\u0105\x80\u0107\x81\u0109\x82\u010B\u010D\u010F\x83\u0111\u0113\u0115\x84\u0117\x85\u0119\x86\u011B\x87\f2;GGggZZzz2;CHch&&C\\aac|\x07&&2;C\\aac|\f\f$$^^\f\f))^^\v\f""\f\f\u07DF\x07 \v\r!#%')+-/13579;=?ACEGIKMOQSUWY[]_acegikmoqsuwy{}\x7F\x81\x83\x85\x87\x89\x8B\x8D\x8F\x91\x93\x95\x97\x99\x9B\x9D\x9F\xA1\xA3\xA5\xA7\xA9\xAB\xAD\xAF\xB1\xB3\xB5\xB7\xB9\xBB\xBD\xBF\xC1\xC3\xC5\xC7\xC9\xCB\xCD\xCF\xD1\xD5\xD9\xDB\xE1\xE3\xE5\xE7\xE9\xEB\xED\xEF`; -SolidityLexer._serializedATNSegment1 = "\xF1\xF3\xF5\xF7\xF9\xFB\xFD\xFF\u0101\u0103\u0105\u0107\u0109\u010F\u0115\u0117\u0119\u011B\u011D\u0124\x07\u0126 
\u0129\v\u012B\r\u012D\u0130\u0132\u0134\u0137\u0139\u013C\u0143\u0145\u014A!\u014C#\u014E%\u0150'\u0159)\u0162+\u016C-\u0174/\u01771\u01793\u017B5\u01817\u01879\u018B;\u0192=\u019B?\u01A4A\u01ACC\u01B2E\u01B7G\u01B9I\u01BBK\u01C3M\u01C5O\u01CDQ\u01D0S\u01D7U\u01DFW\u01E8Y\u01EB[\u01F0]\u01F4_\u01FAa\u0200c\u020Ae\u0213g\u0216i\u021Dk\u0223m\u0228o\u022Fq\u0233s\u0238u\u023Fw\u0244y\u0247{\u024A}\u024E\x7F\u0250\x81\u0252\x83\u0254\x85\u025A\x87\u0261\x89\u0263\x8B\u0266\x8D\u0268\x8F\u026A\x91\u026D\x93\u0270\x95\u0272\x97\u0274\x99\u0277\x9B\u027A\x9D\u027D\x9F\u027F\xA1\u0282\xA3\u0285\xA5\u0288\xA7\u028C\xA9\u0290\xAB\u0293\xAD\u0296\xAF\u0299\xB1\u029C\xB3\u029F\xB5\u02A3\xB7\u02A6\xB9\u02A9\xBB\u02B0\xBD\u02B5\xBF\u02BD\xC1\u02C0\xC3\u02C9\xC5\u0388\xC7\u0461\xC9\u053F\xCB\u0557\xCD\u0571\xCF\u057C\xD1\u0584\xD3\u058A\xD5\u0594\xD7\u0598\xD9\u05DA\xDB\u05DC\xDD\u05EC\xDF\u05EF\xE1\u0649\xE3\u064B\xE5\u0655\xE7\u065B\xE9\u0664\xEB\u066E\xED\u0677\xEF\u067D\xF1\u0686\xF3\u068E\xF5\u0697\xF7\u069F\xF9\u06A7\xFB\u06AE\xFD\u06B6\xFF\u06BB\u0101\u06C0\u0103\u06C5\u0105\u06D1\u0107\u06DA\u0109\u06E2\u010B\u06E9\u010D\u06EB\u010F\u070F\u0111\u0714\u0113\u0719\u0115\u071C\u0117\u072F\u0119\u0735\u011B\u0743\u011D\u011E\x07r\u011E\u011F\x07t\u011F\u0120\x07c\u0120\u0121\x07i\u0121\u0122\x07o\u0122\u0123\x07c\u0123\u0124\u0125\x07=\u0125\u0126\u0127\x07~\u0127\u0128\x07~\u0128\b\u0129\u012A\x07`\u012A\n\u012B\u012C\x07\x80\u012C\f\u012D\u012E\x07@\u012E\u012F\x07?\u012F\u0130\u0131\x07@\u0131\u0132\u0133\x07>\u0133\u0134\u0135\x07>\u0135\u0136\x07?\u0136\u0137\u0138\x07?\u0138\u0139\u013A\x07c\u013A\u013B\x07u\u013B\u013C\u013D\x07k\u013D\u013E\x07o\u013E\u013F\x07r\u013F\u0140\x07q\u0140\u0141\x07t\u0141\u0142\x07v\u0142\u0143\u0144\x07,\u0144\u0145\u0146\x07h\u0146\u0147\x07t\u0147\u0148\x07q\u0148\u0149\x07o\u0149\u014A\u014B\x07}\u014B 
\u014C\u014D\x07.\u014D\"\u014E\u014F\x07\x7F\u014F$\u0150\u0151\x07c\u0151\u0152\x07d\u0152\u0153\x07u\u0153\u0154\x07v\u0154\u0155\x07t\u0155\u0156\x07c\u0156\u0157\x07e\u0157\u0158\x07v\u0158&\u0159\u015A\x07e\u015A\u015B\x07q\u015B\u015C\x07p\u015C\u015D\x07v\u015D\u015E\x07t\u015E\u015F\x07c\u015F\u0160\x07e\u0160\u0161\x07v\u0161(\u0162\u0163\x07k\u0163\u0164\x07p\u0164\u0165\x07v\u0165\u0166\x07g\u0166\u0167\x07t\u0167\u0168\x07h\u0168\u0169\x07c\u0169\u016A\x07e\u016A\u016B\x07g\u016B*\u016C\u016D\x07n\u016D\u016E\x07k\u016E\u016F\x07d\u016F\u0170\x07t\u0170\u0171\x07c\u0171\u0172\x07t\u0172\u0173\x07{\u0173,\u0174\u0175\x07k\u0175\u0176\x07u\u0176.\u0177\u0178\x07*\u01780\u0179\u017A\x07+\u017A2\u017B\u017C\x07g\u017C\u017D\x07t\u017D\u017E\x07t\u017E\u017F\x07q\u017F\u0180\x07t\u01804\u0181\u0182\x07w\u0182\u0183\x07u\u0183\u0184\x07k\u0184\u0185\x07p\u0185\u0186\x07i\u01866\u0187\u0188\x07h\u0188\u0189\x07q\u0189\u018A\x07t\u018A8\u018B\u018C\x07u\u018C\u018D\x07v\u018D\u018E\x07t\u018E\u018F\x07w\u018F\u0190\x07e\u0190\u0191\x07v\u0191:\u0192\u0193\x07o\u0193\u0194\x07q\u0194\u0195\x07f\u0195\u0196\x07k\u0196\u0197\x07h\u0197\u0198\x07k\u0198\u0199\x07g\u0199\u019A\x07t\u019A<\u019B\u019C\x07h\u019C\u019D\x07w\u019D\u019E\x07p\u019E\u019F\x07e\u019F\u01A0\x07v\u01A0\u01A1\x07k\u01A1\u01A2\x07q\u01A2\u01A3\x07p\u01A3>\u01A4\u01A5\x07t\u01A5\u01A6\x07g\u01A6\u01A7\x07v\u01A7\u01A8\x07w\u01A8\u01A9\x07t\u01A9\u01AA\x07p\u01AA\u01AB\x07u\u01AB@\u01AC\u01AD\x07g\u01AD\u01AE\x07x\u01AE\u01AF\x07g\u01AF\u01B0\x07p\u01B0\u01B1\x07v\u01B1B\u01B2\u01B3\x07g\u01B3\u01B4\x07p\u01B4\u01B5\x07w\u01B5\u01B6\x07o\u01B6D\u01B7\u01B8\x07]\u01B8F\u01B9\u01BA\x07_\u01BAH\u01BB\u01BC\x07c\u01BC\u01BD\x07f\u01BD\u01BE\x07f\u01BE\u01BF\x07t\u01BF\u01C0\x07g\u01C0\u01C1\x07u\u01C1\u01C2\x07u\u01C2J\u01C3\u01C4\x070\u01C4L\u01C5\u01C6\x07o\u01C6\u01C7\x07c\u01C7\u01C8\x07r\u01C8\u01C9\x07r\u01C9\u01CA\x07k\u01CA\u01CB\x07p\u01CB\u01CC\x07i\u01CCN\u01CD\u01CE\x07?\u01CE\u01CF\x07
@\u01CFP\u01D0\u01D1\x07o\u01D1\u01D2\x07g\u01D2\u01D3\x07o\u01D3\u01D4\x07q\u01D4\u01D5\x07t\u01D5\u01D6\x07{\u01D6R\u01D7\u01D8\x07u\u01D8\u01D9\x07v\u01D9\u01DA\x07q\u01DA\u01DB\x07t\u01DB\u01DC\x07c\u01DC\u01DD\x07i\u01DD\u01DE\x07g\u01DET\u01DF\u01E0\x07e\u01E0\u01E1\x07c\u01E1\u01E2\x07n\u01E2\u01E3\x07n\u01E3\u01E4\x07f\u01E4\u01E5\x07c\u01E5\u01E6\x07v\u01E6\u01E7\x07c\u01E7V\u01E8\u01E9\x07k\u01E9\u01EA\x07h\u01EAX\u01EB\u01EC\x07g\u01EC\u01ED\x07n\u01ED\u01EE\x07u\u01EE\u01EF\x07g\u01EFZ\u01F0\u01F1\x07v\u01F1\u01F2\x07t\u01F2\u01F3\x07{\u01F3\\\u01F4\u01F5\x07e\u01F5\u01F6\x07c\u01F6\u01F7\x07v\u01F7\u01F8\x07e\u01F8\u01F9\x07j\u01F9^\u01FA\u01FB\x07y\u01FB\u01FC\x07j\u01FC\u01FD\x07k\u01FD\u01FE\x07n\u01FE\u01FF\x07g\u01FF`\u0200\u0201\x07w\u0201\u0202\x07p\u0202\u0203\x07e\u0203\u0204\x07j\u0204\u0205\x07g\u0205\u0206\x07e\u0206\u0207\x07m\u0207\u0208\x07g\u0208\u0209\x07f\u0209b\u020A\u020B\x07c\u020B\u020C\x07u\u020C\u020D\x07u\u020D\u020E\x07g\u020E\u020F\x07o\u020F\u0210\x07d\u0210\u0211\x07n\u0211\u0212\x07{\u0212d\u0213\u0214\x07f\u0214\u0215\x07q\u0215f\u0216\u0217\x07t\u0217\u0218\x07g\u0218\u0219\x07v\u0219\u021A\x07w\u021A\u021B\x07t\u021B\u021C\x07p\u021Ch\u021D\u021E\x07v\u021E\u021F\x07j\u021F\u0220\x07t\u0220\u0221\x07q\u0221\u0222\x07y\u0222j\u0223\u0224\x07g\u0224\u0225\x07o\u0225\u0226\x07k\u0226\u0227\x07v\u0227l\u0228\u0229\x07t\u0229\u022A\x07g\u022A\u022B\x07x\u022B\u022C\x07g\u022C\u022D\x07t\u022D\u022E\x07v\u022En\u022F\u0230\x07x\u0230\u0231\x07c\u0231\u0232\x07t\u0232p\u0233\u0234\x07d\u0234\u0235\x07q\u0235\u0236\x07q\u0236\u0237\x07n\u0237r\u0238\u0239\x07u\u0239\u023A\x07v\u023A\u023B\x07t\u023B\u023C\x07k\u023C\u023D\x07p\u023D\u023E\x07i\u023Et\u023F\u0240\x07d\u0240\u0241\x07{\u0241\u0242\x07v\u0242\u0243\x07g\u0243v\u0244\u0245\x07-\u0245\u0246\x07-\u0246x\u0247\u0248\x07/\u0248\u0249\x07/\u0249z\u024A\u024B\x07p\u024B\u024C\x07g\u024C\u024D\x07y\u024D|\u024E\u024F\x07<\u024F~\u0250\u0251\x07-\u0251\x80\u0252\u0253\x07/\
u0253\x82\u0254\u0255\x07c\u0255\u0256\x07h\u0256\u0257\x07v\u0257\u0258\x07g\u0258\u0259\x07t\u0259\x84\u025A\u025B\x07f\u025B\u025C\x07g\u025C\u025D\x07n\u025D\u025E\x07g\u025E\u025F\x07v\u025F\u0260\x07g\u0260\x86\u0261\u0262\x07#\u0262\x88\u0263\u0264\x07,\u0264\u0265\x07,\u0265\x8A\u0266\u0267\x071\u0267\x8C\u0268\u0269\x07'\u0269\x8E\u026A\u026B\x07>\u026B\u026C\x07>\u026C\x90\u026D\u026E\x07@\u026E\u026F\x07@\u026F\x92\u0270\u0271\x07(\u0271\x94\u0272\u0273\x07~\u0273\x96\u0274\u0275\x07?\u0275\u0276\x07?\u0276\x98\u0277\u0278\x07#\u0278\u0279\x07?\u0279\x9A\u027A\u027B\x07(\u027B\u027C\x07(\u027C\x9C\u027D\u027E\x07A\u027E\x9E\u027F\u0280\x07~\u0280\u0281\x07?\u0281\xA0\u0282\u0283\x07`\u0283\u0284\x07?\u0284\xA2\u0285\u0286\x07(\u0286\u0287\x07?\u0287\xA4\u0288\u0289\x07>\u0289\u028A\x07>\u028A\u028B\x07?\u028B\xA6\u028C\u028D\x07@\u028D\u028E\x07@\u028E\u028F\x07?\u028F\xA8\u0290\u0291\x07-\u0291\u0292\x07?\u0292\xAA\u0293\u0294\x07/\u0294\u0295\x07?\u0295\xAC\u0296\u0297\x07,\u0297\u0298\x07?\u0298\xAE\u0299\u029A\x071\u029A\u029B\x07?\u029B\xB0\u029C\u029D\x07'\u029D\u029E\x07?\u029E\xB2\u029F\u02A0\x07n\u02A0\u02A1\x07g\u02A1\u02A2\x07v\u02A2\xB4\u02A3\u02A4\x07<\u02A4\u02A5\x07?\u02A5\xB6\u02A6\u02A7\x07?\u02A7\u02A8\x07<\u02A8\xB8\u02A9\u02AA\x07u\u02AA\u02AB\x07y\u02AB\u02AC\x07k\u02AC\u02AD\x07v\u02AD\u02AE\x07e\u02AE\u02AF\x07j\u02AF\xBA\u02B0\u02B1\x07e\u02B1\u02B2\x07c\u02B2\u02B3\x07u\u02B3\u02B4\x07g\u02B4\xBC\u02B5\u02B6\x07f\u02B6\u02B7\x07g\u02B7\u02B8\x07h\u02B8\u02B9\x07c\u02B9\u02BA\x07w\u02BA\u02BB\x07n\u02BB\u02BC\x07v\u02BC\xBE\u02BD\u02BE\x07/\u02BE\u02BF\x07@\u02BF\xC0\u02C0\u02C1\x07e\u02C1\u02C2\x07c\u02C2\u02C3\x07n\u02C3\u02C4\x07n\u02C4\u02C5\x07d\u02C5\u02C6\x07c\u02C6\u02C7\x07e\u02C7\u02C8\x07m\u02C8\xC2\u02C9\u02CA\x07q\u02CA\u02CB\x07x\u02CB\u02CC\x07g\u02CC\u02CD\x07t\u02CD\u02CE\x07t\u02CE\u02CF\x07k\u02CF\u02D0\x07f\u02D0\u02D1\x07g\u02D1\xC4\u02D2\u02D3\x07k\u02D3\u02D4\x07p\u02D4\u0389\x07v\u02D5\u02D6\x07k\u02D6\u02D7
\x07p\u02D7\u02D8\x07v\u02D8\u0389\x07:\u02D9\u02DA\x07k\u02DA\u02DB\x07p\u02DB\u02DC\x07v\u02DC\u02DD\x073\u02DD\u0389\x078\u02DE\u02DF\x07k\u02DF\u02E0\x07p\u02E0\u02E1\x07v\u02E1\u02E2\x074\u02E2\u0389\x076\u02E3\u02E4\x07k\u02E4\u02E5\x07p\u02E5\u02E6\x07v\u02E6\u02E7\x075\u02E7\u0389\x074\u02E8\u02E9\x07k\u02E9\u02EA\x07p\u02EA\u02EB\x07v\u02EB\u02EC\x076\u02EC\u0389\x072\u02ED\u02EE\x07k\u02EE\u02EF\x07p\u02EF\u02F0\x07v\u02F0\u02F1\x076\u02F1\u0389\x07:\u02F2\u02F3\x07k\u02F3\u02F4\x07p\u02F4\u02F5\x07v\u02F5\u02F6\x077\u02F6\u0389\x078\u02F7\u02F8\x07k\u02F8\u02F9\x07p\u02F9\u02FA\x07v\u02FA\u02FB\x078\u02FB\u0389\x076\u02FC\u02FD\x07k\u02FD\u02FE\x07p\u02FE\u02FF\x07v\u02FF\u0300\x079\u0300\u0389\x074\u0301\u0302\x07k\u0302\u0303\x07p\u0303\u0304\x07v\u0304\u0305\x07:\u0305\u0389\x072\u0306\u0307\x07k\u0307\u0308\x07p\u0308\u0309\x07v\u0309\u030A\x07:\u030A\u0389\x07:\u030B\u030C\x07k\u030C\u030D\x07p\u030D\u030E\x07v\u030E\u030F\x07;\u030F\u0389\x078\u0310\u0311\x07k\u0311\u0312\x07p\u0312\u0313\x07v\u0313\u0314\x073\u0314\u0315\x072\u0315\u0389\x076\u0316\u0317\x07k\u0317\u0318\x07p\u0318\u0319\x07v\u0319\u031A\x073\u031A\u031B\x073\u031B\u0389\x074\u031C\u031D\x07k\u031D\u031E\x07p\u031E\u031F\x07v\u031F\u0320\x073\u0320\u0321\x074\u0321\u0389\x072\u0322\u0323\x07k\u0323\u0324\x07p\u0324\u0325\x07v\u0325\u0326\x073\u0326\u0327\x074\u0327\u0389\x07:\u0328\u0329\x07k\u0329\u032A\x07p\u032A\u032B\x07v\u032B\u032C\x073\u032C\u032D\x075\u032D\u0389\x078\u032E\u032F\x07k\u032F\u0330\x07p\u0330\u0331\x07v\u0331\u0332\x073\u0332\u0333\x076\u0333\u0389\x076\u0334\u0335\x07k\u0335\u0336\x07p\u0336\u0337\x07v\u0337\u0338\x073\u0338\u0339\x077\u0339\u0389\x074\u033A\u033B\x07k\u033B\u033C\x07p\u033C\u033D\x07v\u033D\u033E\x073\u033E\u033F\x078\u033F\u0389\x072\u0340\u0341\x07k\u0341\u0342\x07p\u0342\u0343\x07v\u0343\u0344\x073\u0344\u0345\x078\u0345\u0389\x07:\u0346\u0347\x07k\u0347\u0348\x07p\u0348\u0349\x07v\u0349\u034A\x073\u034A\u034B\x079\u034B\u0389\x078\u034C
\u034D\x07k\u034D\u034E\x07p\u034E\u034F\x07v\u034F\u0350\x073\u0350\u0351\x07:\u0351\u0389\x076\u0352\u0353\x07k\u0353\u0354\x07p\u0354\u0355\x07v\u0355\u0356\x073\u0356\u0357\x07;\u0357\u0389\x074\u0358\u0359\x07k\u0359\u035A\x07p\u035A\u035B\x07v\u035B\u035C\x074\u035C\u035D\x072\u035D\u0389\x072\u035E\u035F\x07k\u035F\u0360\x07p\u0360\u0361\x07v\u0361\u0362\x074\u0362\u0363\x072\u0363\u0389\x07:\u0364\u0365\x07k\u0365\u0366\x07p\u0366\u0367\x07v\u0367\u0368\x074\u0368\u0369\x073\u0369\u0389\x078\u036A\u036B\x07k\u036B\u036C\x07p\u036C\u036D\x07v\u036D\u036E\x074\u036E\u036F\x074\u036F\u0389\x076\u0370\u0371\x07k\u0371\u0372\x07p\u0372\u0373\x07v\u0373\u0374\x074\u0374\u0375\x075\u0375\u0389\x074\u0376\u0377\x07k\u0377\u0378\x07p\u0378\u0379\x07v\u0379\u037A\x074\u037A\u037B\x076\u037B\u0389\x072\u037C\u037D\x07k\u037D\u037E\x07p\u037E\u037F\x07v\u037F\u0380\x074\u0380\u0381\x076\u0381\u0389\x07:\u0382\u0383\x07k\u0383\u0384\x07p\u0384\u0385\x07v\u0385\u0386\x074\u0386\u0387\x077\u0387\u0389\x078\u0388\u02D2\u0388\u02D5\u0388\u02D9\u0388\u02DE\u0388\u02E3\u0388\u02E8\u0388\u02ED\u0388\u02F2\u0388\u02F7\u0388\u02FC\u0388\u0301\u0388\u0306\u0388\u030B\u0388\u0310\u0388\u0316\u0388\u031C\u0388\u0322\u0388\u0328\u0388\u032E\u0388\u0334\u0388\u033A\u0388\u0340\u0388\u0346\u0388\u034C\u0388\u0352\u0388\u0358\u0388\u035E\u0388\u0364\u0388\u036A\u0388\u0370\u0388\u0376\u0388\u037C\u0388\u0382\u0389\xC6\u038A\u038B\x07w\u038B\u038C\x07k\u038C\u038D\x07p\u038D\u0462\x07v\u038E\u038F\x07w\u038F\u0390\x07k\u0390\u0391\x07p\u0391\u0392\x07v\u0392\u0462\x07:\u0393\u0394\x07w\u0394\u0395\x07k\u0395\u0396\x07p\u0396\u0397\x07v\u0397\u0398\x073\u0398\u0462\x078\u0399\u039A\x07w\u039A\u039B\x07k\u039B\u039C\x07p\u039C\u039D\x07v\u039D\u039E\x074\u039E\u0462\x076"; -SolidityLexer._serializedATNSegment2 = 
"\u039F\u03A0\x07w\u03A0\u03A1\x07k\u03A1\u03A2\x07p\u03A2\u03A3\x07v\u03A3\u03A4\x075\u03A4\u0462\x074\u03A5\u03A6\x07w\u03A6\u03A7\x07k\u03A7\u03A8\x07p\u03A8\u03A9\x07v\u03A9\u03AA\x076\u03AA\u0462\x072\u03AB\u03AC\x07w\u03AC\u03AD\x07k\u03AD\u03AE\x07p\u03AE\u03AF\x07v\u03AF\u03B0\x076\u03B0\u0462\x07:\u03B1\u03B2\x07w\u03B2\u03B3\x07k\u03B3\u03B4\x07p\u03B4\u03B5\x07v\u03B5\u03B6\x077\u03B6\u0462\x078\u03B7\u03B8\x07w\u03B8\u03B9\x07k\u03B9\u03BA\x07p\u03BA\u03BB\x07v\u03BB\u03BC\x078\u03BC\u0462\x076\u03BD\u03BE\x07w\u03BE\u03BF\x07k\u03BF\u03C0\x07p\u03C0\u03C1\x07v\u03C1\u03C2\x079\u03C2\u0462\x074\u03C3\u03C4\x07w\u03C4\u03C5\x07k\u03C5\u03C6\x07p\u03C6\u03C7\x07v\u03C7\u03C8\x07:\u03C8\u0462\x072\u03C9\u03CA\x07w\u03CA\u03CB\x07k\u03CB\u03CC\x07p\u03CC\u03CD\x07v\u03CD\u03CE\x07:\u03CE\u0462\x07:\u03CF\u03D0\x07w\u03D0\u03D1\x07k\u03D1\u03D2\x07p\u03D2\u03D3\x07v\u03D3\u03D4\x07;\u03D4\u0462\x078\u03D5\u03D6\x07w\u03D6\u03D7\x07k\u03D7\u03D8\x07p\u03D8\u03D9\x07v\u03D9\u03DA\x073\u03DA\u03DB\x072\u03DB\u0462\x076\u03DC\u03DD\x07w\u03DD\u03DE\x07k\u03DE\u03DF\x07p\u03DF\u03E0\x07v\u03E0\u03E1\x073\u03E1\u03E2\x073\u03E2\u0462\x074\u03E3\u03E4\x07w\u03E4\u03E5\x07k\u03E5\u03E6\x07p\u03E6\u03E7\x07v\u03E7\u03E8\x073\u03E8\u03E9\x074\u03E9\u0462\x072\u03EA\u03EB\x07w\u03EB\u03EC\x07k\u03EC\u03ED\x07p\u03ED\u03EE\x07v\u03EE\u03EF\x073\u03EF\u03F0\x074\u03F0\u0462\x07:\u03F1\u03F2\x07w\u03F2\u03F3\x07k\u03F3\u03F4\x07p\u03F4\u03F5\x07v\u03F5\u03F6\x073\u03F6\u03F7\x075\u03F7\u0462\x078\u03F8\u03F9\x07w\u03F9\u03FA\x07k\u03FA\u03FB\x07p\u03FB\u03FC\x07v\u03FC\u03FD\x073\u03FD\u03FE\x076\u03FE\u0462\x076\u03FF\u0400\x07w\u0400\u0401\x07k\u0401\u0402\x07p\u0402\u0403\x07v\u0403\u0404\x073\u0404\u0405\x077\u0405\u0462\x074\u0406\u0407\x07w\u0407\u0408\x07k\u0408\u0409\x07p\u0409\u040A\x07v\u040A\u040B\x073\u040B\u040C\x078\u040C\u0462\x072\u040D\u040E\x07w\u040E\u040F\x07k\u040F\u0410\x07p\u0410\u0411\x07v\u0411\u0412\x073\u0412\u0413\x078\u0413\u0462\x07:\u0414\u04
15\x07w\u0415\u0416\x07k\u0416\u0417\x07p\u0417\u0418\x07v\u0418\u0419\x073\u0419\u041A\x079\u041A\u0462\x078\u041B\u041C\x07w\u041C\u041D\x07k\u041D\u041E\x07p\u041E\u041F\x07v\u041F\u0420\x073\u0420\u0421\x07:\u0421\u0462\x076\u0422\u0423\x07w\u0423\u0424\x07k\u0424\u0425\x07p\u0425\u0426\x07v\u0426\u0427\x073\u0427\u0428\x07;\u0428\u0462\x074\u0429\u042A\x07w\u042A\u042B\x07k\u042B\u042C\x07p\u042C\u042D\x07v\u042D\u042E\x074\u042E\u042F\x072\u042F\u0462\x072\u0430\u0431\x07w\u0431\u0432\x07k\u0432\u0433\x07p\u0433\u0434\x07v\u0434\u0435\x074\u0435\u0436\x072\u0436\u0462\x07:\u0437\u0438\x07w\u0438\u0439\x07k\u0439\u043A\x07p\u043A\u043B\x07v\u043B\u043C\x074\u043C\u043D\x073\u043D\u0462\x078\u043E\u043F\x07w\u043F\u0440\x07k\u0440\u0441\x07p\u0441\u0442\x07v\u0442\u0443\x074\u0443\u0444\x074\u0444\u0462\x076\u0445\u0446\x07w\u0446\u0447\x07k\u0447\u0448\x07p\u0448\u0449\x07v\u0449\u044A\x074\u044A\u044B\x075\u044B\u0462\x074\u044C\u044D\x07w\u044D\u044E\x07k\u044E\u044F\x07p\u044F\u0450\x07v\u0450\u0451\x074\u0451\u0452\x076\u0452\u0462\x072\u0453\u0454\x07w\u0454\u0455\x07k\u0455\u0456\x07p\u0456\u0457\x07v\u0457\u0458\x074\u0458\u0459\x076\u0459\u0462\x07:\u045A\u045B\x07w\u045B\u045C\x07k\u045C\u045D\x07p\u045D\u045E\x07v\u045E\u045F\x074\u045F\u0460\x077\u0460\u0462\x078\u0461\u038A\u0461\u038E\u0461\u0393\u0461\u0399\u0461\u039F\u0461\u03A5\u0461\u03AB\u0461\u03B1\u0461\u03B7\u0461\u03BD\u0461\u03C3\u0461\u03C9\u0461\u03CF\u0461\u03D5\u0461\u03DC\u0461\u03E3\u0461\u03EA\u0461\u03F1\u0461\u03F8\u0461\u03FF\u0461\u0406\u0461\u040D\u0461\u0414\u0461\u041B\u0461\u0422\u0461\u0429\u0461\u0430\u0461\u0437\u0461\u043E\u0461\u0445\u0461\u044C\u0461\u0453\u0461\u045A\u0462\xC8\u0463\u0464\x07d\u0464\u0465\x07{\u0465\u0466\x07v\u0466\u0467\x07g\u0467\u0540\x07u\u0468\u0469\x07d\u0469\u046A\x07{\u046A\u046B\x07v\u046B\u046C\x07g\u046C\u046D\x07u\u046D\u0540\x073\u046E\u046F\x07d\u046F\u0470\x07{\u0470\u0471\x07v\u0471\u0472\x07g\u0472\u0473\x07u\u0473\u0540\x074\u0474
\u0475\x07d\u0475\u0476\x07{\u0476\u0477\x07v\u0477\u0478\x07g\u0478\u0479\x07u\u0479\u0540\x075\u047A\u047B\x07d\u047B\u047C\x07{\u047C\u047D\x07v\u047D\u047E\x07g\u047E\u047F\x07u\u047F\u0540\x076\u0480\u0481\x07d\u0481\u0482\x07{\u0482\u0483\x07v\u0483\u0484\x07g\u0484\u0485\x07u\u0485\u0540\x077\u0486\u0487\x07d\u0487\u0488\x07{\u0488\u0489\x07v\u0489\u048A\x07g\u048A\u048B\x07u\u048B\u0540\x078\u048C\u048D\x07d\u048D\u048E\x07{\u048E\u048F\x07v\u048F\u0490\x07g\u0490\u0491\x07u\u0491\u0540\x079\u0492\u0493\x07d\u0493\u0494\x07{\u0494\u0495\x07v\u0495\u0496\x07g\u0496\u0497\x07u\u0497\u0540\x07:\u0498\u0499\x07d\u0499\u049A\x07{\u049A\u049B\x07v\u049B\u049C\x07g\u049C\u049D\x07u\u049D\u0540\x07;\u049E\u049F\x07d\u049F\u04A0\x07{\u04A0\u04A1\x07v\u04A1\u04A2\x07g\u04A2\u04A3\x07u\u04A3\u04A4\x073\u04A4\u0540\x072\u04A5\u04A6\x07d\u04A6\u04A7\x07{\u04A7\u04A8\x07v\u04A8\u04A9\x07g\u04A9\u04AA\x07u\u04AA\u04AB\x073\u04AB\u0540\x073\u04AC\u04AD\x07d\u04AD\u04AE\x07{\u04AE\u04AF\x07v\u04AF\u04B0\x07g\u04B0\u04B1\x07u\u04B1\u04B2\x073\u04B2\u0540\x074\u04B3\u04B4\x07d\u04B4\u04B5\x07{\u04B5\u04B6\x07v\u04B6\u04B7\x07g\u04B7\u04B8\x07u\u04B8\u04B9\x073\u04B9\u0540\x075\u04BA\u04BB\x07d\u04BB\u04BC\x07{\u04BC\u04BD\x07v\u04BD\u04BE\x07g\u04BE\u04BF\x07u\u04BF\u04C0\x073\u04C0\u0540\x076\u04C1\u04C2\x07d\u04C2\u04C3\x07{\u04C3\u04C4\x07v\u04C4\u04C5\x07g\u04C5\u04C6\x07u\u04C6\u04C7\x073\u04C7\u0540\x077\u04C8\u04C9\x07d\u04C9\u04CA\x07{\u04CA\u04CB\x07v\u04CB\u04CC\x07g\u04CC\u04CD\x07u\u04CD\u04CE\x073\u04CE\u0540\x078\u04CF\u04D0\x07d\u04D0\u04D1\x07{\u04D1\u04D2\x07v\u04D2\u04D3\x07g\u04D3\u04D4\x07u\u04D4\u04D5\x073\u04D5\u0540\x079\u04D6\u04D7\x07d\u04D7\u04D8\x07{\u04D8\u04D9\x07v\u04D9\u04DA\x07g\u04DA\u04DB\x07u\u04DB\u04DC\x073\u04DC\u0540\x07:\u04DD\u04DE\x07d\u04DE\u04DF\x07{\u04DF\u04E0\x07v\u04E0\u04E1\x07g\u04E1\u04E2\x07u\u04E2\u04E3\x073\u04E3\u0540\x07;\u04E4\u04E5\x07d\u04E5\u04E6\x07{\u04E6\u04E7\x07v\u04E7\u04E8\x07g\u04E8\u04E9\x07u\u04E9\u04EA\x074
\u04EA\u0540\x072\u04EB\u04EC\x07d\u04EC\u04ED\x07{\u04ED\u04EE\x07v\u04EE\u04EF\x07g\u04EF\u04F0\x07u\u04F0\u04F1\x074\u04F1\u0540\x073\u04F2\u04F3\x07d\u04F3\u04F4\x07{\u04F4\u04F5\x07v\u04F5\u04F6\x07g\u04F6\u04F7\x07u\u04F7\u04F8\x074\u04F8\u0540\x074\u04F9\u04FA\x07d\u04FA\u04FB\x07{\u04FB\u04FC\x07v\u04FC\u04FD\x07g\u04FD\u04FE\x07u\u04FE\u04FF\x074\u04FF\u0540\x075\u0500\u0501\x07d\u0501\u0502\x07{\u0502\u0503\x07v\u0503\u0504\x07g\u0504\u0505\x07u\u0505\u0506\x074\u0506\u0540\x076\u0507\u0508\x07d\u0508\u0509\x07{\u0509\u050A\x07v\u050A\u050B\x07g\u050B\u050C\x07u\u050C\u050D\x074\u050D\u0540\x077\u050E\u050F\x07d\u050F\u0510\x07{\u0510\u0511\x07v\u0511\u0512\x07g\u0512\u0513\x07u\u0513\u0514\x074\u0514\u0540\x078\u0515\u0516\x07d\u0516\u0517\x07{\u0517\u0518\x07v\u0518\u0519\x07g\u0519\u051A\x07u\u051A\u051B\x074\u051B\u0540\x079\u051C\u051D\x07d\u051D\u051E\x07{\u051E\u051F\x07v\u051F\u0520\x07g\u0520\u0521\x07u\u0521\u0522\x074\u0522\u0540\x07:\u0523\u0524\x07d\u0524\u0525\x07{\u0525\u0526\x07v\u0526\u0527\x07g\u0527\u0528\x07u\u0528\u0529\x074\u0529\u0540\x07;\u052A\u052B\x07d\u052B\u052C\x07{\u052C\u052D\x07v\u052D\u052E\x07g\u052E\u052F\x07u\u052F\u0530\x075\u0530\u0540\x072\u0531\u0532\x07d\u0532\u0533\x07{\u0533\u0534\x07v\u0534\u0535\x07g\u0535\u0536\x07u\u0536\u0537\x075\u0537\u0540\x073\u0538\u0539\x07d\u0539\u053A\x07{\u053A\u053B\x07v\u053B\u053C\x07g\u053C\u053D\x07u\u053D\u053E\x075\u053E\u0540\x074\u053F\u0463\u053F\u0468\u053F\u046E\u053F\u0474\u053F\u047A\u053F\u0480\u053F\u0486\u053F\u048C\u053F\u0492\u053F\u0498\u053F\u049E\u053F\u04A5\u053F\u04AC\u053F\u04B3\u053F\u04BA\u053F\u04C1\u053F\u04C8\u053F\u04CF\u053F\u04D6\u053F\u04DD\u053F\u04E4\u053F\u04EB\u053F\u04F2\u053F\u04F9\u053F\u0500\u053F\u0507\u053F\u050E\u053F\u0515\u053F\u051C\u053F\u0523\u053F\u052A\u053F\u0531\u053F\u0538\u0540\xCA\u0541\u0542\x07h\u0542\u0543\x07k\u0543\u0544\x07z\u0544\u0545\x07g\u0545\u0558\x07f\u0546\u0547\x07h\u0547\u0548\x07k\u0548\u0549\x07z\u0549\u054A\
x07g\u054A\u054B\x07f\u054B\u054D\u054C\u054E \u054D\u054C\u054E\u054F\u054F\u054D\u054F\u0550\u0550\u0551\u0551\u0553\x07z\u0552\u0554 \u0553\u0552\u0554\u0555\u0555\u0553\u0555\u0556\u0556\u0558\u0557\u0541\u0557\u0546\u0558\xCC\u0559\u055A\x07w\u055A\u055B\x07h\u055B\u055C\x07k\u055C\u055D\x07z\u055D\u055E\x07g\u055E\u0572\x07f\u055F\u0560\x07w\u0560\u0561\x07h\u0561\u0562\x07k\u0562\u0563\x07z\u0563\u0564\x07g\u0564\u0565\x07f\u0565\u0567\u0566\u0568 \u0567\u0566\u0568\u0569\u0569\u0567\u0569\u056A\u056A\u056B\u056B\u056D\x07z\u056C\u056E \u056D\u056C\u056E\u056F\u056F\u056D\u056F\u0570\u0570\u0572\u0571\u0559\u0571\u055F\u0572\xCE\u0573\u0574\x07v\u0574\u0575\x07t\u0575\u0576\x07w\u0576\u057D\x07g\u0577\u0578\x07h\u0578\u0579\x07c\u0579\u057A\x07n\u057A\u057B\x07u\u057B\u057D\x07g\u057C\u0573\u057C\u0577\u057D\xD0\u057E\u0585\xD3j\u057F\u0581\xD3j\u0580\u057F\u0580\u0581\u0581\u0582\u0582\u0583\x070\u0583\u0585\xD3j\u0584\u057E\u0584\u0580\u0585\u0588\u0586\u0587 \u0587\u0589\xD3j\u0588\u0586\u0588\u0589\u0589\xD2\u058A\u0591 \u058B\u058D\x07a\u058C\u058B\u058C\u058D\u058D\u058E\u058E\u0590 \u058F\u058C\u0590\u0593\u0591\u058F\u0591\u0592\u0592\xD4\u0593\u0591\u0594\u0595\x072\u0595\u0596 
\u0596\u0597\xD7l\u0597\xD6\u0598\u059F\xDFp\u0599\u059B\x07a\u059A\u0599\u059A\u059B\u059B\u059C\u059C\u059E\xDFp\u059D\u059A\u059E\u05A1\u059F\u059D\u059F\u05A0\u05A0\xD8\u05A1\u059F\u05A2\u05A3\x07y\u05A3\u05A4\x07g\u05A4\u05DB\x07k\u05A5\u05A6\x07i\u05A6\u05A7\x07y\u05A7\u05A8\x07g\u05A8\u05DB\x07k\u05A9\u05AA\x07u\u05AA\u05AB\x07|\u05AB\u05AC\x07c\u05AC\u05AD\x07d\u05AD\u05DB\x07q\u05AE\u05AF\x07h\u05AF\u05B0\x07k\u05B0\u05B1\x07p\u05B1\u05B2\x07p\u05B2\u05B3\x07g\u05B3\u05DB\x07{\u05B4\u05B5\x07g\u05B5\u05B6\x07v\u05B6\u05B7\x07j\u05B7\u05B8\x07g\u05B8\u05DB\x07t\u05B9\u05BA\x07u\u05BA\u05BB\x07g\u05BB\u05BC\x07e\u05BC\u05BD\x07q\u05BD\u05BE\x07p\u05BE\u05BF\x07f\u05BF\u05DB\x07u\u05C0\u05C1\x07o\u05C1\u05C2\x07k\u05C2\u05C3\x07p\u05C3\u05C4\x07w\u05C4\u05C5\x07v\u05C5\u05C6\x07g\u05C6\u05DB\x07u\u05C7\u05C8\x07j\u05C8\u05C9\x07q\u05C9\u05CA\x07w\u05CA\u05CB\x07t\u05CB\u05DB\x07u\u05CC\u05CD\x07f\u05CD\u05CE\x07c\u05CE\u05CF\x07{\u05CF\u05DB\x07u\u05D0\u05D1\x07y\u05D1\u05D2\x07g\u05D2\u05D3\x07g\u05D3\u05D4\x07m\u05D4\u05DB\x07u\u05D5\u05D6\x07{\u05D6\u05D7\x07g\u05D7\u05D8\x07c\u05D8\u05D9\x07t\u05D9\u05DB\x07u\u05DA\u05A2\u05DA\u05A5\u05DA\u05A9\u05DA\u05AE\u05DA\u05B4\u05DA\u05B9\u05DA\u05C0\u05DA\u05C7\u05DA\u05CC\u05DA\u05D0\u05DA\u05D5\u05DB\xDA\u05DC\u05DD\x07j\u05DD\u05DE\x07g\u05DE\u05DF\x07z\u05DF\u05EA\u05E0\u05E2\x07$\u05E1\u05E3\xD7l\u05E2\u05E1\u05E2\u05E3\u05E3\u05E4\u05E4\u05EB\x07$\u05E5\u05E7\x07)\u05E6\u05E8\xD7l\u05E7\u05E6\u05E7\u05E8\u05E8\u05E9\u05E9\u05EB\x07)\u05EA\u05E0\u05EA\u05E5\u05EB\xDC\u05EC\u05ED\xDFp\u05ED\u05EE\xDFp\u05EE\xDE\u05EF\u05F0 
\u05F0\xE0\u05F1\u05F2\x07c\u05F2\u05F3\x07d\u05F3\u05F4\x07u\u05F4\u05F5\x07v\u05F5\u05F6\x07t\u05F6\u05F7\x07c\u05F7\u05F8\x07e\u05F8\u064A\x07v\u05F9\u05FA\x07c\u05FA\u05FB\x07h\u05FB\u05FC\x07v\u05FC\u05FD\x07g\u05FD\u064A\x07t\u05FE\u05FF\x07e\u05FF\u0600\x07c\u0600\u0601\x07u\u0601\u064A\x07g\u0602\u0603\x07e\u0603\u0604\x07c\u0604\u0605\x07v\u0605\u0606\x07e\u0606\u064A\x07j\u0607\u0608\x07f\u0608\u0609\x07g\u0609\u060A\x07h\u060A\u060B\x07c\u060B\u060C\x07w\u060C\u060D\x07n\u060D\u064A\x07v\u060E\u060F\x07h\u060F\u0610\x07k\u0610\u0611\x07p\u0611\u0612\x07c\u0612\u064A\x07n\u0613\u0614\x07k\u0614\u064A\x07p\u0615\u0616\x07k\u0616\u0617\x07p\u0617\u0618\x07n\u0618\u0619\x07k\u0619\u061A\x07p\u061A\u064A\x07g\u061B\u061C\x07n\u061C\u061D\x07g\u061D\u064A\x07v\u061E\u061F\x07o\u061F\u0620\x07c\u0620\u0621\x07v\u0621\u0622\x07e\u0622\u064A\x07j\u0623\u0624\x07p\u0624\u0625\x07w\u0625\u0626\x07n\u0626\u064A\x07n\u0627\u0628\x07q\u0628\u064A\x07h\u0629\u062A\x07t\u062A\u062B\x07g\u062B\u062C\x07n\u062C\u062D\x07q\u062D\u062E\x07e\u062E\u062F\x07c\u062F\u0630\x07v\u0630\u0631\x07c\u0631\u0632\x07d\u0632\u0633\x07n\u0633\u064A\x07g\u0634\u0635\x07u\u0635\u0636\x07v\u0636\u0637\x07c\u0637\u0638\x07v\u0638\u0639\x07k\u0639\u064A\x07e\u063A\u063B\x07u\u063B\u063C\x07y\u063C\u063D\x07k\u063D\u063E\x07v\u063E\u063F\x07e\u063F\u064A\x07j\u0640\u0641\x07v\u0641\u0642\x07t\u0642\u064A\x07{\u0643\u0644\x07v\u0644\u0645\x07{\u0645\u0646\x07r\u0646\u0647\x07g\u0647\u0648\x07q\u0648\u064A\x07h\u0649\u05F1\u0649\u05F9\u0649\u05FE\u0649\u0602\u0649\u0607\u0649\u060E\u0649\u0613\u0649\u0615\u0649\u061B\u0649\u061E\u0649\u0623\u0649\u0627\u0649\u0629\u0649\u0634\u0649\u063A\u0649\u0640\u0649\u0643\u064A\xE2\u064B\u064C\x07c\u064C\u064D\x07p\u064D\u064E\x07q\u064E\u064F\x07p\u064F\u0650\x07{\u0650\u0651\x07o\u0651\u0652\x07q\u0652\u0653\x07w\u0653\u0654\x07u\u0654\xE4\u0655\u0656\x07d\u0656\u0657\x07t\u0657\u0658\x07g\u0658\u0659\x07c\u0659\u065A\x07m\u065A\xE6\u065B\u065C\x07e\u065
C\u065D\x07q\u065D\u065E\x07p\u065E\u065F\x07u\u065F\u0660\x07v\u0660\u0661\x07c\u0661\u0662\x07p\u0662\u0663\x07v\u0663\xE8\u0664\u0665\x07k\u0665\u0666\x07o\u0666\u0667\x07o\u0667\u0668\x07w\u0668\u0669\x07v\u0669\u066A\x07c\u066A\u066B\x07d\u066B\u066C\x07n\u066C\u066D\x07g\u066D\xEA\u066E\u066F\x07e\u066F\u0670\x07q\u0670\u0671\x07p\u0671\u0672\x07v\u0672\u0673\x07k\u0673\u0674\x07p\u0674\u0675\x07w\u0675"; -SolidityLexer._serializedATNSegment3 = "\u0676\x07g\u0676\xEC\u0677\u0678\x07n\u0678\u0679\x07g\u0679\u067A\x07c\u067A\u067B\x07x\u067B\u067C\x07g\u067C\xEE\u067D\u067E\x07g\u067E\u067F\x07z\u067F\u0680\x07v\u0680\u0681\x07g\u0681\u0682\x07t\u0682\u0683\x07p\u0683\u0684\x07c\u0684\u0685\x07n\u0685\xF0\u0686\u0687\x07k\u0687\u0688\x07p\u0688\u0689\x07f\u0689\u068A\x07g\u068A\u068B\x07z\u068B\u068C\x07g\u068C\u068D\x07f\u068D\xF2\u068E\u068F\x07k\u068F\u0690\x07p\u0690\u0691\x07v\u0691\u0692\x07g\u0692\u0693\x07t\u0693\u0694\x07p\u0694\u0695\x07c\u0695\u0696\x07n\u0696\xF4\u0697\u0698\x07r\u0698\u0699\x07c\u0699\u069A\x07{\u069A\u069B\x07c\u069B\u069C\x07d\u069C\u069D\x07n\u069D\u069E\x07g\u069E\xF6\u069F\u06A0\x07r\u06A0\u06A1\x07t\u06A1\u06A2\x07k\u06A2\u06A3\x07x\u06A3\u06A4\x07c\u06A4\u06A5\x07v\u06A5\u06A6\x07g\u06A6\xF8\u06A7\u06A8\x07r\u06A8\u06A9\x07w\u06A9\u06AA\x07d\u06AA\u06AB\x07n\u06AB\u06AC\x07k\u06AC\u06AD\x07e\u06AD\xFA\u06AE\u06AF\x07x\u06AF\u06B0\x07k\u06B0\u06B1\x07t\u06B1\u06B2\x07v\u06B2\u06B3\x07w\u06B3\u06B4\x07c\u06B4\u06B5\x07n\u06B5\xFC\u06B6\u06B7\x07r\u06B7\u06B8\x07w\u06B8\u06B9\x07t\u06B9\u06BA\x07g\u06BA\xFE\u06BB\u06BC\x07v\u06BC\u06BD\x07{\u06BD\u06BE\x07r\u06BE\u06BF\x07g\u06BF\u0100\u06C0\u06C1\x07x\u06C1\u06C2\x07k\u06C2\u06C3\x07g\u06C3\u06C4\x07y\u06C4\u0102\u06C5\u06C6\x07e\u06C6\u06C7\x07q\u06C7\u06C8\x07p\u06C8\u06C9\x07u\u06C9\u06CA\x07v\u06CA\u06CB\x07t\u06CB\u06CC\x07w\u06CC\u06CD\x07e\u06CD\u06CE\x07v\u06CE\u06CF\x07q\u06CF\u06D0\x07t\u06D0\u0104\u06D1\u06D2\x07h\u06D2\u06D3\x07c\u06D3\u06D4\x07n\u06D4\u06D5\x07n\u0
6D5\u06D6\x07d\u06D6\u06D7\x07c\u06D7\u06D8\x07e\u06D8\u06D9\x07m\u06D9\u0106\u06DA\u06DB\x07t\u06DB\u06DC\x07g\u06DC\u06DD\x07e\u06DD\u06DE\x07g\u06DE\u06DF\x07k\u06DF\u06E0\x07x\u06E0\u06E1\x07g\u06E1\u0108\u06E2\u06E6\u010B\x86\u06E3\u06E5\u010D\x87\u06E4\u06E3\u06E5\u06E8\u06E6\u06E4\u06E6\u06E7\u06E7\u010A\u06E8\u06E6\u06E9\u06EA \u06EA\u010C\u06EB\u06EC \x07\u06EC\u010E\u06ED\u06EE\x07w\u06EE\u06EF\x07p\u06EF\u06F0\x07k\u06F0\u06F1\x07e\u06F1\u06F2\x07q\u06F2\u06F3\x07f\u06F3\u06F5\x07g\u06F4\u06ED\u06F4\u06F5\u06F5\u06F6\u06F6\u06FA\x07$\u06F7\u06F9\u0111\x89\u06F8\u06F7\u06F9\u06FC\u06FA\u06F8\u06FA\u06FB\u06FB\u06FD\u06FC\u06FA\u06FD\u0710\x07$\u06FE\u06FF\x07w\u06FF\u0700\x07p\u0700\u0701\x07k\u0701\u0702\x07e\u0702\u0703\x07q\u0703\u0704\x07f\u0704\u0706\x07g\u0705\u06FE\u0705\u0706\u0706\u0707\u0707\u070B\x07)\u0708\u070A\u0113\x8A\u0709\u0708\u070A\u070D\u070B\u0709\u070B\u070C\u070C\u070E\u070D\u070B\u070E\u0710\x07)\u070F\u06F4\u070F\u0705\u0710\u0110\u0711\u0715\n\b\u0712\u0713\x07^\u0713\u0715\v\u0714\u0711\u0714\u0712\u0715\u0112\u0716\u071A\n \u0717\u0718\x07^\u0718\u071A\v\u0719\u0716\u0719\u0717\u071A\u0114\u071B\u071D \u071C\u071B\u071D\u071E\u071E\u071C\u071E\u071F\u071F\u0720\u0720\u0722\x070\u0721\u0723 \u0722\u0721\u0723\u0724\u0724\u0722\u0724\u0725\u0725\u072C\u0726\u0728\x070\u0727\u0729 \u0728\u0727\u0729\u072A\u072A\u0728\u072A\u072B\u072B\u072D\u072C\u0726\u072C\u072D\u072D\u0116\u072E\u0730 
\n\u072F\u072E\u0730\u0731\u0731\u072F\u0731\u0732\u0732\u0733\u0733\u0734\b\x8C\u0734\u0118\u0735\u0736\x071\u0736\u0737\x07,\u0737\u073B\u0738\u073A\v\u0739\u0738\u073A\u073D\u073B\u073C\u073B\u0739\u073C\u073E\u073D\u073B\u073E\u073F\x07,\u073F\u0740\x071\u0740\u0741\u0741\u0742\b\x8D\u0742\u011A\u0743\u0744\x071\u0744\u0745\x071\u0745\u0749\u0746\u0748\n\v\u0747\u0746\u0748\u074B\u0749\u0747\u0749\u074A\u074A\u074C\u074B\u0749\u074C\u074D\b\x8E\u074D\u011C(\u0388\u0461\u053F\u054F\u0555\u0557\u0569\u056F\u0571\u057C\u0580\u0584\u0588\u058C\u0591\u059A\u059F\u05DA\u05E2\u05E7\u05EA\u0649\u06E6\u06F4\u06FA\u0705\u070B\u070F\u0714\u0719\u071E\u0724\u072A\u072C\u0731\u073B\u0749\b"; -SolidityLexer._serializedATN = Utils.join([ - _SolidityLexer._serializedATNSegment0, - _SolidityLexer._serializedATNSegment1, - _SolidityLexer._serializedATNSegment2, - _SolidityLexer._serializedATNSegment3 -], ""); - -// src/antlr/SolidityParser.ts -var import_ATN = __toModule(require_ATN()); -var import_ATNDeserializer2 = __toModule(require_ATNDeserializer()); -var import_FailedPredicateException = __toModule(require_FailedPredicateException()); -var import_NoViableAltException = __toModule(require_NoViableAltException()); -var import_Parser = __toModule(require_Parser()); -var import_ParserRuleContext = __toModule(require_ParserRuleContext()); -var import_ParserATNSimulator = __toModule(require_ParserATNSimulator()); -var import_RecognitionException = __toModule(require_RecognitionException()); -var import_Token = __toModule(require_Token()); -var import_VocabularyImpl2 = __toModule(require_VocabularyImpl()); -var Utils2 = __toModule(require_Utils()); -var _SolidityParser = class extends import_Parser.Parser { - get vocabulary() { - return _SolidityParser.VOCABULARY; - } - get grammarFileName() { - return "Solidity.g4"; - } - get ruleNames() { - return _SolidityParser.ruleNames; - } - get serializedATN() { - return _SolidityParser._serializedATN; - } - 
createFailedPredicateException(predicate, message) { - return new import_FailedPredicateException.FailedPredicateException(this, predicate, message); - } - constructor(input) { - super(input); - this._interp = new import_ParserATNSimulator.ParserATNSimulator(_SolidityParser._ATN, this); - } - sourceUnit() { - let _localctx = new SourceUnitContext(this._ctx, this.state); - this.enterRule(_localctx, 0, _SolidityParser.RULE_sourceUnit); - let _la; - try { - this.enterOuterAlt(_localctx, 1); - { - this.state = 205; - this._errHandler.sync(this); - _la = this._input.LA(1); - while ((_la & ~31) === 0 && (1 << _la & (1 << _SolidityParser.T__0 | 1 << _SolidityParser.T__11 | 1 << _SolidityParser.T__13 | 1 << _SolidityParser.T__17 | 1 << _SolidityParser.T__18 | 1 << _SolidityParser.T__19 | 1 << _SolidityParser.T__20 | 1 << _SolidityParser.T__24 | 1 << _SolidityParser.T__27 | 1 << _SolidityParser.T__29)) !== 0 || (_la - 33 & ~31) === 0 && (1 << _la - 33 & (1 << _SolidityParser.T__32 - 33 | 1 << _SolidityParser.T__35 - 33 | 1 << _SolidityParser.T__37 - 33 | 1 << _SolidityParser.T__41 - 33 | 1 << _SolidityParser.T__53 - 33 | 1 << _SolidityParser.T__54 - 33 | 1 << _SolidityParser.T__55 - 33 | 1 << _SolidityParser.T__56 - 33 | 1 << _SolidityParser.T__57 - 33)) !== 0 || (_la - 96 & ~31) === 0 && (1 << _la - 96 & (1 << _SolidityParser.T__95 - 96 | 1 << _SolidityParser.Int - 96 | 1 << _SolidityParser.Uint - 96 | 1 << _SolidityParser.Byte - 96 | 1 << _SolidityParser.Fixed - 96 | 1 << _SolidityParser.Ufixed - 96 | 1 << _SolidityParser.LeaveKeyword - 96 | 1 << _SolidityParser.PayableKeyword - 96 | 1 << _SolidityParser.TypeKeyword - 96 | 1 << _SolidityParser.ConstructorKeyword - 96 | 1 << _SolidityParser.FallbackKeyword - 96 | 1 << _SolidityParser.ReceiveKeyword - 96)) !== 0 || _la === _SolidityParser.Identifier) { - { - this.state = 203; - this._errHandler.sync(this); - switch (this.interpreter.adaptivePredict(this._input, 0, this._ctx)) { - case 1: - { - this.state = 194; - 
this.pragmaDirective(); - } - break; - case 2: - { - this.state = 195; - this.importDirective(); - } - break; - case 3: - { - this.state = 196; - this.contractDefinition(); - } - break; - case 4: - { - this.state = 197; - this.enumDefinition(); - } - break; - case 5: - { - this.state = 198; - this.structDefinition(); - } - break; - case 6: - { - this.state = 199; - this.functionDefinition(); - } - break; - case 7: - { - this.state = 200; - this.fileLevelConstant(); - } - break; - case 8: - { - this.state = 201; - this.customErrorDefinition(); - } - break; - case 9: - { - this.state = 202; - this.typeDefinition(); - } - break; - } - } - this.state = 207; - this._errHandler.sync(this); - _la = this._input.LA(1); - } - this.state = 208; - this.match(_SolidityParser.EOF); - } - } catch (re) { - if (re instanceof import_RecognitionException.RecognitionException) { - _localctx.exception = re; - this._errHandler.reportError(this, re); - this._errHandler.recover(this, re); - } else { - throw re; - } - } finally { - this.exitRule(); - } - return _localctx; - } - pragmaDirective() { - let _localctx = new PragmaDirectiveContext(this._ctx, this.state); - this.enterRule(_localctx, 2, _SolidityParser.RULE_pragmaDirective); - try { - this.enterOuterAlt(_localctx, 1); - { - this.state = 210; - this.match(_SolidityParser.T__0); - this.state = 211; - this.pragmaName(); - this.state = 212; - this.pragmaValue(); - this.state = 213; - this.match(_SolidityParser.T__1); - } - } catch (re) { - if (re instanceof import_RecognitionException.RecognitionException) { - _localctx.exception = re; - this._errHandler.reportError(this, re); - this._errHandler.recover(this, re); - } else { - throw re; - } - } finally { - this.exitRule(); - } - return _localctx; - } - pragmaName() { - let _localctx = new PragmaNameContext(this._ctx, this.state); - this.enterRule(_localctx, 4, _SolidityParser.RULE_pragmaName); - try { - this.enterOuterAlt(_localctx, 1); - { - this.state = 215; - this.identifier(); - } 
- } catch (re) { - if (re instanceof import_RecognitionException.RecognitionException) { - _localctx.exception = re; - this._errHandler.reportError(this, re); - this._errHandler.recover(this, re); - } else { - throw re; - } - } finally { - this.exitRule(); - } - return _localctx; - } - pragmaValue() { - let _localctx = new PragmaValueContext(this._ctx, this.state); - this.enterRule(_localctx, 6, _SolidityParser.RULE_pragmaValue); - try { - this.state = 219; - this._errHandler.sync(this); - switch (this.interpreter.adaptivePredict(this._input, 2, this._ctx)) { - case 1: - this.enterOuterAlt(_localctx, 1); - { - this.state = 217; - this.version(); - } - break; - case 2: - this.enterOuterAlt(_localctx, 2); - { - this.state = 218; - this.expression(0); - } - break; - } - } catch (re) { - if (re instanceof import_RecognitionException.RecognitionException) { - _localctx.exception = re; - this._errHandler.reportError(this, re); - this._errHandler.recover(this, re); - } else { - throw re; - } - } finally { - this.exitRule(); - } - return _localctx; - } - version() { - let _localctx = new VersionContext(this._ctx, this.state); - this.enterRule(_localctx, 8, _SolidityParser.RULE_version); - let _la; - try { - this.enterOuterAlt(_localctx, 1); - { - this.state = 221; - this.versionConstraint(); - this.state = 228; - this._errHandler.sync(this); - _la = this._input.LA(1); - while ((_la & ~31) === 0 && (1 << _la & (1 << _SolidityParser.T__2 | 1 << _SolidityParser.T__3 | 1 << _SolidityParser.T__4 | 1 << _SolidityParser.T__5 | 1 << _SolidityParser.T__6 | 1 << _SolidityParser.T__7 | 1 << _SolidityParser.T__8 | 1 << _SolidityParser.T__9)) !== 0 || _la === _SolidityParser.DecimalNumber || _la === _SolidityParser.VersionLiteral) { - { - { - this.state = 223; - this._errHandler.sync(this); - _la = this._input.LA(1); - if (_la === _SolidityParser.T__2) { - { - this.state = 222; - this.match(_SolidityParser.T__2); - } - } - this.state = 225; - this.versionConstraint(); - } - } - 
this.state = 230; - this._errHandler.sync(this); - _la = this._input.LA(1); - } - } - } catch (re) { - if (re instanceof import_RecognitionException.RecognitionException) { - _localctx.exception = re; - this._errHandler.reportError(this, re); - this._errHandler.recover(this, re); - } else { - throw re; - } - } finally { - this.exitRule(); - } - return _localctx; - } - versionOperator() { - let _localctx = new VersionOperatorContext(this._ctx, this.state); - this.enterRule(_localctx, 10, _SolidityParser.RULE_versionOperator); - let _la; - try { - this.enterOuterAlt(_localctx, 1); - { - this.state = 231; - _la = this._input.LA(1); - if (!((_la & ~31) === 0 && (1 << _la & (1 << _SolidityParser.T__3 | 1 << _SolidityParser.T__4 | 1 << _SolidityParser.T__5 | 1 << _SolidityParser.T__6 | 1 << _SolidityParser.T__7 | 1 << _SolidityParser.T__8 | 1 << _SolidityParser.T__9)) !== 0)) { - this._errHandler.recoverInline(this); - } else { - if (this._input.LA(1) === import_Token.Token.EOF) { - this.matchedEOF = true; - } - this._errHandler.reportMatch(this); - this.consume(); - } - } - } catch (re) { - if (re instanceof import_RecognitionException.RecognitionException) { - _localctx.exception = re; - this._errHandler.reportError(this, re); - this._errHandler.recover(this, re); - } else { - throw re; - } - } finally { - this.exitRule(); - } - return _localctx; - } - versionConstraint() { - let _localctx = new VersionConstraintContext(this._ctx, this.state); - this.enterRule(_localctx, 12, _SolidityParser.RULE_versionConstraint); - let _la; - try { - this.state = 241; - this._errHandler.sync(this); - switch (this.interpreter.adaptivePredict(this._input, 7, this._ctx)) { - case 1: - this.enterOuterAlt(_localctx, 1); - { - this.state = 234; - this._errHandler.sync(this); - _la = this._input.LA(1); - if ((_la & ~31) === 0 && (1 << _la & (1 << _SolidityParser.T__3 | 1 << _SolidityParser.T__4 | 1 << _SolidityParser.T__5 | 1 << _SolidityParser.T__6 | 1 << _SolidityParser.T__7 | 1 << 
_SolidityParser.T__8 | 1 << _SolidityParser.T__9)) !== 0) { - { - this.state = 233; - this.versionOperator(); - } - } - this.state = 236; - this.match(_SolidityParser.VersionLiteral); - } - break; - case 2: - this.enterOuterAlt(_localctx, 2); - { - this.state = 238; - this._errHandler.sync(this); - _la = this._input.LA(1); - if ((_la & ~31) === 0 && (1 << _la & (1 << _SolidityParser.T__3 | 1 << _SolidityParser.T__4 | 1 << _SolidityParser.T__5 | 1 << _SolidityParser.T__6 | 1 << _SolidityParser.T__7 | 1 << _SolidityParser.T__8 | 1 << _SolidityParser.T__9)) !== 0) { - { - this.state = 237; - this.versionOperator(); - } - } - this.state = 240; - this.match(_SolidityParser.DecimalNumber); - } - break; - } - } catch (re) { - if (re instanceof import_RecognitionException.RecognitionException) { - _localctx.exception = re; - this._errHandler.reportError(this, re); - this._errHandler.recover(this, re); - } else { - throw re; - } - } finally { - this.exitRule(); - } - return _localctx; - } - importDeclaration() { - let _localctx = new ImportDeclarationContext(this._ctx, this.state); - this.enterRule(_localctx, 14, _SolidityParser.RULE_importDeclaration); - let _la; - try { - this.enterOuterAlt(_localctx, 1); - { - this.state = 243; - this.identifier(); - this.state = 246; - this._errHandler.sync(this); - _la = this._input.LA(1); - if (_la === _SolidityParser.T__10) { - { - this.state = 244; - this.match(_SolidityParser.T__10); - this.state = 245; - this.identifier(); - } - } - } - } catch (re) { - if (re instanceof import_RecognitionException.RecognitionException) { - _localctx.exception = re; - this._errHandler.reportError(this, re); - this._errHandler.recover(this, re); - } else { - throw re; - } - } finally { - this.exitRule(); - } - return _localctx; - } - importDirective() { - let _localctx = new ImportDirectiveContext(this._ctx, this.state); - this.enterRule(_localctx, 16, _SolidityParser.RULE_importDirective); - let _la; - try { - this.state = 284; - 
this._errHandler.sync(this); - switch (this.interpreter.adaptivePredict(this._input, 13, this._ctx)) { - case 1: - this.enterOuterAlt(_localctx, 1); - { - this.state = 248; - this.match(_SolidityParser.T__11); - this.state = 249; - this.importPath(); - this.state = 252; - this._errHandler.sync(this); - _la = this._input.LA(1); - if (_la === _SolidityParser.T__10) { - { - this.state = 250; - this.match(_SolidityParser.T__10); - this.state = 251; - this.identifier(); - } - } - this.state = 254; - this.match(_SolidityParser.T__1); - } - break; - case 2: - this.enterOuterAlt(_localctx, 2); - { - this.state = 256; - this.match(_SolidityParser.T__11); - this.state = 259; - this._errHandler.sync(this); - switch (this._input.LA(1)) { - case _SolidityParser.T__12: - { - this.state = 257; - this.match(_SolidityParser.T__12); - } - break; - case _SolidityParser.T__13: - case _SolidityParser.T__24: - case _SolidityParser.T__41: - case _SolidityParser.T__53: - case _SolidityParser.T__95: - case _SolidityParser.LeaveKeyword: - case _SolidityParser.PayableKeyword: - case _SolidityParser.ConstructorKeyword: - case _SolidityParser.ReceiveKeyword: - case _SolidityParser.Identifier: - { - this.state = 258; - this.identifier(); - } - break; - default: - throw new import_NoViableAltException.NoViableAltException(this); - } - this.state = 263; - this._errHandler.sync(this); - _la = this._input.LA(1); - if (_la === _SolidityParser.T__10) { - { - this.state = 261; - this.match(_SolidityParser.T__10); - this.state = 262; - this.identifier(); - } - } - this.state = 265; - this.match(_SolidityParser.T__13); - this.state = 266; - this.importPath(); - this.state = 267; - this.match(_SolidityParser.T__1); - } - break; - case 3: - this.enterOuterAlt(_localctx, 3); - { - this.state = 269; - this.match(_SolidityParser.T__11); - this.state = 270; - this.match(_SolidityParser.T__14); - this.state = 271; - this.importDeclaration(); - this.state = 276; - this._errHandler.sync(this); - _la = 
this._input.LA(1); - while (_la === _SolidityParser.T__15) { - { - { - this.state = 272; - this.match(_SolidityParser.T__15); - this.state = 273; - this.importDeclaration(); - } - } - this.state = 278; - this._errHandler.sync(this); - _la = this._input.LA(1); - } - this.state = 279; - this.match(_SolidityParser.T__16); - this.state = 280; - this.match(_SolidityParser.T__13); - this.state = 281; - this.importPath(); - this.state = 282; - this.match(_SolidityParser.T__1); - } - break; - } - } catch (re) { - if (re instanceof import_RecognitionException.RecognitionException) { - _localctx.exception = re; - this._errHandler.reportError(this, re); - this._errHandler.recover(this, re); - } else { - throw re; - } - } finally { - this.exitRule(); - } - return _localctx; - } - importPath() { - let _localctx = new ImportPathContext(this._ctx, this.state); - this.enterRule(_localctx, 18, _SolidityParser.RULE_importPath); - try { - this.enterOuterAlt(_localctx, 1); - { - this.state = 286; - this.match(_SolidityParser.StringLiteralFragment); - } - } catch (re) { - if (re instanceof import_RecognitionException.RecognitionException) { - _localctx.exception = re; - this._errHandler.reportError(this, re); - this._errHandler.recover(this, re); - } else { - throw re; - } - } finally { - this.exitRule(); - } - return _localctx; - } - contractDefinition() { - let _localctx = new ContractDefinitionContext(this._ctx, this.state); - this.enterRule(_localctx, 20, _SolidityParser.RULE_contractDefinition); - let _la; - try { - this.enterOuterAlt(_localctx, 1); - { - this.state = 289; - this._errHandler.sync(this); - _la = this._input.LA(1); - if (_la === _SolidityParser.T__17) { - { - this.state = 288; - this.match(_SolidityParser.T__17); - } - } - this.state = 291; - _la = this._input.LA(1); - if (!((_la & ~31) === 0 && (1 << _la & (1 << _SolidityParser.T__18 | 1 << _SolidityParser.T__19 | 1 << _SolidityParser.T__20)) !== 0)) { - this._errHandler.recoverInline(this); - } else { - if 
(this._input.LA(1) === import_Token.Token.EOF) { - this.matchedEOF = true; - } - this._errHandler.reportMatch(this); - this.consume(); - } - this.state = 292; - this.identifier(); - this.state = 302; - this._errHandler.sync(this); - _la = this._input.LA(1); - if (_la === _SolidityParser.T__21) { - { - this.state = 293; - this.match(_SolidityParser.T__21); - this.state = 294; - this.inheritanceSpecifier(); - this.state = 299; - this._errHandler.sync(this); - _la = this._input.LA(1); - while (_la === _SolidityParser.T__15) { - { - { - this.state = 295; - this.match(_SolidityParser.T__15); - this.state = 296; - this.inheritanceSpecifier(); - } - } - this.state = 301; - this._errHandler.sync(this); - _la = this._input.LA(1); - } - } - } - this.state = 304; - this.match(_SolidityParser.T__14); - this.state = 308; - this._errHandler.sync(this); - _la = this._input.LA(1); - while ((_la & ~31) === 0 && (1 << _la & (1 << _SolidityParser.T__13 | 1 << _SolidityParser.T__24 | 1 << _SolidityParser.T__25 | 1 << _SolidityParser.T__27 | 1 << _SolidityParser.T__28 | 1 << _SolidityParser.T__29)) !== 0 || (_la - 32 & ~31) === 0 && (1 << _la - 32 & (1 << _SolidityParser.T__31 - 32 | 1 << _SolidityParser.T__32 - 32 | 1 << _SolidityParser.T__35 - 32 | 1 << _SolidityParser.T__37 - 32 | 1 << _SolidityParser.T__41 - 32 | 1 << _SolidityParser.T__53 - 32 | 1 << _SolidityParser.T__54 - 32 | 1 << _SolidityParser.T__55 - 32 | 1 << _SolidityParser.T__56 - 32 | 1 << _SolidityParser.T__57 - 32)) !== 0 || (_la - 96 & ~31) === 0 && (1 << _la - 96 & (1 << _SolidityParser.T__95 - 96 | 1 << _SolidityParser.Int - 96 | 1 << _SolidityParser.Uint - 96 | 1 << _SolidityParser.Byte - 96 | 1 << _SolidityParser.Fixed - 96 | 1 << _SolidityParser.Ufixed - 96 | 1 << _SolidityParser.LeaveKeyword - 96 | 1 << _SolidityParser.PayableKeyword - 96 | 1 << _SolidityParser.TypeKeyword - 96 | 1 << _SolidityParser.ConstructorKeyword - 96 | 1 << _SolidityParser.FallbackKeyword - 96 | 1 << _SolidityParser.ReceiveKeyword - 96)) 
!== 0 || _la === _SolidityParser.Identifier) { - { - { - this.state = 305; - this.contractPart(); - } - } - this.state = 310; - this._errHandler.sync(this); - _la = this._input.LA(1); - } - this.state = 311; - this.match(_SolidityParser.T__16); - } - } catch (re) { - if (re instanceof import_RecognitionException.RecognitionException) { - _localctx.exception = re; - this._errHandler.reportError(this, re); - this._errHandler.recover(this, re); - } else { - throw re; - } - } finally { - this.exitRule(); - } - return _localctx; - } - inheritanceSpecifier() { - let _localctx = new InheritanceSpecifierContext(this._ctx, this.state); - this.enterRule(_localctx, 22, _SolidityParser.RULE_inheritanceSpecifier); - let _la; - try { - this.enterOuterAlt(_localctx, 1); - { - this.state = 313; - this.userDefinedTypeName(); - this.state = 319; - this._errHandler.sync(this); - _la = this._input.LA(1); - if (_la === _SolidityParser.T__22) { - { - this.state = 314; - this.match(_SolidityParser.T__22); - this.state = 316; - this._errHandler.sync(this); - _la = this._input.LA(1); - if ((_la & ~31) === 0 && (1 << _la & (1 << _SolidityParser.T__4 | 1 << _SolidityParser.T__13 | 1 << _SolidityParser.T__22 | 1 << _SolidityParser.T__24)) !== 0 || (_la - 34 & ~31) === 0 && (1 << _la - 34 & (1 << _SolidityParser.T__33 - 34 | 1 << _SolidityParser.T__35 - 34 | 1 << _SolidityParser.T__41 - 34 | 1 << _SolidityParser.T__53 - 34 | 1 << _SolidityParser.T__54 - 34 | 1 << _SolidityParser.T__55 - 34 | 1 << _SolidityParser.T__56 - 34 | 1 << _SolidityParser.T__57 - 34 | 1 << _SolidityParser.T__58 - 34 | 1 << _SolidityParser.T__59 - 34 | 1 << _SolidityParser.T__60 - 34 | 1 << _SolidityParser.T__62 - 34 | 1 << _SolidityParser.T__63 - 34 | 1 << _SolidityParser.T__64 - 34)) !== 0 || (_la - 66 & ~31) === 0 && (1 << _la - 66 & (1 << _SolidityParser.T__65 - 66 | 1 << _SolidityParser.T__66 - 66 | 1 << _SolidityParser.T__95 - 66)) !== 0 || (_la - 98 & ~31) === 0 && (1 << _la - 98 & (1 << _SolidityParser.Int - 98 | 
1 << _SolidityParser.Uint - 98 | 1 << _SolidityParser.Byte - 98 | 1 << _SolidityParser.Fixed - 98 | 1 << _SolidityParser.Ufixed - 98 | 1 << _SolidityParser.BooleanLiteral - 98 | 1 << _SolidityParser.DecimalNumber - 98 | 1 << _SolidityParser.HexNumber - 98 | 1 << _SolidityParser.HexLiteralFragment - 98 | 1 << _SolidityParser.LeaveKeyword - 98 | 1 << _SolidityParser.PayableKeyword - 98 | 1 << _SolidityParser.TypeKeyword - 98 | 1 << _SolidityParser.ConstructorKeyword - 98 | 1 << _SolidityParser.ReceiveKeyword - 98 | 1 << _SolidityParser.Identifier - 98 | 1 << _SolidityParser.StringLiteralFragment - 98)) !== 0) { - { - this.state = 315; - this.expressionList(); - } - } - this.state = 318; - this.match(_SolidityParser.T__23); - } - } - } - } catch (re) { - if (re instanceof import_RecognitionException.RecognitionException) { - _localctx.exception = re; - this._errHandler.reportError(this, re); - this._errHandler.recover(this, re); - } else { - throw re; - } - } finally { - this.exitRule(); - } - return _localctx; - } - contractPart() { - let _localctx = new ContractPartContext(this._ctx, this.state); - this.enterRule(_localctx, 24, _SolidityParser.RULE_contractPart); - try { - this.state = 330; - this._errHandler.sync(this); - switch (this.interpreter.adaptivePredict(this._input, 20, this._ctx)) { - case 1: - this.enterOuterAlt(_localctx, 1); - { - this.state = 321; - this.stateVariableDeclaration(); - } - break; - case 2: - this.enterOuterAlt(_localctx, 2); - { - this.state = 322; - this.usingForDeclaration(); - } - break; - case 3: - this.enterOuterAlt(_localctx, 3); - { - this.state = 323; - this.structDefinition(); - } - break; - case 4: - this.enterOuterAlt(_localctx, 4); - { - this.state = 324; - this.modifierDefinition(); - } - break; - case 5: - this.enterOuterAlt(_localctx, 5); - { - this.state = 325; - this.functionDefinition(); - } - break; - case 6: - this.enterOuterAlt(_localctx, 6); - { - this.state = 326; - this.eventDefinition(); - } - break; - case 7: - 
this.enterOuterAlt(_localctx, 7); - { - this.state = 327; - this.enumDefinition(); - } - break; - case 8: - this.enterOuterAlt(_localctx, 8); - { - this.state = 328; - this.customErrorDefinition(); - } - break; - case 9: - this.enterOuterAlt(_localctx, 9); - { - this.state = 329; - this.typeDefinition(); - } - break; - } - } catch (re) { - if (re instanceof import_RecognitionException.RecognitionException) { - _localctx.exception = re; - this._errHandler.reportError(this, re); - this._errHandler.recover(this, re); - } else { - throw re; - } - } finally { - this.exitRule(); - } - return _localctx; - } - stateVariableDeclaration() { - let _localctx = new StateVariableDeclarationContext(this._ctx, this.state); - this.enterRule(_localctx, 26, _SolidityParser.RULE_stateVariableDeclaration); - let _la; - try { - this.enterOuterAlt(_localctx, 1); - { - this.state = 332; - this.typeName(0); - this.state = 341; - this._errHandler.sync(this); - _la = this._input.LA(1); - while ((_la - 97 & ~31) === 0 && (1 << _la - 97 & (1 << _SolidityParser.T__96 - 97 | 1 << _SolidityParser.ConstantKeyword - 97 | 1 << _SolidityParser.ImmutableKeyword - 97 | 1 << _SolidityParser.InternalKeyword - 97 | 1 << _SolidityParser.PrivateKeyword - 97 | 1 << _SolidityParser.PublicKeyword - 97)) !== 0) { - { - this.state = 339; - this._errHandler.sync(this); - switch (this._input.LA(1)) { - case _SolidityParser.PublicKeyword: - { - this.state = 333; - this.match(_SolidityParser.PublicKeyword); - } - break; - case _SolidityParser.InternalKeyword: - { - this.state = 334; - this.match(_SolidityParser.InternalKeyword); - } - break; - case _SolidityParser.PrivateKeyword: - { - this.state = 335; - this.match(_SolidityParser.PrivateKeyword); - } - break; - case _SolidityParser.ConstantKeyword: - { - this.state = 336; - this.match(_SolidityParser.ConstantKeyword); - } - break; - case _SolidityParser.ImmutableKeyword: - { - this.state = 337; - this.match(_SolidityParser.ImmutableKeyword); - } - break; - case 
_SolidityParser.T__96: - { - this.state = 338; - this.overrideSpecifier(); - } - break; - default: - throw new import_NoViableAltException.NoViableAltException(this); - } - } - this.state = 343; - this._errHandler.sync(this); - _la = this._input.LA(1); - } - this.state = 344; - this.identifier(); - this.state = 347; - this._errHandler.sync(this); - _la = this._input.LA(1); - if (_la === _SolidityParser.T__9) { - { - this.state = 345; - this.match(_SolidityParser.T__9); - this.state = 346; - this.expression(0); - } - } - this.state = 349; - this.match(_SolidityParser.T__1); - } - } catch (re) { - if (re instanceof import_RecognitionException.RecognitionException) { - _localctx.exception = re; - this._errHandler.reportError(this, re); - this._errHandler.recover(this, re); - } else { - throw re; - } - } finally { - this.exitRule(); - } - return _localctx; - } - fileLevelConstant() { - let _localctx = new FileLevelConstantContext(this._ctx, this.state); - this.enterRule(_localctx, 28, _SolidityParser.RULE_fileLevelConstant); - try { - this.enterOuterAlt(_localctx, 1); - { - this.state = 351; - this.typeName(0); - this.state = 352; - this.match(_SolidityParser.ConstantKeyword); - this.state = 353; - this.identifier(); - this.state = 354; - this.match(_SolidityParser.T__9); - this.state = 355; - this.expression(0); - this.state = 356; - this.match(_SolidityParser.T__1); - } - } catch (re) { - if (re instanceof import_RecognitionException.RecognitionException) { - _localctx.exception = re; - this._errHandler.reportError(this, re); - this._errHandler.recover(this, re); - } else { - throw re; - } - } finally { - this.exitRule(); - } - return _localctx; - } - customErrorDefinition() { - let _localctx = new CustomErrorDefinitionContext(this._ctx, this.state); - this.enterRule(_localctx, 30, _SolidityParser.RULE_customErrorDefinition); - try { - this.enterOuterAlt(_localctx, 1); - { - this.state = 358; - this.match(_SolidityParser.T__24); - this.state = 359; - 
this.identifier(); - this.state = 360; - this.parameterList(); - this.state = 361; - this.match(_SolidityParser.T__1); - } - } catch (re) { - if (re instanceof import_RecognitionException.RecognitionException) { - _localctx.exception = re; - this._errHandler.reportError(this, re); - this._errHandler.recover(this, re); - } else { - throw re; - } - } finally { - this.exitRule(); - } - return _localctx; - } - typeDefinition() { - let _localctx = new TypeDefinitionContext(this._ctx, this.state); - this.enterRule(_localctx, 32, _SolidityParser.RULE_typeDefinition); - try { - this.enterOuterAlt(_localctx, 1); - { - this.state = 363; - this.match(_SolidityParser.TypeKeyword); - this.state = 364; - this.identifier(); - this.state = 365; - this.match(_SolidityParser.T__21); - this.state = 366; - this.elementaryTypeName(); - this.state = 367; - this.match(_SolidityParser.T__1); - } - } catch (re) { - if (re instanceof import_RecognitionException.RecognitionException) { - _localctx.exception = re; - this._errHandler.reportError(this, re); - this._errHandler.recover(this, re); - } else { - throw re; - } - } finally { - this.exitRule(); - } - return _localctx; - } - usingForDeclaration() { - let _localctx = new UsingForDeclarationContext(this._ctx, this.state); - this.enterRule(_localctx, 34, _SolidityParser.RULE_usingForDeclaration); - try { - this.enterOuterAlt(_localctx, 1); - { - this.state = 369; - this.match(_SolidityParser.T__25); - this.state = 370; - this.userDefinedTypeName(); - this.state = 371; - this.match(_SolidityParser.T__26); - this.state = 374; - this._errHandler.sync(this); - switch (this._input.LA(1)) { - case _SolidityParser.T__12: - { - this.state = 372; - this.match(_SolidityParser.T__12); - } - break; - case _SolidityParser.T__13: - case _SolidityParser.T__24: - case _SolidityParser.T__29: - case _SolidityParser.T__35: - case _SolidityParser.T__37: - case _SolidityParser.T__41: - case _SolidityParser.T__53: - case _SolidityParser.T__54: - case 
_SolidityParser.T__55: - case _SolidityParser.T__56: - case _SolidityParser.T__57: - case _SolidityParser.T__95: - case _SolidityParser.Int: - case _SolidityParser.Uint: - case _SolidityParser.Byte: - case _SolidityParser.Fixed: - case _SolidityParser.Ufixed: - case _SolidityParser.LeaveKeyword: - case _SolidityParser.PayableKeyword: - case _SolidityParser.ConstructorKeyword: - case _SolidityParser.ReceiveKeyword: - case _SolidityParser.Identifier: - { - this.state = 373; - this.typeName(0); - } - break; - default: - throw new import_NoViableAltException.NoViableAltException(this); - } - this.state = 376; - this.match(_SolidityParser.T__1); - } - } catch (re) { - if (re instanceof import_RecognitionException.RecognitionException) { - _localctx.exception = re; - this._errHandler.reportError(this, re); - this._errHandler.recover(this, re); - } else { - throw re; - } - } finally { - this.exitRule(); - } - return _localctx; - } - structDefinition() { - let _localctx = new StructDefinitionContext(this._ctx, this.state); - this.enterRule(_localctx, 36, _SolidityParser.RULE_structDefinition); - let _la; - try { - this.enterOuterAlt(_localctx, 1); - { - this.state = 378; - this.match(_SolidityParser.T__27); - this.state = 379; - this.identifier(); - this.state = 380; - this.match(_SolidityParser.T__14); - this.state = 391; - this._errHandler.sync(this); - _la = this._input.LA(1); - if ((_la & ~31) === 0 && (1 << _la & (1 << _SolidityParser.T__13 | 1 << _SolidityParser.T__24 | 1 << _SolidityParser.T__29)) !== 0 || (_la - 36 & ~31) === 0 && (1 << _la - 36 & (1 << _SolidityParser.T__35 - 36 | 1 << _SolidityParser.T__37 - 36 | 1 << _SolidityParser.T__41 - 36 | 1 << _SolidityParser.T__53 - 36 | 1 << _SolidityParser.T__54 - 36 | 1 << _SolidityParser.T__55 - 36 | 1 << _SolidityParser.T__56 - 36 | 1 << _SolidityParser.T__57 - 36)) !== 0 || (_la - 96 & ~31) === 0 && (1 << _la - 96 & (1 << _SolidityParser.T__95 - 96 | 1 << _SolidityParser.Int - 96 | 1 << _SolidityParser.Uint - 96 | 
1 << _SolidityParser.Byte - 96 | 1 << _SolidityParser.Fixed - 96 | 1 << _SolidityParser.Ufixed - 96 | 1 << _SolidityParser.LeaveKeyword - 96 | 1 << _SolidityParser.PayableKeyword - 96 | 1 << _SolidityParser.ConstructorKeyword - 96 | 1 << _SolidityParser.ReceiveKeyword - 96)) !== 0 || _la === _SolidityParser.Identifier) { - { - this.state = 381; - this.variableDeclaration(); - this.state = 382; - this.match(_SolidityParser.T__1); - this.state = 388; - this._errHandler.sync(this); - _la = this._input.LA(1); - while ((_la & ~31) === 0 && (1 << _la & (1 << _SolidityParser.T__13 | 1 << _SolidityParser.T__24 | 1 << _SolidityParser.T__29)) !== 0 || (_la - 36 & ~31) === 0 && (1 << _la - 36 & (1 << _SolidityParser.T__35 - 36 | 1 << _SolidityParser.T__37 - 36 | 1 << _SolidityParser.T__41 - 36 | 1 << _SolidityParser.T__53 - 36 | 1 << _SolidityParser.T__54 - 36 | 1 << _SolidityParser.T__55 - 36 | 1 << _SolidityParser.T__56 - 36 | 1 << _SolidityParser.T__57 - 36)) !== 0 || (_la - 96 & ~31) === 0 && (1 << _la - 96 & (1 << _SolidityParser.T__95 - 96 | 1 << _SolidityParser.Int - 96 | 1 << _SolidityParser.Uint - 96 | 1 << _SolidityParser.Byte - 96 | 1 << _SolidityParser.Fixed - 96 | 1 << _SolidityParser.Ufixed - 96 | 1 << _SolidityParser.LeaveKeyword - 96 | 1 << _SolidityParser.PayableKeyword - 96 | 1 << _SolidityParser.ConstructorKeyword - 96 | 1 << _SolidityParser.ReceiveKeyword - 96)) !== 0 || _la === _SolidityParser.Identifier) { - { - { - this.state = 383; - this.variableDeclaration(); - this.state = 384; - this.match(_SolidityParser.T__1); - } - } - this.state = 390; - this._errHandler.sync(this); - _la = this._input.LA(1); - } - } - } - this.state = 393; - this.match(_SolidityParser.T__16); - } - } catch (re) { - if (re instanceof import_RecognitionException.RecognitionException) { - _localctx.exception = re; - this._errHandler.reportError(this, re); - this._errHandler.recover(this, re); - } else { - throw re; - } - } finally { - this.exitRule(); - } - return _localctx; - } 
- modifierDefinition() { - let _localctx = new ModifierDefinitionContext(this._ctx, this.state); - this.enterRule(_localctx, 38, _SolidityParser.RULE_modifierDefinition); - let _la; - try { - this.enterOuterAlt(_localctx, 1); - { - this.state = 395; - this.match(_SolidityParser.T__28); - this.state = 396; - this.identifier(); - this.state = 398; - this._errHandler.sync(this); - _la = this._input.LA(1); - if (_la === _SolidityParser.T__22) { - { - this.state = 397; - this.parameterList(); - } - } - this.state = 404; - this._errHandler.sync(this); - _la = this._input.LA(1); - while (_la === _SolidityParser.T__96 || _la === _SolidityParser.VirtualKeyword) { - { - this.state = 402; - this._errHandler.sync(this); - switch (this._input.LA(1)) { - case _SolidityParser.VirtualKeyword: - { - this.state = 400; - this.match(_SolidityParser.VirtualKeyword); - } - break; - case _SolidityParser.T__96: - { - this.state = 401; - this.overrideSpecifier(); - } - break; - default: - throw new import_NoViableAltException.NoViableAltException(this); - } - } - this.state = 406; - this._errHandler.sync(this); - _la = this._input.LA(1); - } - this.state = 409; - this._errHandler.sync(this); - switch (this._input.LA(1)) { - case _SolidityParser.T__1: - { - this.state = 407; - this.match(_SolidityParser.T__1); - } - break; - case _SolidityParser.T__14: - { - this.state = 408; - this.block(); - } - break; - default: - throw new import_NoViableAltException.NoViableAltException(this); - } - } - } catch (re) { - if (re instanceof import_RecognitionException.RecognitionException) { - _localctx.exception = re; - this._errHandler.reportError(this, re); - this._errHandler.recover(this, re); - } else { - throw re; - } - } finally { - this.exitRule(); - } - return _localctx; - } - modifierInvocation() { - let _localctx = new ModifierInvocationContext(this._ctx, this.state); - this.enterRule(_localctx, 40, _SolidityParser.RULE_modifierInvocation); - let _la; - try { - this.enterOuterAlt(_localctx, 1); 
- { - this.state = 411; - this.identifier(); - this.state = 417; - this._errHandler.sync(this); - _la = this._input.LA(1); - if (_la === _SolidityParser.T__22) { - { - this.state = 412; - this.match(_SolidityParser.T__22); - this.state = 414; - this._errHandler.sync(this); - _la = this._input.LA(1); - if ((_la & ~31) === 0 && (1 << _la & (1 << _SolidityParser.T__4 | 1 << _SolidityParser.T__13 | 1 << _SolidityParser.T__22 | 1 << _SolidityParser.T__24)) !== 0 || (_la - 34 & ~31) === 0 && (1 << _la - 34 & (1 << _SolidityParser.T__33 - 34 | 1 << _SolidityParser.T__35 - 34 | 1 << _SolidityParser.T__41 - 34 | 1 << _SolidityParser.T__53 - 34 | 1 << _SolidityParser.T__54 - 34 | 1 << _SolidityParser.T__55 - 34 | 1 << _SolidityParser.T__56 - 34 | 1 << _SolidityParser.T__57 - 34 | 1 << _SolidityParser.T__58 - 34 | 1 << _SolidityParser.T__59 - 34 | 1 << _SolidityParser.T__60 - 34 | 1 << _SolidityParser.T__62 - 34 | 1 << _SolidityParser.T__63 - 34 | 1 << _SolidityParser.T__64 - 34)) !== 0 || (_la - 66 & ~31) === 0 && (1 << _la - 66 & (1 << _SolidityParser.T__65 - 66 | 1 << _SolidityParser.T__66 - 66 | 1 << _SolidityParser.T__95 - 66)) !== 0 || (_la - 98 & ~31) === 0 && (1 << _la - 98 & (1 << _SolidityParser.Int - 98 | 1 << _SolidityParser.Uint - 98 | 1 << _SolidityParser.Byte - 98 | 1 << _SolidityParser.Fixed - 98 | 1 << _SolidityParser.Ufixed - 98 | 1 << _SolidityParser.BooleanLiteral - 98 | 1 << _SolidityParser.DecimalNumber - 98 | 1 << _SolidityParser.HexNumber - 98 | 1 << _SolidityParser.HexLiteralFragment - 98 | 1 << _SolidityParser.LeaveKeyword - 98 | 1 << _SolidityParser.PayableKeyword - 98 | 1 << _SolidityParser.TypeKeyword - 98 | 1 << _SolidityParser.ConstructorKeyword - 98 | 1 << _SolidityParser.ReceiveKeyword - 98 | 1 << _SolidityParser.Identifier - 98 | 1 << _SolidityParser.StringLiteralFragment - 98)) !== 0) { - { - this.state = 413; - this.expressionList(); - } - } - this.state = 416; - this.match(_SolidityParser.T__23); - } - } - } - } catch (re) { - if (re 
instanceof import_RecognitionException.RecognitionException) { - _localctx.exception = re; - this._errHandler.reportError(this, re); - this._errHandler.recover(this, re); - } else { - throw re; - } - } finally { - this.exitRule(); - } - return _localctx; - } - functionDefinition() { - let _localctx = new FunctionDefinitionContext(this._ctx, this.state); - this.enterRule(_localctx, 42, _SolidityParser.RULE_functionDefinition); - let _la; - try { - this.enterOuterAlt(_localctx, 1); - { - this.state = 419; - this.functionDescriptor(); - this.state = 420; - this.parameterList(); - this.state = 421; - this.modifierList(); - this.state = 423; - this._errHandler.sync(this); - _la = this._input.LA(1); - if (_la === _SolidityParser.T__30) { - { - this.state = 422; - this.returnParameters(); - } - } - this.state = 427; - this._errHandler.sync(this); - switch (this._input.LA(1)) { - case _SolidityParser.T__1: - { - this.state = 425; - this.match(_SolidityParser.T__1); - } - break; - case _SolidityParser.T__14: - { - this.state = 426; - this.block(); - } - break; - default: - throw new import_NoViableAltException.NoViableAltException(this); - } - } - } catch (re) { - if (re instanceof import_RecognitionException.RecognitionException) { - _localctx.exception = re; - this._errHandler.reportError(this, re); - this._errHandler.recover(this, re); - } else { - throw re; - } - } finally { - this.exitRule(); - } - return _localctx; - } - functionDescriptor() { - let _localctx = new FunctionDescriptorContext(this._ctx, this.state); - this.enterRule(_localctx, 44, _SolidityParser.RULE_functionDescriptor); - let _la; - try { - this.state = 436; - this._errHandler.sync(this); - switch (this._input.LA(1)) { - case _SolidityParser.T__29: - this.enterOuterAlt(_localctx, 1); - { - this.state = 429; - this.match(_SolidityParser.T__29); - this.state = 431; - this._errHandler.sync(this); - _la = this._input.LA(1); - if (_la === _SolidityParser.T__13 || _la === _SolidityParser.T__24 || _la === 
_SolidityParser.T__41 || _la === _SolidityParser.T__53 || (_la - 96 & ~31) === 0 && (1 << _la - 96 & (1 << _SolidityParser.T__95 - 96 | 1 << _SolidityParser.LeaveKeyword - 96 | 1 << _SolidityParser.PayableKeyword - 96 | 1 << _SolidityParser.ConstructorKeyword - 96 | 1 << _SolidityParser.ReceiveKeyword - 96)) !== 0 || _la === _SolidityParser.Identifier) { - { - this.state = 430; - this.identifier(); - } - } - } - break; - case _SolidityParser.ConstructorKeyword: - this.enterOuterAlt(_localctx, 2); - { - this.state = 433; - this.match(_SolidityParser.ConstructorKeyword); - } - break; - case _SolidityParser.FallbackKeyword: - this.enterOuterAlt(_localctx, 3); - { - this.state = 434; - this.match(_SolidityParser.FallbackKeyword); - } - break; - case _SolidityParser.ReceiveKeyword: - this.enterOuterAlt(_localctx, 4); - { - this.state = 435; - this.match(_SolidityParser.ReceiveKeyword); - } - break; - default: - throw new import_NoViableAltException.NoViableAltException(this); - } - } catch (re) { - if (re instanceof import_RecognitionException.RecognitionException) { - _localctx.exception = re; - this._errHandler.reportError(this, re); - this._errHandler.recover(this, re); - } else { - throw re; - } - } finally { - this.exitRule(); - } - return _localctx; - } - returnParameters() { - let _localctx = new ReturnParametersContext(this._ctx, this.state); - this.enterRule(_localctx, 46, _SolidityParser.RULE_returnParameters); - try { - this.enterOuterAlt(_localctx, 1); - { - this.state = 438; - this.match(_SolidityParser.T__30); - this.state = 439; - this.parameterList(); - } - } catch (re) { - if (re instanceof import_RecognitionException.RecognitionException) { - _localctx.exception = re; - this._errHandler.reportError(this, re); - this._errHandler.recover(this, re); - } else { - throw re; - } - } finally { - this.exitRule(); - } - return _localctx; - } - modifierList() { - let _localctx = new ModifierListContext(this._ctx, this.state); - this.enterRule(_localctx, 48, 
_SolidityParser.RULE_modifierList); - let _la; - try { - this.enterOuterAlt(_localctx, 1); - { - this.state = 451; - this._errHandler.sync(this); - _la = this._input.LA(1); - while (_la === _SolidityParser.T__13 || _la === _SolidityParser.T__24 || _la === _SolidityParser.T__41 || _la === _SolidityParser.T__53 || (_la - 96 & ~31) === 0 && (1 << _la - 96 & (1 << _SolidityParser.T__95 - 96 | 1 << _SolidityParser.T__96 - 96 | 1 << _SolidityParser.ConstantKeyword - 96 | 1 << _SolidityParser.LeaveKeyword - 96 | 1 << _SolidityParser.ExternalKeyword - 96 | 1 << _SolidityParser.InternalKeyword - 96 | 1 << _SolidityParser.PayableKeyword - 96 | 1 << _SolidityParser.PrivateKeyword - 96 | 1 << _SolidityParser.PublicKeyword - 96 | 1 << _SolidityParser.VirtualKeyword - 96 | 1 << _SolidityParser.PureKeyword - 96 | 1 << _SolidityParser.ViewKeyword - 96 | 1 << _SolidityParser.ConstructorKeyword - 96 | 1 << _SolidityParser.ReceiveKeyword - 96)) !== 0 || _la === _SolidityParser.Identifier) { - { - this.state = 449; - this._errHandler.sync(this); - switch (this.interpreter.adaptivePredict(this._input, 37, this._ctx)) { - case 1: - { - this.state = 441; - this.match(_SolidityParser.ExternalKeyword); - } - break; - case 2: - { - this.state = 442; - this.match(_SolidityParser.PublicKeyword); - } - break; - case 3: - { - this.state = 443; - this.match(_SolidityParser.InternalKeyword); - } - break; - case 4: - { - this.state = 444; - this.match(_SolidityParser.PrivateKeyword); - } - break; - case 5: - { - this.state = 445; - this.match(_SolidityParser.VirtualKeyword); - } - break; - case 6: - { - this.state = 446; - this.stateMutability(); - } - break; - case 7: - { - this.state = 447; - this.modifierInvocation(); - } - break; - case 8: - { - this.state = 448; - this.overrideSpecifier(); - } - break; - } - } - this.state = 453; - this._errHandler.sync(this); - _la = this._input.LA(1); - } - } - } catch (re) { - if (re instanceof import_RecognitionException.RecognitionException) { - 
_localctx.exception = re; - this._errHandler.reportError(this, re); - this._errHandler.recover(this, re); - } else { - throw re; - } - } finally { - this.exitRule(); - } - return _localctx; - } - eventDefinition() { - let _localctx = new EventDefinitionContext(this._ctx, this.state); - this.enterRule(_localctx, 50, _SolidityParser.RULE_eventDefinition); - let _la; - try { - this.enterOuterAlt(_localctx, 1); - { - this.state = 454; - this.match(_SolidityParser.T__31); - this.state = 455; - this.identifier(); - this.state = 456; - this.eventParameterList(); - this.state = 458; - this._errHandler.sync(this); - _la = this._input.LA(1); - if (_la === _SolidityParser.AnonymousKeyword) { - { - this.state = 457; - this.match(_SolidityParser.AnonymousKeyword); - } - } - this.state = 460; - this.match(_SolidityParser.T__1); - } - } catch (re) { - if (re instanceof import_RecognitionException.RecognitionException) { - _localctx.exception = re; - this._errHandler.reportError(this, re); - this._errHandler.recover(this, re); - } else { - throw re; - } - } finally { - this.exitRule(); - } - return _localctx; - } - enumValue() { - let _localctx = new EnumValueContext(this._ctx, this.state); - this.enterRule(_localctx, 52, _SolidityParser.RULE_enumValue); - try { - this.enterOuterAlt(_localctx, 1); - { - this.state = 462; - this.identifier(); - } - } catch (re) { - if (re instanceof import_RecognitionException.RecognitionException) { - _localctx.exception = re; - this._errHandler.reportError(this, re); - this._errHandler.recover(this, re); - } else { - throw re; - } - } finally { - this.exitRule(); - } - return _localctx; - } - enumDefinition() { - let _localctx = new EnumDefinitionContext(this._ctx, this.state); - this.enterRule(_localctx, 54, _SolidityParser.RULE_enumDefinition); - let _la; - try { - this.enterOuterAlt(_localctx, 1); - { - this.state = 464; - this.match(_SolidityParser.T__32); - this.state = 465; - this.identifier(); - this.state = 466; - 
this.match(_SolidityParser.T__14); - this.state = 468; - this._errHandler.sync(this); - _la = this._input.LA(1); - if (_la === _SolidityParser.T__13 || _la === _SolidityParser.T__24 || _la === _SolidityParser.T__41 || _la === _SolidityParser.T__53 || (_la - 96 & ~31) === 0 && (1 << _la - 96 & (1 << _SolidityParser.T__95 - 96 | 1 << _SolidityParser.LeaveKeyword - 96 | 1 << _SolidityParser.PayableKeyword - 96 | 1 << _SolidityParser.ConstructorKeyword - 96 | 1 << _SolidityParser.ReceiveKeyword - 96)) !== 0 || _la === _SolidityParser.Identifier) { - { - this.state = 467; - this.enumValue(); - } - } - this.state = 474; - this._errHandler.sync(this); - _la = this._input.LA(1); - while (_la === _SolidityParser.T__15) { - { - { - this.state = 470; - this.match(_SolidityParser.T__15); - this.state = 471; - this.enumValue(); - } - } - this.state = 476; - this._errHandler.sync(this); - _la = this._input.LA(1); - } - this.state = 477; - this.match(_SolidityParser.T__16); - } - } catch (re) { - if (re instanceof import_RecognitionException.RecognitionException) { - _localctx.exception = re; - this._errHandler.reportError(this, re); - this._errHandler.recover(this, re); - } else { - throw re; - } - } finally { - this.exitRule(); - } - return _localctx; - } - parameterList() { - let _localctx = new ParameterListContext(this._ctx, this.state); - this.enterRule(_localctx, 56, _SolidityParser.RULE_parameterList); - let _la; - try { - this.enterOuterAlt(_localctx, 1); - { - this.state = 479; - this.match(_SolidityParser.T__22); - this.state = 488; - this._errHandler.sync(this); - _la = this._input.LA(1); - if ((_la & ~31) === 0 && (1 << _la & (1 << _SolidityParser.T__13 | 1 << _SolidityParser.T__24 | 1 << _SolidityParser.T__29)) !== 0 || (_la - 36 & ~31) === 0 && (1 << _la - 36 & (1 << _SolidityParser.T__35 - 36 | 1 << _SolidityParser.T__37 - 36 | 1 << _SolidityParser.T__41 - 36 | 1 << _SolidityParser.T__53 - 36 | 1 << _SolidityParser.T__54 - 36 | 1 << _SolidityParser.T__55 - 36 | 1 
<< _SolidityParser.T__56 - 36 | 1 << _SolidityParser.T__57 - 36)) !== 0 || (_la - 96 & ~31) === 0 && (1 << _la - 96 & (1 << _SolidityParser.T__95 - 96 | 1 << _SolidityParser.Int - 96 | 1 << _SolidityParser.Uint - 96 | 1 << _SolidityParser.Byte - 96 | 1 << _SolidityParser.Fixed - 96 | 1 << _SolidityParser.Ufixed - 96 | 1 << _SolidityParser.LeaveKeyword - 96 | 1 << _SolidityParser.PayableKeyword - 96 | 1 << _SolidityParser.ConstructorKeyword - 96 | 1 << _SolidityParser.ReceiveKeyword - 96)) !== 0 || _la === _SolidityParser.Identifier) { - { - this.state = 480; - this.parameter(); - this.state = 485; - this._errHandler.sync(this); - _la = this._input.LA(1); - while (_la === _SolidityParser.T__15) { - { - { - this.state = 481; - this.match(_SolidityParser.T__15); - this.state = 482; - this.parameter(); - } - } - this.state = 487; - this._errHandler.sync(this); - _la = this._input.LA(1); - } - } - } - this.state = 490; - this.match(_SolidityParser.T__23); - } - } catch (re) { - if (re instanceof import_RecognitionException.RecognitionException) { - _localctx.exception = re; - this._errHandler.reportError(this, re); - this._errHandler.recover(this, re); - } else { - throw re; - } - } finally { - this.exitRule(); - } - return _localctx; - } - parameter() { - let _localctx = new ParameterContext(this._ctx, this.state); - this.enterRule(_localctx, 58, _SolidityParser.RULE_parameter); - let _la; - try { - this.enterOuterAlt(_localctx, 1); - { - this.state = 492; - this.typeName(0); - this.state = 494; - this._errHandler.sync(this); - switch (this.interpreter.adaptivePredict(this._input, 44, this._ctx)) { - case 1: - { - this.state = 493; - this.storageLocation(); - } - break; - } - this.state = 497; - this._errHandler.sync(this); - _la = this._input.LA(1); - if (_la === _SolidityParser.T__13 || _la === _SolidityParser.T__24 || _la === _SolidityParser.T__41 || _la === _SolidityParser.T__53 || (_la - 96 & ~31) === 0 && (1 << _la - 96 & (1 << _SolidityParser.T__95 - 96 | 1 << 
_SolidityParser.LeaveKeyword - 96 | 1 << _SolidityParser.PayableKeyword - 96 | 1 << _SolidityParser.ConstructorKeyword - 96 | 1 << _SolidityParser.ReceiveKeyword - 96)) !== 0 || _la === _SolidityParser.Identifier) { - { - this.state = 496; - this.identifier(); - } - } - } - } catch (re) { - if (re instanceof import_RecognitionException.RecognitionException) { - _localctx.exception = re; - this._errHandler.reportError(this, re); - this._errHandler.recover(this, re); - } else { - throw re; - } - } finally { - this.exitRule(); - } - return _localctx; - } - eventParameterList() { - let _localctx = new EventParameterListContext(this._ctx, this.state); - this.enterRule(_localctx, 60, _SolidityParser.RULE_eventParameterList); - let _la; - try { - this.enterOuterAlt(_localctx, 1); - { - this.state = 499; - this.match(_SolidityParser.T__22); - this.state = 508; - this._errHandler.sync(this); - _la = this._input.LA(1); - if ((_la & ~31) === 0 && (1 << _la & (1 << _SolidityParser.T__13 | 1 << _SolidityParser.T__24 | 1 << _SolidityParser.T__29)) !== 0 || (_la - 36 & ~31) === 0 && (1 << _la - 36 & (1 << _SolidityParser.T__35 - 36 | 1 << _SolidityParser.T__37 - 36 | 1 << _SolidityParser.T__41 - 36 | 1 << _SolidityParser.T__53 - 36 | 1 << _SolidityParser.T__54 - 36 | 1 << _SolidityParser.T__55 - 36 | 1 << _SolidityParser.T__56 - 36 | 1 << _SolidityParser.T__57 - 36)) !== 0 || (_la - 96 & ~31) === 0 && (1 << _la - 96 & (1 << _SolidityParser.T__95 - 96 | 1 << _SolidityParser.Int - 96 | 1 << _SolidityParser.Uint - 96 | 1 << _SolidityParser.Byte - 96 | 1 << _SolidityParser.Fixed - 96 | 1 << _SolidityParser.Ufixed - 96 | 1 << _SolidityParser.LeaveKeyword - 96 | 1 << _SolidityParser.PayableKeyword - 96 | 1 << _SolidityParser.ConstructorKeyword - 96 | 1 << _SolidityParser.ReceiveKeyword - 96)) !== 0 || _la === _SolidityParser.Identifier) { - { - this.state = 500; - this.eventParameter(); - this.state = 505; - this._errHandler.sync(this); - _la = this._input.LA(1); - while (_la === 
_SolidityParser.T__15) { - { - { - this.state = 501; - this.match(_SolidityParser.T__15); - this.state = 502; - this.eventParameter(); - } - } - this.state = 507; - this._errHandler.sync(this); - _la = this._input.LA(1); - } - } - } - this.state = 510; - this.match(_SolidityParser.T__23); - } - } catch (re) { - if (re instanceof import_RecognitionException.RecognitionException) { - _localctx.exception = re; - this._errHandler.reportError(this, re); - this._errHandler.recover(this, re); - } else { - throw re; - } - } finally { - this.exitRule(); - } - return _localctx; - } - eventParameter() { - let _localctx = new EventParameterContext(this._ctx, this.state); - this.enterRule(_localctx, 62, _SolidityParser.RULE_eventParameter); - let _la; - try { - this.enterOuterAlt(_localctx, 1); - { - this.state = 512; - this.typeName(0); - this.state = 514; - this._errHandler.sync(this); - _la = this._input.LA(1); - if (_la === _SolidityParser.IndexedKeyword) { - { - this.state = 513; - this.match(_SolidityParser.IndexedKeyword); - } - } - this.state = 517; - this._errHandler.sync(this); - _la = this._input.LA(1); - if (_la === _SolidityParser.T__13 || _la === _SolidityParser.T__24 || _la === _SolidityParser.T__41 || _la === _SolidityParser.T__53 || (_la - 96 & ~31) === 0 && (1 << _la - 96 & (1 << _SolidityParser.T__95 - 96 | 1 << _SolidityParser.LeaveKeyword - 96 | 1 << _SolidityParser.PayableKeyword - 96 | 1 << _SolidityParser.ConstructorKeyword - 96 | 1 << _SolidityParser.ReceiveKeyword - 96)) !== 0 || _la === _SolidityParser.Identifier) { - { - this.state = 516; - this.identifier(); - } - } - } - } catch (re) { - if (re instanceof import_RecognitionException.RecognitionException) { - _localctx.exception = re; - this._errHandler.reportError(this, re); - this._errHandler.recover(this, re); - } else { - throw re; - } - } finally { - this.exitRule(); - } - return _localctx; - } - functionTypeParameterList() { - let _localctx = new FunctionTypeParameterListContext(this._ctx, 
this.state); - this.enterRule(_localctx, 64, _SolidityParser.RULE_functionTypeParameterList); - let _la; - try { - this.enterOuterAlt(_localctx, 1); - { - this.state = 519; - this.match(_SolidityParser.T__22); - this.state = 528; - this._errHandler.sync(this); - _la = this._input.LA(1); - if ((_la & ~31) === 0 && (1 << _la & (1 << _SolidityParser.T__13 | 1 << _SolidityParser.T__24 | 1 << _SolidityParser.T__29)) !== 0 || (_la - 36 & ~31) === 0 && (1 << _la - 36 & (1 << _SolidityParser.T__35 - 36 | 1 << _SolidityParser.T__37 - 36 | 1 << _SolidityParser.T__41 - 36 | 1 << _SolidityParser.T__53 - 36 | 1 << _SolidityParser.T__54 - 36 | 1 << _SolidityParser.T__55 - 36 | 1 << _SolidityParser.T__56 - 36 | 1 << _SolidityParser.T__57 - 36)) !== 0 || (_la - 96 & ~31) === 0 && (1 << _la - 96 & (1 << _SolidityParser.T__95 - 96 | 1 << _SolidityParser.Int - 96 | 1 << _SolidityParser.Uint - 96 | 1 << _SolidityParser.Byte - 96 | 1 << _SolidityParser.Fixed - 96 | 1 << _SolidityParser.Ufixed - 96 | 1 << _SolidityParser.LeaveKeyword - 96 | 1 << _SolidityParser.PayableKeyword - 96 | 1 << _SolidityParser.ConstructorKeyword - 96 | 1 << _SolidityParser.ReceiveKeyword - 96)) !== 0 || _la === _SolidityParser.Identifier) { - { - this.state = 520; - this.functionTypeParameter(); - this.state = 525; - this._errHandler.sync(this); - _la = this._input.LA(1); - while (_la === _SolidityParser.T__15) { - { - { - this.state = 521; - this.match(_SolidityParser.T__15); - this.state = 522; - this.functionTypeParameter(); - } - } - this.state = 527; - this._errHandler.sync(this); - _la = this._input.LA(1); - } - } - } - this.state = 530; - this.match(_SolidityParser.T__23); - } - } catch (re) { - if (re instanceof import_RecognitionException.RecognitionException) { - _localctx.exception = re; - this._errHandler.reportError(this, re); - this._errHandler.recover(this, re); - } else { - throw re; - } - } finally { - this.exitRule(); - } - return _localctx; - } - functionTypeParameter() { - let _localctx = 
new FunctionTypeParameterContext(this._ctx, this.state); - this.enterRule(_localctx, 66, _SolidityParser.RULE_functionTypeParameter); - let _la; - try { - this.enterOuterAlt(_localctx, 1); - { - this.state = 532; - this.typeName(0); - this.state = 534; - this._errHandler.sync(this); - _la = this._input.LA(1); - if ((_la - 40 & ~31) === 0 && (1 << _la - 40 & (1 << _SolidityParser.T__39 - 40 | 1 << _SolidityParser.T__40 - 40 | 1 << _SolidityParser.T__41 - 40)) !== 0) { - { - this.state = 533; - this.storageLocation(); - } - } - } - } catch (re) { - if (re instanceof import_RecognitionException.RecognitionException) { - _localctx.exception = re; - this._errHandler.reportError(this, re); - this._errHandler.recover(this, re); - } else { - throw re; - } - } finally { - this.exitRule(); - } - return _localctx; - } - variableDeclaration() { - let _localctx = new VariableDeclarationContext(this._ctx, this.state); - this.enterRule(_localctx, 68, _SolidityParser.RULE_variableDeclaration); - try { - this.enterOuterAlt(_localctx, 1); - { - this.state = 536; - this.typeName(0); - this.state = 538; - this._errHandler.sync(this); - switch (this.interpreter.adaptivePredict(this._input, 53, this._ctx)) { - case 1: - { - this.state = 537; - this.storageLocation(); - } - break; - } - this.state = 540; - this.identifier(); - } - } catch (re) { - if (re instanceof import_RecognitionException.RecognitionException) { - _localctx.exception = re; - this._errHandler.reportError(this, re); - this._errHandler.recover(this, re); - } else { - throw re; - } - } finally { - this.exitRule(); - } - return _localctx; - } - typeName(_p) { - if (_p === void 0) { - _p = 0; - } - let _parentctx = this._ctx; - let _parentState = this.state; - let _localctx = new TypeNameContext(this._ctx, _parentState); - let _prevctx = _localctx; - let _startState = 70; - this.enterRecursionRule(_localctx, 70, _SolidityParser.RULE_typeName, _p); - let _la; - try { - let _alt; - this.enterOuterAlt(_localctx, 1); - { - 
this.state = 549; - this._errHandler.sync(this); - switch (this.interpreter.adaptivePredict(this._input, 54, this._ctx)) { - case 1: - { - this.state = 543; - this.elementaryTypeName(); - } - break; - case 2: - { - this.state = 544; - this.userDefinedTypeName(); - } - break; - case 3: - { - this.state = 545; - this.mapping(); - } - break; - case 4: - { - this.state = 546; - this.functionTypeName(); - } - break; - case 5: - { - this.state = 547; - this.match(_SolidityParser.T__35); - this.state = 548; - this.match(_SolidityParser.PayableKeyword); - } - break; - } - this._ctx._stop = this._input.tryLT(-1); - this.state = 559; - this._errHandler.sync(this); - _alt = this.interpreter.adaptivePredict(this._input, 56, this._ctx); - while (_alt !== 2 && _alt !== import_ATN.ATN.INVALID_ALT_NUMBER) { - if (_alt === 1) { - if (this._parseListeners != null) { - this.triggerExitRuleEvent(); - } - _prevctx = _localctx; - { - { - _localctx = new TypeNameContext(_parentctx, _parentState); - this.pushNewRecursionContext(_localctx, _startState, _SolidityParser.RULE_typeName); - this.state = 551; - if (!this.precpred(this._ctx, 3)) { - throw this.createFailedPredicateException("this.precpred(this._ctx, 3)"); - } - this.state = 552; - this.match(_SolidityParser.T__33); - this.state = 554; - this._errHandler.sync(this); - _la = this._input.LA(1); - if ((_la & ~31) === 0 && (1 << _la & (1 << _SolidityParser.T__4 | 1 << _SolidityParser.T__13 | 1 << _SolidityParser.T__22 | 1 << _SolidityParser.T__24)) !== 0 || (_la - 34 & ~31) === 0 && (1 << _la - 34 & (1 << _SolidityParser.T__33 - 34 | 1 << _SolidityParser.T__35 - 34 | 1 << _SolidityParser.T__41 - 34 | 1 << _SolidityParser.T__53 - 34 | 1 << _SolidityParser.T__54 - 34 | 1 << _SolidityParser.T__55 - 34 | 1 << _SolidityParser.T__56 - 34 | 1 << _SolidityParser.T__57 - 34 | 1 << _SolidityParser.T__58 - 34 | 1 << _SolidityParser.T__59 - 34 | 1 << _SolidityParser.T__60 - 34 | 1 << _SolidityParser.T__62 - 34 | 1 << _SolidityParser.T__63 - 34 | 
1 << _SolidityParser.T__64 - 34)) !== 0 || (_la - 66 & ~31) === 0 && (1 << _la - 66 & (1 << _SolidityParser.T__65 - 66 | 1 << _SolidityParser.T__66 - 66 | 1 << _SolidityParser.T__95 - 66)) !== 0 || (_la - 98 & ~31) === 0 && (1 << _la - 98 & (1 << _SolidityParser.Int - 98 | 1 << _SolidityParser.Uint - 98 | 1 << _SolidityParser.Byte - 98 | 1 << _SolidityParser.Fixed - 98 | 1 << _SolidityParser.Ufixed - 98 | 1 << _SolidityParser.BooleanLiteral - 98 | 1 << _SolidityParser.DecimalNumber - 98 | 1 << _SolidityParser.HexNumber - 98 | 1 << _SolidityParser.HexLiteralFragment - 98 | 1 << _SolidityParser.LeaveKeyword - 98 | 1 << _SolidityParser.PayableKeyword - 98 | 1 << _SolidityParser.TypeKeyword - 98 | 1 << _SolidityParser.ConstructorKeyword - 98 | 1 << _SolidityParser.ReceiveKeyword - 98 | 1 << _SolidityParser.Identifier - 98 | 1 << _SolidityParser.StringLiteralFragment - 98)) !== 0) { - { - this.state = 553; - this.expression(0); - } - } - this.state = 556; - this.match(_SolidityParser.T__34); - } - } - } - this.state = 561; - this._errHandler.sync(this); - _alt = this.interpreter.adaptivePredict(this._input, 56, this._ctx); - } - } - } catch (re) { - if (re instanceof import_RecognitionException.RecognitionException) { - _localctx.exception = re; - this._errHandler.reportError(this, re); - this._errHandler.recover(this, re); - } else { - throw re; - } - } finally { - this.unrollRecursionContexts(_parentctx); - } - return _localctx; - } - userDefinedTypeName() { - let _localctx = new UserDefinedTypeNameContext(this._ctx, this.state); - this.enterRule(_localctx, 72, _SolidityParser.RULE_userDefinedTypeName); - try { - let _alt; - this.enterOuterAlt(_localctx, 1); - { - this.state = 562; - this.identifier(); - this.state = 567; - this._errHandler.sync(this); - _alt = this.interpreter.adaptivePredict(this._input, 57, this._ctx); - while (_alt !== 2 && _alt !== import_ATN.ATN.INVALID_ALT_NUMBER) { - if (_alt === 1) { - { - { - this.state = 563; - 
this.match(_SolidityParser.T__36); - this.state = 564; - this.identifier(); - } - } - } - this.state = 569; - this._errHandler.sync(this); - _alt = this.interpreter.adaptivePredict(this._input, 57, this._ctx); - } - } - } catch (re) { - if (re instanceof import_RecognitionException.RecognitionException) { - _localctx.exception = re; - this._errHandler.reportError(this, re); - this._errHandler.recover(this, re); - } else { - throw re; - } - } finally { - this.exitRule(); - } - return _localctx; - } - mappingKey() { - let _localctx = new MappingKeyContext(this._ctx, this.state); - this.enterRule(_localctx, 74, _SolidityParser.RULE_mappingKey); - try { - this.state = 572; - this._errHandler.sync(this); - switch (this._input.LA(1)) { - case _SolidityParser.T__35: - case _SolidityParser.T__54: - case _SolidityParser.T__55: - case _SolidityParser.T__56: - case _SolidityParser.T__57: - case _SolidityParser.Int: - case _SolidityParser.Uint: - case _SolidityParser.Byte: - case _SolidityParser.Fixed: - case _SolidityParser.Ufixed: - this.enterOuterAlt(_localctx, 1); - { - this.state = 570; - this.elementaryTypeName(); - } - break; - case _SolidityParser.T__13: - case _SolidityParser.T__24: - case _SolidityParser.T__41: - case _SolidityParser.T__53: - case _SolidityParser.T__95: - case _SolidityParser.LeaveKeyword: - case _SolidityParser.PayableKeyword: - case _SolidityParser.ConstructorKeyword: - case _SolidityParser.ReceiveKeyword: - case _SolidityParser.Identifier: - this.enterOuterAlt(_localctx, 2); - { - this.state = 571; - this.userDefinedTypeName(); - } - break; - default: - throw new import_NoViableAltException.NoViableAltException(this); - } - } catch (re) { - if (re instanceof import_RecognitionException.RecognitionException) { - _localctx.exception = re; - this._errHandler.reportError(this, re); - this._errHandler.recover(this, re); - } else { - throw re; - } - } finally { - this.exitRule(); - } - return _localctx; - } - mapping() { - let _localctx = new 
MappingContext(this._ctx, this.state); - this.enterRule(_localctx, 76, _SolidityParser.RULE_mapping); - try { - this.enterOuterAlt(_localctx, 1); - { - this.state = 574; - this.match(_SolidityParser.T__37); - this.state = 575; - this.match(_SolidityParser.T__22); - this.state = 576; - this.mappingKey(); - this.state = 577; - this.match(_SolidityParser.T__38); - this.state = 578; - this.typeName(0); - this.state = 579; - this.match(_SolidityParser.T__23); - } - } catch (re) { - if (re instanceof import_RecognitionException.RecognitionException) { - _localctx.exception = re; - this._errHandler.reportError(this, re); - this._errHandler.recover(this, re); - } else { - throw re; - } - } finally { - this.exitRule(); - } - return _localctx; - } - functionTypeName() { - let _localctx = new FunctionTypeNameContext(this._ctx, this.state); - this.enterRule(_localctx, 78, _SolidityParser.RULE_functionTypeName); - try { - let _alt; - this.enterOuterAlt(_localctx, 1); - { - this.state = 581; - this.match(_SolidityParser.T__29); - this.state = 582; - this.functionTypeParameterList(); - this.state = 588; - this._errHandler.sync(this); - _alt = this.interpreter.adaptivePredict(this._input, 60, this._ctx); - while (_alt !== 2 && _alt !== import_ATN.ATN.INVALID_ALT_NUMBER) { - if (_alt === 1) { - { - this.state = 586; - this._errHandler.sync(this); - switch (this._input.LA(1)) { - case _SolidityParser.InternalKeyword: - { - this.state = 583; - this.match(_SolidityParser.InternalKeyword); - } - break; - case _SolidityParser.ExternalKeyword: - { - this.state = 584; - this.match(_SolidityParser.ExternalKeyword); - } - break; - case _SolidityParser.ConstantKeyword: - case _SolidityParser.PayableKeyword: - case _SolidityParser.PureKeyword: - case _SolidityParser.ViewKeyword: - { - this.state = 585; - this.stateMutability(); - } - break; - default: - throw new import_NoViableAltException.NoViableAltException(this); - } - } - } - this.state = 590; - this._errHandler.sync(this); - _alt = 
this.interpreter.adaptivePredict(this._input, 60, this._ctx); - } - this.state = 593; - this._errHandler.sync(this); - switch (this.interpreter.adaptivePredict(this._input, 61, this._ctx)) { - case 1: - { - this.state = 591; - this.match(_SolidityParser.T__30); - this.state = 592; - this.functionTypeParameterList(); - } - break; - } - } - } catch (re) { - if (re instanceof import_RecognitionException.RecognitionException) { - _localctx.exception = re; - this._errHandler.reportError(this, re); - this._errHandler.recover(this, re); - } else { - throw re; - } - } finally { - this.exitRule(); - } - return _localctx; - } - storageLocation() { - let _localctx = new StorageLocationContext(this._ctx, this.state); - this.enterRule(_localctx, 80, _SolidityParser.RULE_storageLocation); - let _la; - try { - this.enterOuterAlt(_localctx, 1); - { - this.state = 595; - _la = this._input.LA(1); - if (!((_la - 40 & ~31) === 0 && (1 << _la - 40 & (1 << _SolidityParser.T__39 - 40 | 1 << _SolidityParser.T__40 - 40 | 1 << _SolidityParser.T__41 - 40)) !== 0)) { - this._errHandler.recoverInline(this); - } else { - if (this._input.LA(1) === import_Token.Token.EOF) { - this.matchedEOF = true; - } - this._errHandler.reportMatch(this); - this.consume(); - } - } - } catch (re) { - if (re instanceof import_RecognitionException.RecognitionException) { - _localctx.exception = re; - this._errHandler.reportError(this, re); - this._errHandler.recover(this, re); - } else { - throw re; - } - } finally { - this.exitRule(); - } - return _localctx; - } - stateMutability() { - let _localctx = new StateMutabilityContext(this._ctx, this.state); - this.enterRule(_localctx, 82, _SolidityParser.RULE_stateMutability); - let _la; - try { - this.enterOuterAlt(_localctx, 1); - { - this.state = 597; - _la = this._input.LA(1); - if (!((_la - 111 & ~31) === 0 && (1 << _la - 111 & (1 << _SolidityParser.ConstantKeyword - 111 | 1 << _SolidityParser.PayableKeyword - 111 | 1 << _SolidityParser.PureKeyword - 111 | 1 << 
_SolidityParser.ViewKeyword - 111)) !== 0)) { - this._errHandler.recoverInline(this); - } else { - if (this._input.LA(1) === import_Token.Token.EOF) { - this.matchedEOF = true; - } - this._errHandler.reportMatch(this); - this.consume(); - } - } - } catch (re) { - if (re instanceof import_RecognitionException.RecognitionException) { - _localctx.exception = re; - this._errHandler.reportError(this, re); - this._errHandler.recover(this, re); - } else { - throw re; - } - } finally { - this.exitRule(); - } - return _localctx; - } - block() { - let _localctx = new BlockContext(this._ctx, this.state); - this.enterRule(_localctx, 84, _SolidityParser.RULE_block); - let _la; - try { - this.enterOuterAlt(_localctx, 1); - { - this.state = 599; - this.match(_SolidityParser.T__14); - this.state = 603; - this._errHandler.sync(this); - _la = this._input.LA(1); - while ((_la & ~31) === 0 && (1 << _la & (1 << _SolidityParser.T__4 | 1 << _SolidityParser.T__13 | 1 << _SolidityParser.T__14 | 1 << _SolidityParser.T__22 | 1 << _SolidityParser.T__24 | 1 << _SolidityParser.T__26 | 1 << _SolidityParser.T__29)) !== 0 || (_la - 34 & ~31) === 0 && (1 << _la - 34 & (1 << _SolidityParser.T__33 - 34 | 1 << _SolidityParser.T__35 - 34 | 1 << _SolidityParser.T__37 - 34 | 1 << _SolidityParser.T__41 - 34 | 1 << _SolidityParser.T__42 - 34 | 1 << _SolidityParser.T__44 - 34 | 1 << _SolidityParser.T__46 - 34 | 1 << _SolidityParser.T__47 - 34 | 1 << _SolidityParser.T__48 - 34 | 1 << _SolidityParser.T__49 - 34 | 1 << _SolidityParser.T__50 - 34 | 1 << _SolidityParser.T__51 - 34 | 1 << _SolidityParser.T__52 - 34 | 1 << _SolidityParser.T__53 - 34 | 1 << _SolidityParser.T__54 - 34 | 1 << _SolidityParser.T__55 - 34 | 1 << _SolidityParser.T__56 - 34 | 1 << _SolidityParser.T__57 - 34 | 1 << _SolidityParser.T__58 - 34 | 1 << _SolidityParser.T__59 - 34 | 1 << _SolidityParser.T__60 - 34 | 1 << _SolidityParser.T__62 - 34 | 1 << _SolidityParser.T__63 - 34 | 1 << _SolidityParser.T__64 - 34)) !== 0 || (_la - 66 & ~31) === 
0 && (1 << _la - 66 & (1 << _SolidityParser.T__65 - 66 | 1 << _SolidityParser.T__66 - 66 | 1 << _SolidityParser.T__95 - 66)) !== 0 || (_la - 98 & ~31) === 0 && (1 << _la - 98 & (1 << _SolidityParser.Int - 98 | 1 << _SolidityParser.Uint - 98 | 1 << _SolidityParser.Byte - 98 | 1 << _SolidityParser.Fixed - 98 | 1 << _SolidityParser.Ufixed - 98 | 1 << _SolidityParser.BooleanLiteral - 98 | 1 << _SolidityParser.DecimalNumber - 98 | 1 << _SolidityParser.HexNumber - 98 | 1 << _SolidityParser.HexLiteralFragment - 98 | 1 << _SolidityParser.BreakKeyword - 98 | 1 << _SolidityParser.ContinueKeyword - 98 | 1 << _SolidityParser.LeaveKeyword - 98 | 1 << _SolidityParser.PayableKeyword - 98 | 1 << _SolidityParser.TypeKeyword - 98 | 1 << _SolidityParser.ConstructorKeyword - 98 | 1 << _SolidityParser.ReceiveKeyword - 98 | 1 << _SolidityParser.Identifier - 98 | 1 << _SolidityParser.StringLiteralFragment - 98)) !== 0) { - { - { - this.state = 600; - this.statement(); - } - } - this.state = 605; - this._errHandler.sync(this); - _la = this._input.LA(1); - } - this.state = 606; - this.match(_SolidityParser.T__16); - } - } catch (re) { - if (re instanceof import_RecognitionException.RecognitionException) { - _localctx.exception = re; - this._errHandler.reportError(this, re); - this._errHandler.recover(this, re); - } else { - throw re; - } - } finally { - this.exitRule(); - } - return _localctx; - } - statement() { - let _localctx = new StatementContext(this._ctx, this.state); - this.enterRule(_localctx, 86, _SolidityParser.RULE_statement); - try { - this.state = 623; - this._errHandler.sync(this); - switch (this.interpreter.adaptivePredict(this._input, 63, this._ctx)) { - case 1: - this.enterOuterAlt(_localctx, 1); - { - this.state = 608; - this.ifStatement(); - } - break; - case 2: - this.enterOuterAlt(_localctx, 2); - { - this.state = 609; - this.tryStatement(); - } - break; - case 3: - this.enterOuterAlt(_localctx, 3); - { - this.state = 610; - this.whileStatement(); - } - break; - case 
4: - this.enterOuterAlt(_localctx, 4); - { - this.state = 611; - this.forStatement(); - } - break; - case 5: - this.enterOuterAlt(_localctx, 5); - { - this.state = 612; - this.block(); - } - break; - case 6: - this.enterOuterAlt(_localctx, 6); - { - this.state = 613; - this.inlineAssemblyStatement(); - } - break; - case 7: - this.enterOuterAlt(_localctx, 7); - { - this.state = 614; - this.doWhileStatement(); - } - break; - case 8: - this.enterOuterAlt(_localctx, 8); - { - this.state = 615; - this.continueStatement(); - } - break; - case 9: - this.enterOuterAlt(_localctx, 9); - { - this.state = 616; - this.breakStatement(); - } - break; - case 10: - this.enterOuterAlt(_localctx, 10); - { - this.state = 617; - this.returnStatement(); - } - break; - case 11: - this.enterOuterAlt(_localctx, 11); - { - this.state = 618; - this.throwStatement(); - } - break; - case 12: - this.enterOuterAlt(_localctx, 12); - { - this.state = 619; - this.emitStatement(); - } - break; - case 13: - this.enterOuterAlt(_localctx, 13); - { - this.state = 620; - this.simpleStatement(); - } - break; - case 14: - this.enterOuterAlt(_localctx, 14); - { - this.state = 621; - this.uncheckedStatement(); - } - break; - case 15: - this.enterOuterAlt(_localctx, 15); - { - this.state = 622; - this.revertStatement(); - } - break; - } - } catch (re) { - if (re instanceof import_RecognitionException.RecognitionException) { - _localctx.exception = re; - this._errHandler.reportError(this, re); - this._errHandler.recover(this, re); - } else { - throw re; - } - } finally { - this.exitRule(); - } - return _localctx; - } - expressionStatement() { - let _localctx = new ExpressionStatementContext(this._ctx, this.state); - this.enterRule(_localctx, 88, _SolidityParser.RULE_expressionStatement); - try { - this.enterOuterAlt(_localctx, 1); - { - this.state = 625; - this.expression(0); - this.state = 626; - this.match(_SolidityParser.T__1); - } - } catch (re) { - if (re instanceof 
import_RecognitionException.RecognitionException) { - _localctx.exception = re; - this._errHandler.reportError(this, re); - this._errHandler.recover(this, re); - } else { - throw re; - } - } finally { - this.exitRule(); - } - return _localctx; - } - ifStatement() { - let _localctx = new IfStatementContext(this._ctx, this.state); - this.enterRule(_localctx, 90, _SolidityParser.RULE_ifStatement); - try { - this.enterOuterAlt(_localctx, 1); - { - this.state = 628; - this.match(_SolidityParser.T__42); - this.state = 629; - this.match(_SolidityParser.T__22); - this.state = 630; - this.expression(0); - this.state = 631; - this.match(_SolidityParser.T__23); - this.state = 632; - this.statement(); - this.state = 635; - this._errHandler.sync(this); - switch (this.interpreter.adaptivePredict(this._input, 64, this._ctx)) { - case 1: - { - this.state = 633; - this.match(_SolidityParser.T__43); - this.state = 634; - this.statement(); - } - break; - } - } - } catch (re) { - if (re instanceof import_RecognitionException.RecognitionException) { - _localctx.exception = re; - this._errHandler.reportError(this, re); - this._errHandler.recover(this, re); - } else { - throw re; - } - } finally { - this.exitRule(); - } - return _localctx; - } - tryStatement() { - let _localctx = new TryStatementContext(this._ctx, this.state); - this.enterRule(_localctx, 92, _SolidityParser.RULE_tryStatement); - let _la; - try { - this.enterOuterAlt(_localctx, 1); - { - this.state = 637; - this.match(_SolidityParser.T__44); - this.state = 638; - this.expression(0); - this.state = 640; - this._errHandler.sync(this); - _la = this._input.LA(1); - if (_la === _SolidityParser.T__30) { - { - this.state = 639; - this.returnParameters(); - } - } - this.state = 642; - this.block(); - this.state = 644; - this._errHandler.sync(this); - _la = this._input.LA(1); - do { - { - { - this.state = 643; - this.catchClause(); - } - } - this.state = 646; - this._errHandler.sync(this); - _la = this._input.LA(1); - } while (_la 
=== _SolidityParser.T__45); - } - } catch (re) { - if (re instanceof import_RecognitionException.RecognitionException) { - _localctx.exception = re; - this._errHandler.reportError(this, re); - this._errHandler.recover(this, re); - } else { - throw re; - } - } finally { - this.exitRule(); - } - return _localctx; - } - catchClause() { - let _localctx = new CatchClauseContext(this._ctx, this.state); - this.enterRule(_localctx, 94, _SolidityParser.RULE_catchClause); - let _la; - try { - this.enterOuterAlt(_localctx, 1); - { - this.state = 648; - this.match(_SolidityParser.T__45); - this.state = 653; - this._errHandler.sync(this); - _la = this._input.LA(1); - if ((_la & ~31) === 0 && (1 << _la & (1 << _SolidityParser.T__13 | 1 << _SolidityParser.T__22 | 1 << _SolidityParser.T__24)) !== 0 || _la === _SolidityParser.T__41 || _la === _SolidityParser.T__53 || (_la - 96 & ~31) === 0 && (1 << _la - 96 & (1 << _SolidityParser.T__95 - 96 | 1 << _SolidityParser.LeaveKeyword - 96 | 1 << _SolidityParser.PayableKeyword - 96 | 1 << _SolidityParser.ConstructorKeyword - 96 | 1 << _SolidityParser.ReceiveKeyword - 96)) !== 0 || _la === _SolidityParser.Identifier) { - { - this.state = 650; - this._errHandler.sync(this); - _la = this._input.LA(1); - if (_la === _SolidityParser.T__13 || _la === _SolidityParser.T__24 || _la === _SolidityParser.T__41 || _la === _SolidityParser.T__53 || (_la - 96 & ~31) === 0 && (1 << _la - 96 & (1 << _SolidityParser.T__95 - 96 | 1 << _SolidityParser.LeaveKeyword - 96 | 1 << _SolidityParser.PayableKeyword - 96 | 1 << _SolidityParser.ConstructorKeyword - 96 | 1 << _SolidityParser.ReceiveKeyword - 96)) !== 0 || _la === _SolidityParser.Identifier) { - { - this.state = 649; - this.identifier(); - } - } - this.state = 652; - this.parameterList(); - } - } - this.state = 655; - this.block(); - } - } catch (re) { - if (re instanceof import_RecognitionException.RecognitionException) { - _localctx.exception = re; - this._errHandler.reportError(this, re); - 
this._errHandler.recover(this, re); - } else { - throw re; - } - } finally { - this.exitRule(); - } - return _localctx; - } - whileStatement() { - let _localctx = new WhileStatementContext(this._ctx, this.state); - this.enterRule(_localctx, 96, _SolidityParser.RULE_whileStatement); - try { - this.enterOuterAlt(_localctx, 1); - { - this.state = 657; - this.match(_SolidityParser.T__46); - this.state = 658; - this.match(_SolidityParser.T__22); - this.state = 659; - this.expression(0); - this.state = 660; - this.match(_SolidityParser.T__23); - this.state = 661; - this.statement(); - } - } catch (re) { - if (re instanceof import_RecognitionException.RecognitionException) { - _localctx.exception = re; - this._errHandler.reportError(this, re); - this._errHandler.recover(this, re); - } else { - throw re; - } - } finally { - this.exitRule(); - } - return _localctx; - } - simpleStatement() { - let _localctx = new SimpleStatementContext(this._ctx, this.state); - this.enterRule(_localctx, 98, _SolidityParser.RULE_simpleStatement); - try { - this.enterOuterAlt(_localctx, 1); - { - this.state = 665; - this._errHandler.sync(this); - switch (this.interpreter.adaptivePredict(this._input, 69, this._ctx)) { - case 1: - { - this.state = 663; - this.variableDeclarationStatement(); - } - break; - case 2: - { - this.state = 664; - this.expressionStatement(); - } - break; - } - } - } catch (re) { - if (re instanceof import_RecognitionException.RecognitionException) { - _localctx.exception = re; - this._errHandler.reportError(this, re); - this._errHandler.recover(this, re); - } else { - throw re; - } - } finally { - this.exitRule(); - } - return _localctx; - } - uncheckedStatement() { - let _localctx = new UncheckedStatementContext(this._ctx, this.state); - this.enterRule(_localctx, 100, _SolidityParser.RULE_uncheckedStatement); - try { - this.enterOuterAlt(_localctx, 1); - { - this.state = 667; - this.match(_SolidityParser.T__47); - this.state = 668; - this.block(); - } - } catch (re) { - 
if (re instanceof import_RecognitionException.RecognitionException) { - _localctx.exception = re; - this._errHandler.reportError(this, re); - this._errHandler.recover(this, re); - } else { - throw re; - } - } finally { - this.exitRule(); - } - return _localctx; - } - forStatement() { - let _localctx = new ForStatementContext(this._ctx, this.state); - this.enterRule(_localctx, 102, _SolidityParser.RULE_forStatement); - let _la; - try { - this.enterOuterAlt(_localctx, 1); - { - this.state = 670; - this.match(_SolidityParser.T__26); - this.state = 671; - this.match(_SolidityParser.T__22); - this.state = 674; - this._errHandler.sync(this); - switch (this._input.LA(1)) { - case _SolidityParser.T__4: - case _SolidityParser.T__13: - case _SolidityParser.T__22: - case _SolidityParser.T__24: - case _SolidityParser.T__29: - case _SolidityParser.T__33: - case _SolidityParser.T__35: - case _SolidityParser.T__37: - case _SolidityParser.T__41: - case _SolidityParser.T__53: - case _SolidityParser.T__54: - case _SolidityParser.T__55: - case _SolidityParser.T__56: - case _SolidityParser.T__57: - case _SolidityParser.T__58: - case _SolidityParser.T__59: - case _SolidityParser.T__60: - case _SolidityParser.T__62: - case _SolidityParser.T__63: - case _SolidityParser.T__64: - case _SolidityParser.T__65: - case _SolidityParser.T__66: - case _SolidityParser.T__95: - case _SolidityParser.Int: - case _SolidityParser.Uint: - case _SolidityParser.Byte: - case _SolidityParser.Fixed: - case _SolidityParser.Ufixed: - case _SolidityParser.BooleanLiteral: - case _SolidityParser.DecimalNumber: - case _SolidityParser.HexNumber: - case _SolidityParser.HexLiteralFragment: - case _SolidityParser.LeaveKeyword: - case _SolidityParser.PayableKeyword: - case _SolidityParser.TypeKeyword: - case _SolidityParser.ConstructorKeyword: - case _SolidityParser.ReceiveKeyword: - case _SolidityParser.Identifier: - case _SolidityParser.StringLiteralFragment: - { - this.state = 672; - this.simpleStatement(); - } - 
break; - case _SolidityParser.T__1: - { - this.state = 673; - this.match(_SolidityParser.T__1); - } - break; - default: - throw new import_NoViableAltException.NoViableAltException(this); - } - this.state = 678; - this._errHandler.sync(this); - switch (this._input.LA(1)) { - case _SolidityParser.T__4: - case _SolidityParser.T__13: - case _SolidityParser.T__22: - case _SolidityParser.T__24: - case _SolidityParser.T__33: - case _SolidityParser.T__35: - case _SolidityParser.T__41: - case _SolidityParser.T__53: - case _SolidityParser.T__54: - case _SolidityParser.T__55: - case _SolidityParser.T__56: - case _SolidityParser.T__57: - case _SolidityParser.T__58: - case _SolidityParser.T__59: - case _SolidityParser.T__60: - case _SolidityParser.T__62: - case _SolidityParser.T__63: - case _SolidityParser.T__64: - case _SolidityParser.T__65: - case _SolidityParser.T__66: - case _SolidityParser.T__95: - case _SolidityParser.Int: - case _SolidityParser.Uint: - case _SolidityParser.Byte: - case _SolidityParser.Fixed: - case _SolidityParser.Ufixed: - case _SolidityParser.BooleanLiteral: - case _SolidityParser.DecimalNumber: - case _SolidityParser.HexNumber: - case _SolidityParser.HexLiteralFragment: - case _SolidityParser.LeaveKeyword: - case _SolidityParser.PayableKeyword: - case _SolidityParser.TypeKeyword: - case _SolidityParser.ConstructorKeyword: - case _SolidityParser.ReceiveKeyword: - case _SolidityParser.Identifier: - case _SolidityParser.StringLiteralFragment: - { - this.state = 676; - this.expressionStatement(); - } - break; - case _SolidityParser.T__1: - { - this.state = 677; - this.match(_SolidityParser.T__1); - } - break; - default: - throw new import_NoViableAltException.NoViableAltException(this); - } - this.state = 681; - this._errHandler.sync(this); - _la = this._input.LA(1); - if ((_la & ~31) === 0 && (1 << _la & (1 << _SolidityParser.T__4 | 1 << _SolidityParser.T__13 | 1 << _SolidityParser.T__22 | 1 << _SolidityParser.T__24)) !== 0 || (_la - 34 & ~31) === 0 && 
(1 << _la - 34 & (1 << _SolidityParser.T__33 - 34 | 1 << _SolidityParser.T__35 - 34 | 1 << _SolidityParser.T__41 - 34 | 1 << _SolidityParser.T__53 - 34 | 1 << _SolidityParser.T__54 - 34 | 1 << _SolidityParser.T__55 - 34 | 1 << _SolidityParser.T__56 - 34 | 1 << _SolidityParser.T__57 - 34 | 1 << _SolidityParser.T__58 - 34 | 1 << _SolidityParser.T__59 - 34 | 1 << _SolidityParser.T__60 - 34 | 1 << _SolidityParser.T__62 - 34 | 1 << _SolidityParser.T__63 - 34 | 1 << _SolidityParser.T__64 - 34)) !== 0 || (_la - 66 & ~31) === 0 && (1 << _la - 66 & (1 << _SolidityParser.T__65 - 66 | 1 << _SolidityParser.T__66 - 66 | 1 << _SolidityParser.T__95 - 66)) !== 0 || (_la - 98 & ~31) === 0 && (1 << _la - 98 & (1 << _SolidityParser.Int - 98 | 1 << _SolidityParser.Uint - 98 | 1 << _SolidityParser.Byte - 98 | 1 << _SolidityParser.Fixed - 98 | 1 << _SolidityParser.Ufixed - 98 | 1 << _SolidityParser.BooleanLiteral - 98 | 1 << _SolidityParser.DecimalNumber - 98 | 1 << _SolidityParser.HexNumber - 98 | 1 << _SolidityParser.HexLiteralFragment - 98 | 1 << _SolidityParser.LeaveKeyword - 98 | 1 << _SolidityParser.PayableKeyword - 98 | 1 << _SolidityParser.TypeKeyword - 98 | 1 << _SolidityParser.ConstructorKeyword - 98 | 1 << _SolidityParser.ReceiveKeyword - 98 | 1 << _SolidityParser.Identifier - 98 | 1 << _SolidityParser.StringLiteralFragment - 98)) !== 0) { - { - this.state = 680; - this.expression(0); - } - } - this.state = 683; - this.match(_SolidityParser.T__23); - this.state = 684; - this.statement(); - } - } catch (re) { - if (re instanceof import_RecognitionException.RecognitionException) { - _localctx.exception = re; - this._errHandler.reportError(this, re); - this._errHandler.recover(this, re); - } else { - throw re; - } - } finally { - this.exitRule(); - } - return _localctx; - } - inlineAssemblyStatement() { - let _localctx = new InlineAssemblyStatementContext(this._ctx, this.state); - this.enterRule(_localctx, 104, _SolidityParser.RULE_inlineAssemblyStatement); - let _la; - try { - 
this.enterOuterAlt(_localctx, 1); - { - this.state = 686; - this.match(_SolidityParser.T__48); - this.state = 688; - this._errHandler.sync(this); - _la = this._input.LA(1); - if (_la === _SolidityParser.StringLiteralFragment) { - { - this.state = 687; - this.match(_SolidityParser.StringLiteralFragment); - } - } - this.state = 690; - this.assemblyBlock(); - } - } catch (re) { - if (re instanceof import_RecognitionException.RecognitionException) { - _localctx.exception = re; - this._errHandler.reportError(this, re); - this._errHandler.recover(this, re); - } else { - throw re; - } - } finally { - this.exitRule(); - } - return _localctx; - } - doWhileStatement() { - let _localctx = new DoWhileStatementContext(this._ctx, this.state); - this.enterRule(_localctx, 106, _SolidityParser.RULE_doWhileStatement); - try { - this.enterOuterAlt(_localctx, 1); - { - this.state = 692; - this.match(_SolidityParser.T__49); - this.state = 693; - this.statement(); - this.state = 694; - this.match(_SolidityParser.T__46); - this.state = 695; - this.match(_SolidityParser.T__22); - this.state = 696; - this.expression(0); - this.state = 697; - this.match(_SolidityParser.T__23); - this.state = 698; - this.match(_SolidityParser.T__1); - } - } catch (re) { - if (re instanceof import_RecognitionException.RecognitionException) { - _localctx.exception = re; - this._errHandler.reportError(this, re); - this._errHandler.recover(this, re); - } else { - throw re; - } - } finally { - this.exitRule(); - } - return _localctx; - } - continueStatement() { - let _localctx = new ContinueStatementContext(this._ctx, this.state); - this.enterRule(_localctx, 108, _SolidityParser.RULE_continueStatement); - try { - this.enterOuterAlt(_localctx, 1); - { - this.state = 700; - this.match(_SolidityParser.ContinueKeyword); - this.state = 701; - this.match(_SolidityParser.T__1); - } - } catch (re) { - if (re instanceof import_RecognitionException.RecognitionException) { - _localctx.exception = re; - 
this._errHandler.reportError(this, re); - this._errHandler.recover(this, re); - } else { - throw re; - } - } finally { - this.exitRule(); - } - return _localctx; - } - breakStatement() { - let _localctx = new BreakStatementContext(this._ctx, this.state); - this.enterRule(_localctx, 110, _SolidityParser.RULE_breakStatement); - try { - this.enterOuterAlt(_localctx, 1); - { - this.state = 703; - this.match(_SolidityParser.BreakKeyword); - this.state = 704; - this.match(_SolidityParser.T__1); - } - } catch (re) { - if (re instanceof import_RecognitionException.RecognitionException) { - _localctx.exception = re; - this._errHandler.reportError(this, re); - this._errHandler.recover(this, re); - } else { - throw re; - } - } finally { - this.exitRule(); - } - return _localctx; - } - returnStatement() { - let _localctx = new ReturnStatementContext(this._ctx, this.state); - this.enterRule(_localctx, 112, _SolidityParser.RULE_returnStatement); - let _la; - try { - this.enterOuterAlt(_localctx, 1); - { - this.state = 706; - this.match(_SolidityParser.T__50); - this.state = 708; - this._errHandler.sync(this); - _la = this._input.LA(1); - if ((_la & ~31) === 0 && (1 << _la & (1 << _SolidityParser.T__4 | 1 << _SolidityParser.T__13 | 1 << _SolidityParser.T__22 | 1 << _SolidityParser.T__24)) !== 0 || (_la - 34 & ~31) === 0 && (1 << _la - 34 & (1 << _SolidityParser.T__33 - 34 | 1 << _SolidityParser.T__35 - 34 | 1 << _SolidityParser.T__41 - 34 | 1 << _SolidityParser.T__53 - 34 | 1 << _SolidityParser.T__54 - 34 | 1 << _SolidityParser.T__55 - 34 | 1 << _SolidityParser.T__56 - 34 | 1 << _SolidityParser.T__57 - 34 | 1 << _SolidityParser.T__58 - 34 | 1 << _SolidityParser.T__59 - 34 | 1 << _SolidityParser.T__60 - 34 | 1 << _SolidityParser.T__62 - 34 | 1 << _SolidityParser.T__63 - 34 | 1 << _SolidityParser.T__64 - 34)) !== 0 || (_la - 66 & ~31) === 0 && (1 << _la - 66 & (1 << _SolidityParser.T__65 - 66 | 1 << _SolidityParser.T__66 - 66 | 1 << _SolidityParser.T__95 - 66)) !== 0 || (_la - 98 & 
~31) === 0 && (1 << _la - 98 & (1 << _SolidityParser.Int - 98 | 1 << _SolidityParser.Uint - 98 | 1 << _SolidityParser.Byte - 98 | 1 << _SolidityParser.Fixed - 98 | 1 << _SolidityParser.Ufixed - 98 | 1 << _SolidityParser.BooleanLiteral - 98 | 1 << _SolidityParser.DecimalNumber - 98 | 1 << _SolidityParser.HexNumber - 98 | 1 << _SolidityParser.HexLiteralFragment - 98 | 1 << _SolidityParser.LeaveKeyword - 98 | 1 << _SolidityParser.PayableKeyword - 98 | 1 << _SolidityParser.TypeKeyword - 98 | 1 << _SolidityParser.ConstructorKeyword - 98 | 1 << _SolidityParser.ReceiveKeyword - 98 | 1 << _SolidityParser.Identifier - 98 | 1 << _SolidityParser.StringLiteralFragment - 98)) !== 0) { - { - this.state = 707; - this.expression(0); - } - } - this.state = 710; - this.match(_SolidityParser.T__1); - } - } catch (re) { - if (re instanceof import_RecognitionException.RecognitionException) { - _localctx.exception = re; - this._errHandler.reportError(this, re); - this._errHandler.recover(this, re); - } else { - throw re; - } - } finally { - this.exitRule(); - } - return _localctx; - } - throwStatement() { - let _localctx = new ThrowStatementContext(this._ctx, this.state); - this.enterRule(_localctx, 114, _SolidityParser.RULE_throwStatement); - try { - this.enterOuterAlt(_localctx, 1); - { - this.state = 712; - this.match(_SolidityParser.T__51); - this.state = 713; - this.match(_SolidityParser.T__1); - } - } catch (re) { - if (re instanceof import_RecognitionException.RecognitionException) { - _localctx.exception = re; - this._errHandler.reportError(this, re); - this._errHandler.recover(this, re); - } else { - throw re; - } - } finally { - this.exitRule(); - } - return _localctx; - } - emitStatement() { - let _localctx = new EmitStatementContext(this._ctx, this.state); - this.enterRule(_localctx, 116, _SolidityParser.RULE_emitStatement); - try { - this.enterOuterAlt(_localctx, 1); - { - this.state = 715; - this.match(_SolidityParser.T__52); - this.state = 716; - this.functionCall(); - 
this.state = 717; - this.match(_SolidityParser.T__1); - } - } catch (re) { - if (re instanceof import_RecognitionException.RecognitionException) { - _localctx.exception = re; - this._errHandler.reportError(this, re); - this._errHandler.recover(this, re); - } else { - throw re; - } - } finally { - this.exitRule(); - } - return _localctx; - } - revertStatement() { - let _localctx = new RevertStatementContext(this._ctx, this.state); - this.enterRule(_localctx, 118, _SolidityParser.RULE_revertStatement); - try { - this.enterOuterAlt(_localctx, 1); - { - this.state = 719; - this.match(_SolidityParser.T__53); - this.state = 720; - this.functionCall(); - this.state = 721; - this.match(_SolidityParser.T__1); - } - } catch (re) { - if (re instanceof import_RecognitionException.RecognitionException) { - _localctx.exception = re; - this._errHandler.reportError(this, re); - this._errHandler.recover(this, re); - } else { - throw re; - } - } finally { - this.exitRule(); - } - return _localctx; - } - variableDeclarationStatement() { - let _localctx = new VariableDeclarationStatementContext(this._ctx, this.state); - this.enterRule(_localctx, 120, _SolidityParser.RULE_variableDeclarationStatement); - let _la; - try { - this.enterOuterAlt(_localctx, 1); - { - this.state = 730; - this._errHandler.sync(this); - switch (this.interpreter.adaptivePredict(this._input, 75, this._ctx)) { - case 1: - { - this.state = 723; - this.match(_SolidityParser.T__54); - this.state = 724; - this.identifierList(); - } - break; - case 2: - { - this.state = 725; - this.variableDeclaration(); - } - break; - case 3: - { - this.state = 726; - this.match(_SolidityParser.T__22); - this.state = 727; - this.variableDeclarationList(); - this.state = 728; - this.match(_SolidityParser.T__23); - } - break; - } - this.state = 734; - this._errHandler.sync(this); - _la = this._input.LA(1); - if (_la === _SolidityParser.T__9) { - { - this.state = 732; - this.match(_SolidityParser.T__9); - this.state = 733; - 
this.expression(0); - } - } - this.state = 736; - this.match(_SolidityParser.T__1); - } - } catch (re) { - if (re instanceof import_RecognitionException.RecognitionException) { - _localctx.exception = re; - this._errHandler.reportError(this, re); - this._errHandler.recover(this, re); - } else { - throw re; - } - } finally { - this.exitRule(); - } - return _localctx; - } - variableDeclarationList() { - let _localctx = new VariableDeclarationListContext(this._ctx, this.state); - this.enterRule(_localctx, 122, _SolidityParser.RULE_variableDeclarationList); - let _la; - try { - this.enterOuterAlt(_localctx, 1); - { - this.state = 739; - this._errHandler.sync(this); - _la = this._input.LA(1); - if ((_la & ~31) === 0 && (1 << _la & (1 << _SolidityParser.T__13 | 1 << _SolidityParser.T__24 | 1 << _SolidityParser.T__29)) !== 0 || (_la - 36 & ~31) === 0 && (1 << _la - 36 & (1 << _SolidityParser.T__35 - 36 | 1 << _SolidityParser.T__37 - 36 | 1 << _SolidityParser.T__41 - 36 | 1 << _SolidityParser.T__53 - 36 | 1 << _SolidityParser.T__54 - 36 | 1 << _SolidityParser.T__55 - 36 | 1 << _SolidityParser.T__56 - 36 | 1 << _SolidityParser.T__57 - 36)) !== 0 || (_la - 96 & ~31) === 0 && (1 << _la - 96 & (1 << _SolidityParser.T__95 - 96 | 1 << _SolidityParser.Int - 96 | 1 << _SolidityParser.Uint - 96 | 1 << _SolidityParser.Byte - 96 | 1 << _SolidityParser.Fixed - 96 | 1 << _SolidityParser.Ufixed - 96 | 1 << _SolidityParser.LeaveKeyword - 96 | 1 << _SolidityParser.PayableKeyword - 96 | 1 << _SolidityParser.ConstructorKeyword - 96 | 1 << _SolidityParser.ReceiveKeyword - 96)) !== 0 || _la === _SolidityParser.Identifier) { - { - this.state = 738; - this.variableDeclaration(); - } - } - this.state = 747; - this._errHandler.sync(this); - _la = this._input.LA(1); - while (_la === _SolidityParser.T__15) { - { - { - this.state = 741; - this.match(_SolidityParser.T__15); - this.state = 743; - this._errHandler.sync(this); - _la = this._input.LA(1); - if ((_la & ~31) === 0 && (1 << _la & (1 << 
_SolidityParser.T__13 | 1 << _SolidityParser.T__24 | 1 << _SolidityParser.T__29)) !== 0 || (_la - 36 & ~31) === 0 && (1 << _la - 36 & (1 << _SolidityParser.T__35 - 36 | 1 << _SolidityParser.T__37 - 36 | 1 << _SolidityParser.T__41 - 36 | 1 << _SolidityParser.T__53 - 36 | 1 << _SolidityParser.T__54 - 36 | 1 << _SolidityParser.T__55 - 36 | 1 << _SolidityParser.T__56 - 36 | 1 << _SolidityParser.T__57 - 36)) !== 0 || (_la - 96 & ~31) === 0 && (1 << _la - 96 & (1 << _SolidityParser.T__95 - 96 | 1 << _SolidityParser.Int - 96 | 1 << _SolidityParser.Uint - 96 | 1 << _SolidityParser.Byte - 96 | 1 << _SolidityParser.Fixed - 96 | 1 << _SolidityParser.Ufixed - 96 | 1 << _SolidityParser.LeaveKeyword - 96 | 1 << _SolidityParser.PayableKeyword - 96 | 1 << _SolidityParser.ConstructorKeyword - 96 | 1 << _SolidityParser.ReceiveKeyword - 96)) !== 0 || _la === _SolidityParser.Identifier) { - { - this.state = 742; - this.variableDeclaration(); - } - } - } - } - this.state = 749; - this._errHandler.sync(this); - _la = this._input.LA(1); - } - } - } catch (re) { - if (re instanceof import_RecognitionException.RecognitionException) { - _localctx.exception = re; - this._errHandler.reportError(this, re); - this._errHandler.recover(this, re); - } else { - throw re; - } - } finally { - this.exitRule(); - } - return _localctx; - } - identifierList() { - let _localctx = new IdentifierListContext(this._ctx, this.state); - this.enterRule(_localctx, 124, _SolidityParser.RULE_identifierList); - let _la; - try { - let _alt; - this.enterOuterAlt(_localctx, 1); - { - this.state = 750; - this.match(_SolidityParser.T__22); - this.state = 757; - this._errHandler.sync(this); - _alt = this.interpreter.adaptivePredict(this._input, 81, this._ctx); - while (_alt !== 2 && _alt !== import_ATN.ATN.INVALID_ALT_NUMBER) { - if (_alt === 1) { - { - { - this.state = 752; - this._errHandler.sync(this); - _la = this._input.LA(1); - if (_la === _SolidityParser.T__13 || _la === _SolidityParser.T__24 || _la === 
_SolidityParser.T__41 || _la === _SolidityParser.T__53 || (_la - 96 & ~31) === 0 && (1 << _la - 96 & (1 << _SolidityParser.T__95 - 96 | 1 << _SolidityParser.LeaveKeyword - 96 | 1 << _SolidityParser.PayableKeyword - 96 | 1 << _SolidityParser.ConstructorKeyword - 96 | 1 << _SolidityParser.ReceiveKeyword - 96)) !== 0 || _la === _SolidityParser.Identifier) { - { - this.state = 751; - this.identifier(); - } - } - this.state = 754; - this.match(_SolidityParser.T__15); - } - } - } - this.state = 759; - this._errHandler.sync(this); - _alt = this.interpreter.adaptivePredict(this._input, 81, this._ctx); - } - this.state = 761; - this._errHandler.sync(this); - _la = this._input.LA(1); - if (_la === _SolidityParser.T__13 || _la === _SolidityParser.T__24 || _la === _SolidityParser.T__41 || _la === _SolidityParser.T__53 || (_la - 96 & ~31) === 0 && (1 << _la - 96 & (1 << _SolidityParser.T__95 - 96 | 1 << _SolidityParser.LeaveKeyword - 96 | 1 << _SolidityParser.PayableKeyword - 96 | 1 << _SolidityParser.ConstructorKeyword - 96 | 1 << _SolidityParser.ReceiveKeyword - 96)) !== 0 || _la === _SolidityParser.Identifier) { - { - this.state = 760; - this.identifier(); - } - } - this.state = 763; - this.match(_SolidityParser.T__23); - } - } catch (re) { - if (re instanceof import_RecognitionException.RecognitionException) { - _localctx.exception = re; - this._errHandler.reportError(this, re); - this._errHandler.recover(this, re); - } else { - throw re; - } - } finally { - this.exitRule(); - } - return _localctx; - } - elementaryTypeName() { - let _localctx = new ElementaryTypeNameContext(this._ctx, this.state); - this.enterRule(_localctx, 126, _SolidityParser.RULE_elementaryTypeName); - let _la; - try { - this.enterOuterAlt(_localctx, 1); - { - this.state = 765; - _la = this._input.LA(1); - if (!((_la - 36 & ~31) === 0 && (1 << _la - 36 & (1 << _SolidityParser.T__35 - 36 | 1 << _SolidityParser.T__54 - 36 | 1 << _SolidityParser.T__55 - 36 | 1 << _SolidityParser.T__56 - 36 | 1 << 
_SolidityParser.T__57 - 36)) !== 0 || (_la - 98 & ~31) === 0 && (1 << _la - 98 & (1 << _SolidityParser.Int - 98 | 1 << _SolidityParser.Uint - 98 | 1 << _SolidityParser.Byte - 98 | 1 << _SolidityParser.Fixed - 98 | 1 << _SolidityParser.Ufixed - 98)) !== 0)) { - this._errHandler.recoverInline(this); - } else { - if (this._input.LA(1) === import_Token.Token.EOF) { - this.matchedEOF = true; - } - this._errHandler.reportMatch(this); - this.consume(); - } - } - } catch (re) { - if (re instanceof import_RecognitionException.RecognitionException) { - _localctx.exception = re; - this._errHandler.reportError(this, re); - this._errHandler.recover(this, re); - } else { - throw re; - } - } finally { - this.exitRule(); - } - return _localctx; - } - expression(_p) { - if (_p === void 0) { - _p = 0; - } - let _parentctx = this._ctx; - let _parentState = this.state; - let _localctx = new ExpressionContext(this._ctx, _parentState); - let _prevctx = _localctx; - let _startState = 128; - this.enterRecursionRule(_localctx, 128, _SolidityParser.RULE_expression, _p); - let _la; - try { - let _alt; - this.enterOuterAlt(_localctx, 1); - { - this.state = 785; - this._errHandler.sync(this); - switch (this.interpreter.adaptivePredict(this._input, 83, this._ctx)) { - case 1: - { - this.state = 768; - this.match(_SolidityParser.T__60); - this.state = 769; - this.typeName(0); - } - break; - case 2: - { - this.state = 770; - this.match(_SolidityParser.T__22); - this.state = 771; - this.expression(0); - this.state = 772; - this.match(_SolidityParser.T__23); - } - break; - case 3: - { - this.state = 774; - _la = this._input.LA(1); - if (!(_la === _SolidityParser.T__58 || _la === _SolidityParser.T__59)) { - this._errHandler.recoverInline(this); - } else { - if (this._input.LA(1) === import_Token.Token.EOF) { - this.matchedEOF = true; - } - this._errHandler.reportMatch(this); - this.consume(); - } - this.state = 775; - this.expression(19); - } - break; - case 4: - { - this.state = 776; - _la = 
this._input.LA(1); - if (!(_la === _SolidityParser.T__62 || _la === _SolidityParser.T__63)) { - this._errHandler.recoverInline(this); - } else { - if (this._input.LA(1) === import_Token.Token.EOF) { - this.matchedEOF = true; - } - this._errHandler.reportMatch(this); - this.consume(); - } - this.state = 777; - this.expression(18); - } - break; - case 5: - { - this.state = 778; - _la = this._input.LA(1); - if (!(_la === _SolidityParser.T__64 || _la === _SolidityParser.T__65)) { - this._errHandler.recoverInline(this); - } else { - if (this._input.LA(1) === import_Token.Token.EOF) { - this.matchedEOF = true; - } - this._errHandler.reportMatch(this); - this.consume(); - } - this.state = 779; - this.expression(17); - } - break; - case 6: - { - this.state = 780; - this.match(_SolidityParser.T__66); - this.state = 781; - this.expression(16); - } - break; - case 7: - { - this.state = 782; - this.match(_SolidityParser.T__4); - this.state = 783; - this.expression(15); - } - break; - case 8: - { - this.state = 784; - this.primaryExpression(); - } - break; - } - this._ctx._stop = this._input.tryLT(-1); - this.state = 861; - this._errHandler.sync(this); - _alt = this.interpreter.adaptivePredict(this._input, 87, this._ctx); - while (_alt !== 2 && _alt !== import_ATN.ATN.INVALID_ALT_NUMBER) { - if (_alt === 1) { - if (this._parseListeners != null) { - this.triggerExitRuleEvent(); - } - _prevctx = _localctx; - { - this.state = 859; - this._errHandler.sync(this); - switch (this.interpreter.adaptivePredict(this._input, 86, this._ctx)) { - case 1: - { - _localctx = new ExpressionContext(_parentctx, _parentState); - this.pushNewRecursionContext(_localctx, _startState, _SolidityParser.RULE_expression); - this.state = 787; - if (!this.precpred(this._ctx, 14)) { - throw this.createFailedPredicateException("this.precpred(this._ctx, 14)"); - } - this.state = 788; - this.match(_SolidityParser.T__67); - this.state = 789; - this.expression(15); - } - break; - case 2: - { - _localctx = new 
ExpressionContext(_parentctx, _parentState); - this.pushNewRecursionContext(_localctx, _startState, _SolidityParser.RULE_expression); - this.state = 790; - if (!this.precpred(this._ctx, 13)) { - throw this.createFailedPredicateException("this.precpred(this._ctx, 13)"); - } - this.state = 791; - _la = this._input.LA(1); - if (!(_la === _SolidityParser.T__12 || _la === _SolidityParser.T__68 || _la === _SolidityParser.T__69)) { - this._errHandler.recoverInline(this); - } else { - if (this._input.LA(1) === import_Token.Token.EOF) { - this.matchedEOF = true; - } - this._errHandler.reportMatch(this); - this.consume(); - } - this.state = 792; - this.expression(14); - } - break; - case 3: - { - _localctx = new ExpressionContext(_parentctx, _parentState); - this.pushNewRecursionContext(_localctx, _startState, _SolidityParser.RULE_expression); - this.state = 793; - if (!this.precpred(this._ctx, 12)) { - throw this.createFailedPredicateException("this.precpred(this._ctx, 12)"); - } - this.state = 794; - _la = this._input.LA(1); - if (!(_la === _SolidityParser.T__62 || _la === _SolidityParser.T__63)) { - this._errHandler.recoverInline(this); - } else { - if (this._input.LA(1) === import_Token.Token.EOF) { - this.matchedEOF = true; - } - this._errHandler.reportMatch(this); - this.consume(); - } - this.state = 795; - this.expression(13); - } - break; - case 4: - { - _localctx = new ExpressionContext(_parentctx, _parentState); - this.pushNewRecursionContext(_localctx, _startState, _SolidityParser.RULE_expression); - this.state = 796; - if (!this.precpred(this._ctx, 11)) { - throw this.createFailedPredicateException("this.precpred(this._ctx, 11)"); - } - this.state = 797; - _la = this._input.LA(1); - if (!(_la === _SolidityParser.T__70 || _la === _SolidityParser.T__71)) { - this._errHandler.recoverInline(this); - } else { - if (this._input.LA(1) === import_Token.Token.EOF) { - this.matchedEOF = true; - } - this._errHandler.reportMatch(this); - this.consume(); - } - this.state = 
798; - this.expression(12); - } - break; - case 5: - { - _localctx = new ExpressionContext(_parentctx, _parentState); - this.pushNewRecursionContext(_localctx, _startState, _SolidityParser.RULE_expression); - this.state = 799; - if (!this.precpred(this._ctx, 10)) { - throw this.createFailedPredicateException("this.precpred(this._ctx, 10)"); - } - this.state = 800; - this.match(_SolidityParser.T__72); - this.state = 801; - this.expression(11); - } - break; - case 6: - { - _localctx = new ExpressionContext(_parentctx, _parentState); - this.pushNewRecursionContext(_localctx, _startState, _SolidityParser.RULE_expression); - this.state = 802; - if (!this.precpred(this._ctx, 9)) { - throw this.createFailedPredicateException("this.precpred(this._ctx, 9)"); - } - this.state = 803; - this.match(_SolidityParser.T__3); - this.state = 804; - this.expression(10); - } - break; - case 7: - { - _localctx = new ExpressionContext(_parentctx, _parentState); - this.pushNewRecursionContext(_localctx, _startState, _SolidityParser.RULE_expression); - this.state = 805; - if (!this.precpred(this._ctx, 8)) { - throw this.createFailedPredicateException("this.precpred(this._ctx, 8)"); - } - this.state = 806; - this.match(_SolidityParser.T__73); - this.state = 807; - this.expression(9); - } - break; - case 8: - { - _localctx = new ExpressionContext(_parentctx, _parentState); - this.pushNewRecursionContext(_localctx, _startState, _SolidityParser.RULE_expression); - this.state = 808; - if (!this.precpred(this._ctx, 7)) { - throw this.createFailedPredicateException("this.precpred(this._ctx, 7)"); - } - this.state = 809; - _la = this._input.LA(1); - if (!((_la & ~31) === 0 && (1 << _la & (1 << _SolidityParser.T__5 | 1 << _SolidityParser.T__6 | 1 << _SolidityParser.T__7 | 1 << _SolidityParser.T__8)) !== 0)) { - this._errHandler.recoverInline(this); - } else { - if (this._input.LA(1) === import_Token.Token.EOF) { - this.matchedEOF = true; - } - this._errHandler.reportMatch(this); - this.consume(); - 
} - this.state = 810; - this.expression(8); - } - break; - case 9: - { - _localctx = new ExpressionContext(_parentctx, _parentState); - this.pushNewRecursionContext(_localctx, _startState, _SolidityParser.RULE_expression); - this.state = 811; - if (!this.precpred(this._ctx, 6)) { - throw this.createFailedPredicateException("this.precpred(this._ctx, 6)"); - } - this.state = 812; - _la = this._input.LA(1); - if (!(_la === _SolidityParser.T__74 || _la === _SolidityParser.T__75)) { - this._errHandler.recoverInline(this); - } else { - if (this._input.LA(1) === import_Token.Token.EOF) { - this.matchedEOF = true; - } - this._errHandler.reportMatch(this); - this.consume(); - } - this.state = 813; - this.expression(7); - } - break; - case 10: - { - _localctx = new ExpressionContext(_parentctx, _parentState); - this.pushNewRecursionContext(_localctx, _startState, _SolidityParser.RULE_expression); - this.state = 814; - if (!this.precpred(this._ctx, 5)) { - throw this.createFailedPredicateException("this.precpred(this._ctx, 5)"); - } - this.state = 815; - this.match(_SolidityParser.T__76); - this.state = 816; - this.expression(6); - } - break; - case 11: - { - _localctx = new ExpressionContext(_parentctx, _parentState); - this.pushNewRecursionContext(_localctx, _startState, _SolidityParser.RULE_expression); - this.state = 817; - if (!this.precpred(this._ctx, 4)) { - throw this.createFailedPredicateException("this.precpred(this._ctx, 4)"); - } - this.state = 818; - this.match(_SolidityParser.T__2); - this.state = 819; - this.expression(5); - } - break; - case 12: - { - _localctx = new ExpressionContext(_parentctx, _parentState); - this.pushNewRecursionContext(_localctx, _startState, _SolidityParser.RULE_expression); - this.state = 820; - if (!this.precpred(this._ctx, 3)) { - throw this.createFailedPredicateException("this.precpred(this._ctx, 3)"); - } - this.state = 821; - this.match(_SolidityParser.T__77); - this.state = 822; - this.expression(0); - this.state = 823; - 
this.match(_SolidityParser.T__61); - this.state = 824; - this.expression(4); - } - break; - case 13: - { - _localctx = new ExpressionContext(_parentctx, _parentState); - this.pushNewRecursionContext(_localctx, _startState, _SolidityParser.RULE_expression); - this.state = 826; - if (!this.precpred(this._ctx, 2)) { - throw this.createFailedPredicateException("this.precpred(this._ctx, 2)"); - } - this.state = 827; - _la = this._input.LA(1); - if (!(_la === _SolidityParser.T__9 || (_la - 79 & ~31) === 0 && (1 << _la - 79 & (1 << _SolidityParser.T__78 - 79 | 1 << _SolidityParser.T__79 - 79 | 1 << _SolidityParser.T__80 - 79 | 1 << _SolidityParser.T__81 - 79 | 1 << _SolidityParser.T__82 - 79 | 1 << _SolidityParser.T__83 - 79 | 1 << _SolidityParser.T__84 - 79 | 1 << _SolidityParser.T__85 - 79 | 1 << _SolidityParser.T__86 - 79 | 1 << _SolidityParser.T__87 - 79)) !== 0)) { - this._errHandler.recoverInline(this); - } else { - if (this._input.LA(1) === import_Token.Token.EOF) { - this.matchedEOF = true; - } - this._errHandler.reportMatch(this); - this.consume(); - } - this.state = 828; - this.expression(3); - } - break; - case 14: - { - _localctx = new ExpressionContext(_parentctx, _parentState); - this.pushNewRecursionContext(_localctx, _startState, _SolidityParser.RULE_expression); - this.state = 829; - if (!this.precpred(this._ctx, 27)) { - throw this.createFailedPredicateException("this.precpred(this._ctx, 27)"); - } - this.state = 830; - _la = this._input.LA(1); - if (!(_la === _SolidityParser.T__58 || _la === _SolidityParser.T__59)) { - this._errHandler.recoverInline(this); - } else { - if (this._input.LA(1) === import_Token.Token.EOF) { - this.matchedEOF = true; - } - this._errHandler.reportMatch(this); - this.consume(); - } - } - break; - case 15: - { - _localctx = new ExpressionContext(_parentctx, _parentState); - this.pushNewRecursionContext(_localctx, _startState, _SolidityParser.RULE_expression); - this.state = 831; - if (!this.precpred(this._ctx, 25)) { - throw 
this.createFailedPredicateException("this.precpred(this._ctx, 25)"); - } - this.state = 832; - this.match(_SolidityParser.T__33); - this.state = 833; - this.expression(0); - this.state = 834; - this.match(_SolidityParser.T__34); - } - break; - case 16: - { - _localctx = new ExpressionContext(_parentctx, _parentState); - this.pushNewRecursionContext(_localctx, _startState, _SolidityParser.RULE_expression); - this.state = 836; - if (!this.precpred(this._ctx, 24)) { - throw this.createFailedPredicateException("this.precpred(this._ctx, 24)"); - } - this.state = 837; - this.match(_SolidityParser.T__33); - this.state = 839; - this._errHandler.sync(this); - _la = this._input.LA(1); - if ((_la & ~31) === 0 && (1 << _la & (1 << _SolidityParser.T__4 | 1 << _SolidityParser.T__13 | 1 << _SolidityParser.T__22 | 1 << _SolidityParser.T__24)) !== 0 || (_la - 34 & ~31) === 0 && (1 << _la - 34 & (1 << _SolidityParser.T__33 - 34 | 1 << _SolidityParser.T__35 - 34 | 1 << _SolidityParser.T__41 - 34 | 1 << _SolidityParser.T__53 - 34 | 1 << _SolidityParser.T__54 - 34 | 1 << _SolidityParser.T__55 - 34 | 1 << _SolidityParser.T__56 - 34 | 1 << _SolidityParser.T__57 - 34 | 1 << _SolidityParser.T__58 - 34 | 1 << _SolidityParser.T__59 - 34 | 1 << _SolidityParser.T__60 - 34 | 1 << _SolidityParser.T__62 - 34 | 1 << _SolidityParser.T__63 - 34 | 1 << _SolidityParser.T__64 - 34)) !== 0 || (_la - 66 & ~31) === 0 && (1 << _la - 66 & (1 << _SolidityParser.T__65 - 66 | 1 << _SolidityParser.T__66 - 66 | 1 << _SolidityParser.T__95 - 66)) !== 0 || (_la - 98 & ~31) === 0 && (1 << _la - 98 & (1 << _SolidityParser.Int - 98 | 1 << _SolidityParser.Uint - 98 | 1 << _SolidityParser.Byte - 98 | 1 << _SolidityParser.Fixed - 98 | 1 << _SolidityParser.Ufixed - 98 | 1 << _SolidityParser.BooleanLiteral - 98 | 1 << _SolidityParser.DecimalNumber - 98 | 1 << _SolidityParser.HexNumber - 98 | 1 << _SolidityParser.HexLiteralFragment - 98 | 1 << _SolidityParser.LeaveKeyword - 98 | 1 << _SolidityParser.PayableKeyword - 98 | 1 
<< _SolidityParser.TypeKeyword - 98 | 1 << _SolidityParser.ConstructorKeyword - 98 | 1 << _SolidityParser.ReceiveKeyword - 98 | 1 << _SolidityParser.Identifier - 98 | 1 << _SolidityParser.StringLiteralFragment - 98)) !== 0) { - { - this.state = 838; - this.expression(0); - } - } - this.state = 841; - this.match(_SolidityParser.T__61); - this.state = 843; - this._errHandler.sync(this); - _la = this._input.LA(1); - if ((_la & ~31) === 0 && (1 << _la & (1 << _SolidityParser.T__4 | 1 << _SolidityParser.T__13 | 1 << _SolidityParser.T__22 | 1 << _SolidityParser.T__24)) !== 0 || (_la - 34 & ~31) === 0 && (1 << _la - 34 & (1 << _SolidityParser.T__33 - 34 | 1 << _SolidityParser.T__35 - 34 | 1 << _SolidityParser.T__41 - 34 | 1 << _SolidityParser.T__53 - 34 | 1 << _SolidityParser.T__54 - 34 | 1 << _SolidityParser.T__55 - 34 | 1 << _SolidityParser.T__56 - 34 | 1 << _SolidityParser.T__57 - 34 | 1 << _SolidityParser.T__58 - 34 | 1 << _SolidityParser.T__59 - 34 | 1 << _SolidityParser.T__60 - 34 | 1 << _SolidityParser.T__62 - 34 | 1 << _SolidityParser.T__63 - 34 | 1 << _SolidityParser.T__64 - 34)) !== 0 || (_la - 66 & ~31) === 0 && (1 << _la - 66 & (1 << _SolidityParser.T__65 - 66 | 1 << _SolidityParser.T__66 - 66 | 1 << _SolidityParser.T__95 - 66)) !== 0 || (_la - 98 & ~31) === 0 && (1 << _la - 98 & (1 << _SolidityParser.Int - 98 | 1 << _SolidityParser.Uint - 98 | 1 << _SolidityParser.Byte - 98 | 1 << _SolidityParser.Fixed - 98 | 1 << _SolidityParser.Ufixed - 98 | 1 << _SolidityParser.BooleanLiteral - 98 | 1 << _SolidityParser.DecimalNumber - 98 | 1 << _SolidityParser.HexNumber - 98 | 1 << _SolidityParser.HexLiteralFragment - 98 | 1 << _SolidityParser.LeaveKeyword - 98 | 1 << _SolidityParser.PayableKeyword - 98 | 1 << _SolidityParser.TypeKeyword - 98 | 1 << _SolidityParser.ConstructorKeyword - 98 | 1 << _SolidityParser.ReceiveKeyword - 98 | 1 << _SolidityParser.Identifier - 98 | 1 << _SolidityParser.StringLiteralFragment - 98)) !== 0) { - { - this.state = 842; - 
this.expression(0); - } - } - this.state = 845; - this.match(_SolidityParser.T__34); - } - break; - case 17: - { - _localctx = new ExpressionContext(_parentctx, _parentState); - this.pushNewRecursionContext(_localctx, _startState, _SolidityParser.RULE_expression); - this.state = 846; - if (!this.precpred(this._ctx, 23)) { - throw this.createFailedPredicateException("this.precpred(this._ctx, 23)"); - } - this.state = 847; - this.match(_SolidityParser.T__36); - this.state = 848; - this.identifier(); - } - break; - case 18: - { - _localctx = new ExpressionContext(_parentctx, _parentState); - this.pushNewRecursionContext(_localctx, _startState, _SolidityParser.RULE_expression); - this.state = 849; - if (!this.precpred(this._ctx, 22)) { - throw this.createFailedPredicateException("this.precpred(this._ctx, 22)"); - } - this.state = 850; - this.match(_SolidityParser.T__14); - this.state = 851; - this.nameValueList(); - this.state = 852; - this.match(_SolidityParser.T__16); - } - break; - case 19: - { - _localctx = new ExpressionContext(_parentctx, _parentState); - this.pushNewRecursionContext(_localctx, _startState, _SolidityParser.RULE_expression); - this.state = 854; - if (!this.precpred(this._ctx, 21)) { - throw this.createFailedPredicateException("this.precpred(this._ctx, 21)"); - } - this.state = 855; - this.match(_SolidityParser.T__22); - this.state = 856; - this.functionCallArguments(); - this.state = 857; - this.match(_SolidityParser.T__23); - } - break; - } - } - } - this.state = 863; - this._errHandler.sync(this); - _alt = this.interpreter.adaptivePredict(this._input, 87, this._ctx); - } - } - } catch (re) { - if (re instanceof import_RecognitionException.RecognitionException) { - _localctx.exception = re; - this._errHandler.reportError(this, re); - this._errHandler.recover(this, re); - } else { - throw re; - } - } finally { - this.unrollRecursionContexts(_parentctx); - } - return _localctx; - } - primaryExpression() { - let _localctx = new 
PrimaryExpressionContext(this._ctx, this.state); - this.enterRule(_localctx, 130, _SolidityParser.RULE_primaryExpression); - try { - this.state = 881; - this._errHandler.sync(this); - switch (this.interpreter.adaptivePredict(this._input, 90, this._ctx)) { - case 1: - this.enterOuterAlt(_localctx, 1); - { - this.state = 864; - this.match(_SolidityParser.BooleanLiteral); - } - break; - case 2: - this.enterOuterAlt(_localctx, 2); - { - this.state = 865; - this.numberLiteral(); - } - break; - case 3: - this.enterOuterAlt(_localctx, 3); - { - this.state = 866; - this.hexLiteral(); - } - break; - case 4: - this.enterOuterAlt(_localctx, 4); - { - this.state = 867; - this.stringLiteral(); - } - break; - case 5: - this.enterOuterAlt(_localctx, 5); - { - this.state = 868; - this.identifier(); - this.state = 871; - this._errHandler.sync(this); - switch (this.interpreter.adaptivePredict(this._input, 88, this._ctx)) { - case 1: - { - this.state = 869; - this.match(_SolidityParser.T__33); - this.state = 870; - this.match(_SolidityParser.T__34); - } - break; - } - } - break; - case 6: - this.enterOuterAlt(_localctx, 6); - { - this.state = 873; - this.match(_SolidityParser.TypeKeyword); - } - break; - case 7: - this.enterOuterAlt(_localctx, 7); - { - this.state = 874; - this.match(_SolidityParser.PayableKeyword); - } - break; - case 8: - this.enterOuterAlt(_localctx, 8); - { - this.state = 875; - this.tupleExpression(); - } - break; - case 9: - this.enterOuterAlt(_localctx, 9); - { - this.state = 876; - this.typeNameExpression(); - this.state = 879; - this._errHandler.sync(this); - switch (this.interpreter.adaptivePredict(this._input, 89, this._ctx)) { - case 1: - { - this.state = 877; - this.match(_SolidityParser.T__33); - this.state = 878; - this.match(_SolidityParser.T__34); - } - break; - } - } - break; - } - } catch (re) { - if (re instanceof import_RecognitionException.RecognitionException) { - _localctx.exception = re; - this._errHandler.reportError(this, re); - 
this._errHandler.recover(this, re); - } else { - throw re; - } - } finally { - this.exitRule(); - } - return _localctx; - } - expressionList() { - let _localctx = new ExpressionListContext(this._ctx, this.state); - this.enterRule(_localctx, 132, _SolidityParser.RULE_expressionList); - let _la; - try { - this.enterOuterAlt(_localctx, 1); - { - this.state = 883; - this.expression(0); - this.state = 888; - this._errHandler.sync(this); - _la = this._input.LA(1); - while (_la === _SolidityParser.T__15) { - { - { - this.state = 884; - this.match(_SolidityParser.T__15); - this.state = 885; - this.expression(0); - } - } - this.state = 890; - this._errHandler.sync(this); - _la = this._input.LA(1); - } - } - } catch (re) { - if (re instanceof import_RecognitionException.RecognitionException) { - _localctx.exception = re; - this._errHandler.reportError(this, re); - this._errHandler.recover(this, re); - } else { - throw re; - } - } finally { - this.exitRule(); - } - return _localctx; - } - nameValueList() { - let _localctx = new NameValueListContext(this._ctx, this.state); - this.enterRule(_localctx, 134, _SolidityParser.RULE_nameValueList); - let _la; - try { - let _alt; - this.enterOuterAlt(_localctx, 1); - { - this.state = 891; - this.nameValue(); - this.state = 896; - this._errHandler.sync(this); - _alt = this.interpreter.adaptivePredict(this._input, 92, this._ctx); - while (_alt !== 2 && _alt !== import_ATN.ATN.INVALID_ALT_NUMBER) { - if (_alt === 1) { - { - { - this.state = 892; - this.match(_SolidityParser.T__15); - this.state = 893; - this.nameValue(); - } - } - } - this.state = 898; - this._errHandler.sync(this); - _alt = this.interpreter.adaptivePredict(this._input, 92, this._ctx); - } - this.state = 900; - this._errHandler.sync(this); - _la = this._input.LA(1); - if (_la === _SolidityParser.T__15) { - { - this.state = 899; - this.match(_SolidityParser.T__15); - } - } - } - } catch (re) { - if (re instanceof import_RecognitionException.RecognitionException) { - 
_localctx.exception = re; - this._errHandler.reportError(this, re); - this._errHandler.recover(this, re); - } else { - throw re; - } - } finally { - this.exitRule(); - } - return _localctx; - } - nameValue() { - let _localctx = new NameValueContext(this._ctx, this.state); - this.enterRule(_localctx, 136, _SolidityParser.RULE_nameValue); - try { - this.enterOuterAlt(_localctx, 1); - { - this.state = 902; - this.identifier(); - this.state = 903; - this.match(_SolidityParser.T__61); - this.state = 904; - this.expression(0); - } - } catch (re) { - if (re instanceof import_RecognitionException.RecognitionException) { - _localctx.exception = re; - this._errHandler.reportError(this, re); - this._errHandler.recover(this, re); - } else { - throw re; - } - } finally { - this.exitRule(); - } - return _localctx; - } - functionCallArguments() { - let _localctx = new FunctionCallArgumentsContext(this._ctx, this.state); - this.enterRule(_localctx, 138, _SolidityParser.RULE_functionCallArguments); - let _la; - try { - this.state = 914; - this._errHandler.sync(this); - switch (this._input.LA(1)) { - case _SolidityParser.T__14: - this.enterOuterAlt(_localctx, 1); - { - this.state = 906; - this.match(_SolidityParser.T__14); - this.state = 908; - this._errHandler.sync(this); - _la = this._input.LA(1); - if (_la === _SolidityParser.T__13 || _la === _SolidityParser.T__24 || _la === _SolidityParser.T__41 || _la === _SolidityParser.T__53 || (_la - 96 & ~31) === 0 && (1 << _la - 96 & (1 << _SolidityParser.T__95 - 96 | 1 << _SolidityParser.LeaveKeyword - 96 | 1 << _SolidityParser.PayableKeyword - 96 | 1 << _SolidityParser.ConstructorKeyword - 96 | 1 << _SolidityParser.ReceiveKeyword - 96)) !== 0 || _la === _SolidityParser.Identifier) { - { - this.state = 907; - this.nameValueList(); - } - } - this.state = 910; - this.match(_SolidityParser.T__16); - } - break; - case _SolidityParser.T__4: - case _SolidityParser.T__13: - case _SolidityParser.T__22: - case _SolidityParser.T__23: - case 
_SolidityParser.T__24: - case _SolidityParser.T__33: - case _SolidityParser.T__35: - case _SolidityParser.T__41: - case _SolidityParser.T__53: - case _SolidityParser.T__54: - case _SolidityParser.T__55: - case _SolidityParser.T__56: - case _SolidityParser.T__57: - case _SolidityParser.T__58: - case _SolidityParser.T__59: - case _SolidityParser.T__60: - case _SolidityParser.T__62: - case _SolidityParser.T__63: - case _SolidityParser.T__64: - case _SolidityParser.T__65: - case _SolidityParser.T__66: - case _SolidityParser.T__95: - case _SolidityParser.Int: - case _SolidityParser.Uint: - case _SolidityParser.Byte: - case _SolidityParser.Fixed: - case _SolidityParser.Ufixed: - case _SolidityParser.BooleanLiteral: - case _SolidityParser.DecimalNumber: - case _SolidityParser.HexNumber: - case _SolidityParser.HexLiteralFragment: - case _SolidityParser.LeaveKeyword: - case _SolidityParser.PayableKeyword: - case _SolidityParser.TypeKeyword: - case _SolidityParser.ConstructorKeyword: - case _SolidityParser.ReceiveKeyword: - case _SolidityParser.Identifier: - case _SolidityParser.StringLiteralFragment: - this.enterOuterAlt(_localctx, 2); - { - this.state = 912; - this._errHandler.sync(this); - _la = this._input.LA(1); - if ((_la & ~31) === 0 && (1 << _la & (1 << _SolidityParser.T__4 | 1 << _SolidityParser.T__13 | 1 << _SolidityParser.T__22 | 1 << _SolidityParser.T__24)) !== 0 || (_la - 34 & ~31) === 0 && (1 << _la - 34 & (1 << _SolidityParser.T__33 - 34 | 1 << _SolidityParser.T__35 - 34 | 1 << _SolidityParser.T__41 - 34 | 1 << _SolidityParser.T__53 - 34 | 1 << _SolidityParser.T__54 - 34 | 1 << _SolidityParser.T__55 - 34 | 1 << _SolidityParser.T__56 - 34 | 1 << _SolidityParser.T__57 - 34 | 1 << _SolidityParser.T__58 - 34 | 1 << _SolidityParser.T__59 - 34 | 1 << _SolidityParser.T__60 - 34 | 1 << _SolidityParser.T__62 - 34 | 1 << _SolidityParser.T__63 - 34 | 1 << _SolidityParser.T__64 - 34)) !== 0 || (_la - 66 & ~31) === 0 && (1 << _la - 66 & (1 << _SolidityParser.T__65 - 66 | 1 
<< _SolidityParser.T__66 - 66 | 1 << _SolidityParser.T__95 - 66)) !== 0 || (_la - 98 & ~31) === 0 && (1 << _la - 98 & (1 << _SolidityParser.Int - 98 | 1 << _SolidityParser.Uint - 98 | 1 << _SolidityParser.Byte - 98 | 1 << _SolidityParser.Fixed - 98 | 1 << _SolidityParser.Ufixed - 98 | 1 << _SolidityParser.BooleanLiteral - 98 | 1 << _SolidityParser.DecimalNumber - 98 | 1 << _SolidityParser.HexNumber - 98 | 1 << _SolidityParser.HexLiteralFragment - 98 | 1 << _SolidityParser.LeaveKeyword - 98 | 1 << _SolidityParser.PayableKeyword - 98 | 1 << _SolidityParser.TypeKeyword - 98 | 1 << _SolidityParser.ConstructorKeyword - 98 | 1 << _SolidityParser.ReceiveKeyword - 98 | 1 << _SolidityParser.Identifier - 98 | 1 << _SolidityParser.StringLiteralFragment - 98)) !== 0) { - { - this.state = 911; - this.expressionList(); - } - } - } - break; - default: - throw new import_NoViableAltException.NoViableAltException(this); - } - } catch (re) { - if (re instanceof import_RecognitionException.RecognitionException) { - _localctx.exception = re; - this._errHandler.reportError(this, re); - this._errHandler.recover(this, re); - } else { - throw re; - } - } finally { - this.exitRule(); - } - return _localctx; - } - functionCall() { - let _localctx = new FunctionCallContext(this._ctx, this.state); - this.enterRule(_localctx, 140, _SolidityParser.RULE_functionCall); - try { - this.enterOuterAlt(_localctx, 1); - { - this.state = 916; - this.expression(0); - this.state = 917; - this.match(_SolidityParser.T__22); - this.state = 918; - this.functionCallArguments(); - this.state = 919; - this.match(_SolidityParser.T__23); - } - } catch (re) { - if (re instanceof import_RecognitionException.RecognitionException) { - _localctx.exception = re; - this._errHandler.reportError(this, re); - this._errHandler.recover(this, re); - } else { - throw re; - } - } finally { - this.exitRule(); - } - return _localctx; - } - assemblyBlock() { - let _localctx = new AssemblyBlockContext(this._ctx, this.state); - 
this.enterRule(_localctx, 142, _SolidityParser.RULE_assemblyBlock); - let _la; - try { - this.enterOuterAlt(_localctx, 1); - { - this.state = 921; - this.match(_SolidityParser.T__14); - this.state = 925; - this._errHandler.sync(this); - _la = this._input.LA(1); - while ((_la & ~31) === 0 && (1 << _la & (1 << _SolidityParser.T__13 | 1 << _SolidityParser.T__14 | 1 << _SolidityParser.T__22 | 1 << _SolidityParser.T__24 | 1 << _SolidityParser.T__26 | 1 << _SolidityParser.T__29)) !== 0 || (_la - 36 & ~31) === 0 && (1 << _la - 36 & (1 << _SolidityParser.T__35 - 36 | 1 << _SolidityParser.T__41 - 36 | 1 << _SolidityParser.T__42 - 36 | 1 << _SolidityParser.T__48 - 36 | 1 << _SolidityParser.T__50 - 36 | 1 << _SolidityParser.T__53 - 36 | 1 << _SolidityParser.T__57 - 36)) !== 0 || (_la - 89 & ~31) === 0 && (1 << _la - 89 & (1 << _SolidityParser.T__88 - 89 | 1 << _SolidityParser.T__90 - 89 | 1 << _SolidityParser.T__91 - 89 | 1 << _SolidityParser.T__95 - 89 | 1 << _SolidityParser.DecimalNumber - 89 | 1 << _SolidityParser.HexNumber - 89 | 1 << _SolidityParser.HexLiteralFragment - 89 | 1 << _SolidityParser.BreakKeyword - 89 | 1 << _SolidityParser.ContinueKeyword - 89 | 1 << _SolidityParser.LeaveKeyword - 89 | 1 << _SolidityParser.PayableKeyword - 89)) !== 0 || (_la - 125 & ~31) === 0 && (1 << _la - 125 & (1 << _SolidityParser.ConstructorKeyword - 125 | 1 << _SolidityParser.ReceiveKeyword - 125 | 1 << _SolidityParser.Identifier - 125 | 1 << _SolidityParser.StringLiteralFragment - 125)) !== 0) { - { - { - this.state = 922; - this.assemblyItem(); - } - } - this.state = 927; - this._errHandler.sync(this); - _la = this._input.LA(1); - } - this.state = 928; - this.match(_SolidityParser.T__16); - } - } catch (re) { - if (re instanceof import_RecognitionException.RecognitionException) { - _localctx.exception = re; - this._errHandler.reportError(this, re); - this._errHandler.recover(this, re); - } else { - throw re; - } - } finally { - this.exitRule(); - } - return _localctx; - } - 
assemblyItem() { - let _localctx = new AssemblyItemContext(this._ctx, this.state); - this.enterRule(_localctx, 144, _SolidityParser.RULE_assemblyItem); - try { - this.state = 948; - this._errHandler.sync(this); - switch (this.interpreter.adaptivePredict(this._input, 98, this._ctx)) { - case 1: - this.enterOuterAlt(_localctx, 1); - { - this.state = 930; - this.identifier(); - } - break; - case 2: - this.enterOuterAlt(_localctx, 2); - { - this.state = 931; - this.assemblyBlock(); - } - break; - case 3: - this.enterOuterAlt(_localctx, 3); - { - this.state = 932; - this.assemblyExpression(); - } - break; - case 4: - this.enterOuterAlt(_localctx, 4); - { - this.state = 933; - this.assemblyLocalDefinition(); - } - break; - case 5: - this.enterOuterAlt(_localctx, 5); - { - this.state = 934; - this.assemblyAssignment(); - } - break; - case 6: - this.enterOuterAlt(_localctx, 6); - { - this.state = 935; - this.assemblyStackAssignment(); - } - break; - case 7: - this.enterOuterAlt(_localctx, 7); - { - this.state = 936; - this.labelDefinition(); - } - break; - case 8: - this.enterOuterAlt(_localctx, 8); - { - this.state = 937; - this.assemblySwitch(); - } - break; - case 9: - this.enterOuterAlt(_localctx, 9); - { - this.state = 938; - this.assemblyFunctionDefinition(); - } - break; - case 10: - this.enterOuterAlt(_localctx, 10); - { - this.state = 939; - this.assemblyFor(); - } - break; - case 11: - this.enterOuterAlt(_localctx, 11); - { - this.state = 940; - this.assemblyIf(); - } - break; - case 12: - this.enterOuterAlt(_localctx, 12); - { - this.state = 941; - this.match(_SolidityParser.BreakKeyword); - } - break; - case 13: - this.enterOuterAlt(_localctx, 13); - { - this.state = 942; - this.match(_SolidityParser.ContinueKeyword); - } - break; - case 14: - this.enterOuterAlt(_localctx, 14); - { - this.state = 943; - this.match(_SolidityParser.LeaveKeyword); - } - break; - case 15: - this.enterOuterAlt(_localctx, 15); - { - this.state = 944; - this.subAssembly(); - } - 
break; - case 16: - this.enterOuterAlt(_localctx, 16); - { - this.state = 945; - this.numberLiteral(); - } - break; - case 17: - this.enterOuterAlt(_localctx, 17); - { - this.state = 946; - this.stringLiteral(); - } - break; - case 18: - this.enterOuterAlt(_localctx, 18); - { - this.state = 947; - this.hexLiteral(); - } - break; - } - } catch (re) { - if (re instanceof import_RecognitionException.RecognitionException) { - _localctx.exception = re; - this._errHandler.reportError(this, re); - this._errHandler.recover(this, re); - } else { - throw re; - } - } finally { - this.exitRule(); - } - return _localctx; - } - assemblyExpression() { - let _localctx = new AssemblyExpressionContext(this._ctx, this.state); - this.enterRule(_localctx, 146, _SolidityParser.RULE_assemblyExpression); - try { - this.state = 953; - this._errHandler.sync(this); - switch (this.interpreter.adaptivePredict(this._input, 99, this._ctx)) { - case 1: - this.enterOuterAlt(_localctx, 1); - { - this.state = 950; - this.assemblyCall(); - } - break; - case 2: - this.enterOuterAlt(_localctx, 2); - { - this.state = 951; - this.assemblyLiteral(); - } - break; - case 3: - this.enterOuterAlt(_localctx, 3); - { - this.state = 952; - this.assemblyMember(); - } - break; - } - } catch (re) { - if (re instanceof import_RecognitionException.RecognitionException) { - _localctx.exception = re; - this._errHandler.reportError(this, re); - this._errHandler.recover(this, re); - } else { - throw re; - } - } finally { - this.exitRule(); - } - return _localctx; - } - assemblyMember() { - let _localctx = new AssemblyMemberContext(this._ctx, this.state); - this.enterRule(_localctx, 148, _SolidityParser.RULE_assemblyMember); - try { - this.enterOuterAlt(_localctx, 1); - { - this.state = 955; - this.identifier(); - this.state = 956; - this.match(_SolidityParser.T__36); - this.state = 957; - this.identifier(); - } - } catch (re) { - if (re instanceof import_RecognitionException.RecognitionException) { - _localctx.exception 
= re; - this._errHandler.reportError(this, re); - this._errHandler.recover(this, re); - } else { - throw re; - } - } finally { - this.exitRule(); - } - return _localctx; - } - assemblyCall() { - let _localctx = new AssemblyCallContext(this._ctx, this.state); - this.enterRule(_localctx, 150, _SolidityParser.RULE_assemblyCall); - let _la; - try { - this.enterOuterAlt(_localctx, 1); - { - this.state = 963; - this._errHandler.sync(this); - switch (this._input.LA(1)) { - case _SolidityParser.T__50: - { - this.state = 959; - this.match(_SolidityParser.T__50); - } - break; - case _SolidityParser.T__35: - { - this.state = 960; - this.match(_SolidityParser.T__35); - } - break; - case _SolidityParser.T__57: - { - this.state = 961; - this.match(_SolidityParser.T__57); - } - break; - case _SolidityParser.T__13: - case _SolidityParser.T__24: - case _SolidityParser.T__41: - case _SolidityParser.T__53: - case _SolidityParser.T__95: - case _SolidityParser.LeaveKeyword: - case _SolidityParser.PayableKeyword: - case _SolidityParser.ConstructorKeyword: - case _SolidityParser.ReceiveKeyword: - case _SolidityParser.Identifier: - { - this.state = 962; - this.identifier(); - } - break; - default: - throw new import_NoViableAltException.NoViableAltException(this); - } - this.state = 977; - this._errHandler.sync(this); - switch (this.interpreter.adaptivePredict(this._input, 103, this._ctx)) { - case 1: - { - this.state = 965; - this.match(_SolidityParser.T__22); - this.state = 967; - this._errHandler.sync(this); - _la = this._input.LA(1); - if (_la === _SolidityParser.T__13 || _la === _SolidityParser.T__24 || (_la - 36 & ~31) === 0 && (1 << _la - 36 & (1 << _SolidityParser.T__35 - 36 | 1 << _SolidityParser.T__41 - 36 | 1 << _SolidityParser.T__50 - 36 | 1 << _SolidityParser.T__53 - 36 | 1 << _SolidityParser.T__57 - 36)) !== 0 || (_la - 96 & ~31) === 0 && (1 << _la - 96 & (1 << _SolidityParser.T__95 - 96 | 1 << _SolidityParser.DecimalNumber - 96 | 1 << _SolidityParser.HexNumber - 96 | 1 << 
_SolidityParser.HexLiteralFragment - 96 | 1 << _SolidityParser.LeaveKeyword - 96 | 1 << _SolidityParser.PayableKeyword - 96 | 1 << _SolidityParser.ConstructorKeyword - 96 | 1 << _SolidityParser.ReceiveKeyword - 96)) !== 0 || _la === _SolidityParser.Identifier || _la === _SolidityParser.StringLiteralFragment) { - { - this.state = 966; - this.assemblyExpression(); - } - } - this.state = 973; - this._errHandler.sync(this); - _la = this._input.LA(1); - while (_la === _SolidityParser.T__15) { - { - { - this.state = 969; - this.match(_SolidityParser.T__15); - this.state = 970; - this.assemblyExpression(); - } - } - this.state = 975; - this._errHandler.sync(this); - _la = this._input.LA(1); - } - this.state = 976; - this.match(_SolidityParser.T__23); - } - break; - } - } - } catch (re) { - if (re instanceof import_RecognitionException.RecognitionException) { - _localctx.exception = re; - this._errHandler.reportError(this, re); - this._errHandler.recover(this, re); - } else { - throw re; - } - } finally { - this.exitRule(); - } - return _localctx; - } - assemblyLocalDefinition() { - let _localctx = new AssemblyLocalDefinitionContext(this._ctx, this.state); - this.enterRule(_localctx, 152, _SolidityParser.RULE_assemblyLocalDefinition); - let _la; - try { - this.enterOuterAlt(_localctx, 1); - { - this.state = 979; - this.match(_SolidityParser.T__88); - this.state = 980; - this.assemblyIdentifierOrList(); - this.state = 983; - this._errHandler.sync(this); - _la = this._input.LA(1); - if (_la === _SolidityParser.T__89) { - { - this.state = 981; - this.match(_SolidityParser.T__89); - this.state = 982; - this.assemblyExpression(); - } - } - } - } catch (re) { - if (re instanceof import_RecognitionException.RecognitionException) { - _localctx.exception = re; - this._errHandler.reportError(this, re); - this._errHandler.recover(this, re); - } else { - throw re; - } - } finally { - this.exitRule(); - } - return _localctx; - } - assemblyAssignment() { - let _localctx = new 
AssemblyAssignmentContext(this._ctx, this.state); - this.enterRule(_localctx, 154, _SolidityParser.RULE_assemblyAssignment); - try { - this.enterOuterAlt(_localctx, 1); - { - this.state = 985; - this.assemblyIdentifierOrList(); - this.state = 986; - this.match(_SolidityParser.T__89); - this.state = 987; - this.assemblyExpression(); - } - } catch (re) { - if (re instanceof import_RecognitionException.RecognitionException) { - _localctx.exception = re; - this._errHandler.reportError(this, re); - this._errHandler.recover(this, re); - } else { - throw re; - } - } finally { - this.exitRule(); - } - return _localctx; - } - assemblyIdentifierOrList() { - let _localctx = new AssemblyIdentifierOrListContext(this._ctx, this.state); - this.enterRule(_localctx, 156, _SolidityParser.RULE_assemblyIdentifierOrList); - try { - this.state = 995; - this._errHandler.sync(this); - switch (this.interpreter.adaptivePredict(this._input, 105, this._ctx)) { - case 1: - this.enterOuterAlt(_localctx, 1); - { - this.state = 989; - this.identifier(); - } - break; - case 2: - this.enterOuterAlt(_localctx, 2); - { - this.state = 990; - this.assemblyMember(); - } - break; - case 3: - this.enterOuterAlt(_localctx, 3); - { - this.state = 991; - this.match(_SolidityParser.T__22); - this.state = 992; - this.assemblyIdentifierList(); - this.state = 993; - this.match(_SolidityParser.T__23); - } - break; - } - } catch (re) { - if (re instanceof import_RecognitionException.RecognitionException) { - _localctx.exception = re; - this._errHandler.reportError(this, re); - this._errHandler.recover(this, re); - } else { - throw re; - } - } finally { - this.exitRule(); - } - return _localctx; - } - assemblyIdentifierList() { - let _localctx = new AssemblyIdentifierListContext(this._ctx, this.state); - this.enterRule(_localctx, 158, _SolidityParser.RULE_assemblyIdentifierList); - let _la; - try { - this.enterOuterAlt(_localctx, 1); - { - this.state = 997; - this.identifier(); - this.state = 1002; - 
this._errHandler.sync(this); - _la = this._input.LA(1); - while (_la === _SolidityParser.T__15) { - { - { - this.state = 998; - this.match(_SolidityParser.T__15); - this.state = 999; - this.identifier(); - } - } - this.state = 1004; - this._errHandler.sync(this); - _la = this._input.LA(1); - } - } - } catch (re) { - if (re instanceof import_RecognitionException.RecognitionException) { - _localctx.exception = re; - this._errHandler.reportError(this, re); - this._errHandler.recover(this, re); - } else { - throw re; - } - } finally { - this.exitRule(); - } - return _localctx; - } - assemblyStackAssignment() { - let _localctx = new AssemblyStackAssignmentContext(this._ctx, this.state); - this.enterRule(_localctx, 160, _SolidityParser.RULE_assemblyStackAssignment); - try { - this.enterOuterAlt(_localctx, 1); - { - this.state = 1005; - this.match(_SolidityParser.T__90); - this.state = 1006; - this.identifier(); - } - } catch (re) { - if (re instanceof import_RecognitionException.RecognitionException) { - _localctx.exception = re; - this._errHandler.reportError(this, re); - this._errHandler.recover(this, re); - } else { - throw re; - } - } finally { - this.exitRule(); - } - return _localctx; - } - labelDefinition() { - let _localctx = new LabelDefinitionContext(this._ctx, this.state); - this.enterRule(_localctx, 162, _SolidityParser.RULE_labelDefinition); - try { - this.enterOuterAlt(_localctx, 1); - { - this.state = 1008; - this.identifier(); - this.state = 1009; - this.match(_SolidityParser.T__61); - } - } catch (re) { - if (re instanceof import_RecognitionException.RecognitionException) { - _localctx.exception = re; - this._errHandler.reportError(this, re); - this._errHandler.recover(this, re); - } else { - throw re; - } - } finally { - this.exitRule(); - } - return _localctx; - } - assemblySwitch() { - let _localctx = new AssemblySwitchContext(this._ctx, this.state); - this.enterRule(_localctx, 164, _SolidityParser.RULE_assemblySwitch); - let _la; - try { - 
this.enterOuterAlt(_localctx, 1); - { - this.state = 1011; - this.match(_SolidityParser.T__91); - this.state = 1012; - this.assemblyExpression(); - this.state = 1016; - this._errHandler.sync(this); - _la = this._input.LA(1); - while (_la === _SolidityParser.T__92 || _la === _SolidityParser.T__93) { - { - { - this.state = 1013; - this.assemblyCase(); - } - } - this.state = 1018; - this._errHandler.sync(this); - _la = this._input.LA(1); - } - } - } catch (re) { - if (re instanceof import_RecognitionException.RecognitionException) { - _localctx.exception = re; - this._errHandler.reportError(this, re); - this._errHandler.recover(this, re); - } else { - throw re; - } - } finally { - this.exitRule(); - } - return _localctx; - } - assemblyCase() { - let _localctx = new AssemblyCaseContext(this._ctx, this.state); - this.enterRule(_localctx, 166, _SolidityParser.RULE_assemblyCase); - try { - this.state = 1025; - this._errHandler.sync(this); - switch (this._input.LA(1)) { - case _SolidityParser.T__92: - this.enterOuterAlt(_localctx, 1); - { - this.state = 1019; - this.match(_SolidityParser.T__92); - this.state = 1020; - this.assemblyLiteral(); - this.state = 1021; - this.assemblyBlock(); - } - break; - case _SolidityParser.T__93: - this.enterOuterAlt(_localctx, 2); - { - this.state = 1023; - this.match(_SolidityParser.T__93); - this.state = 1024; - this.assemblyBlock(); - } - break; - default: - throw new import_NoViableAltException.NoViableAltException(this); - } - } catch (re) { - if (re instanceof import_RecognitionException.RecognitionException) { - _localctx.exception = re; - this._errHandler.reportError(this, re); - this._errHandler.recover(this, re); - } else { - throw re; - } - } finally { - this.exitRule(); - } - return _localctx; - } - assemblyFunctionDefinition() { - let _localctx = new AssemblyFunctionDefinitionContext(this._ctx, this.state); - this.enterRule(_localctx, 168, _SolidityParser.RULE_assemblyFunctionDefinition); - let _la; - try { - 
this.enterOuterAlt(_localctx, 1); - { - this.state = 1027; - this.match(_SolidityParser.T__29); - this.state = 1028; - this.identifier(); - this.state = 1029; - this.match(_SolidityParser.T__22); - this.state = 1031; - this._errHandler.sync(this); - _la = this._input.LA(1); - if (_la === _SolidityParser.T__13 || _la === _SolidityParser.T__24 || _la === _SolidityParser.T__41 || _la === _SolidityParser.T__53 || (_la - 96 & ~31) === 0 && (1 << _la - 96 & (1 << _SolidityParser.T__95 - 96 | 1 << _SolidityParser.LeaveKeyword - 96 | 1 << _SolidityParser.PayableKeyword - 96 | 1 << _SolidityParser.ConstructorKeyword - 96 | 1 << _SolidityParser.ReceiveKeyword - 96)) !== 0 || _la === _SolidityParser.Identifier) { - { - this.state = 1030; - this.assemblyIdentifierList(); - } - } - this.state = 1033; - this.match(_SolidityParser.T__23); - this.state = 1035; - this._errHandler.sync(this); - _la = this._input.LA(1); - if (_la === _SolidityParser.T__94) { - { - this.state = 1034; - this.assemblyFunctionReturns(); - } - } - this.state = 1037; - this.assemblyBlock(); - } - } catch (re) { - if (re instanceof import_RecognitionException.RecognitionException) { - _localctx.exception = re; - this._errHandler.reportError(this, re); - this._errHandler.recover(this, re); - } else { - throw re; - } - } finally { - this.exitRule(); - } - return _localctx; - } - assemblyFunctionReturns() { - let _localctx = new AssemblyFunctionReturnsContext(this._ctx, this.state); - this.enterRule(_localctx, 170, _SolidityParser.RULE_assemblyFunctionReturns); - try { - this.enterOuterAlt(_localctx, 1); - { - { - this.state = 1039; - this.match(_SolidityParser.T__94); - this.state = 1040; - this.assemblyIdentifierList(); - } - } - } catch (re) { - if (re instanceof import_RecognitionException.RecognitionException) { - _localctx.exception = re; - this._errHandler.reportError(this, re); - this._errHandler.recover(this, re); - } else { - throw re; - } - } finally { - this.exitRule(); - } - return _localctx; - } 
- assemblyFor() { - let _localctx = new AssemblyForContext(this._ctx, this.state); - this.enterRule(_localctx, 172, _SolidityParser.RULE_assemblyFor); - try { - this.enterOuterAlt(_localctx, 1); - { - this.state = 1042; - this.match(_SolidityParser.T__26); - this.state = 1045; - this._errHandler.sync(this); - switch (this._input.LA(1)) { - case _SolidityParser.T__14: - { - this.state = 1043; - this.assemblyBlock(); - } - break; - case _SolidityParser.T__13: - case _SolidityParser.T__24: - case _SolidityParser.T__35: - case _SolidityParser.T__41: - case _SolidityParser.T__50: - case _SolidityParser.T__53: - case _SolidityParser.T__57: - case _SolidityParser.T__95: - case _SolidityParser.DecimalNumber: - case _SolidityParser.HexNumber: - case _SolidityParser.HexLiteralFragment: - case _SolidityParser.LeaveKeyword: - case _SolidityParser.PayableKeyword: - case _SolidityParser.ConstructorKeyword: - case _SolidityParser.ReceiveKeyword: - case _SolidityParser.Identifier: - case _SolidityParser.StringLiteralFragment: - { - this.state = 1044; - this.assemblyExpression(); - } - break; - default: - throw new import_NoViableAltException.NoViableAltException(this); - } - this.state = 1047; - this.assemblyExpression(); - this.state = 1050; - this._errHandler.sync(this); - switch (this._input.LA(1)) { - case _SolidityParser.T__14: - { - this.state = 1048; - this.assemblyBlock(); - } - break; - case _SolidityParser.T__13: - case _SolidityParser.T__24: - case _SolidityParser.T__35: - case _SolidityParser.T__41: - case _SolidityParser.T__50: - case _SolidityParser.T__53: - case _SolidityParser.T__57: - case _SolidityParser.T__95: - case _SolidityParser.DecimalNumber: - case _SolidityParser.HexNumber: - case _SolidityParser.HexLiteralFragment: - case _SolidityParser.LeaveKeyword: - case _SolidityParser.PayableKeyword: - case _SolidityParser.ConstructorKeyword: - case _SolidityParser.ReceiveKeyword: - case _SolidityParser.Identifier: - case _SolidityParser.StringLiteralFragment: - { 
- this.state = 1049; - this.assemblyExpression(); - } - break; - default: - throw new import_NoViableAltException.NoViableAltException(this); - } - this.state = 1052; - this.assemblyBlock(); - } - } catch (re) { - if (re instanceof import_RecognitionException.RecognitionException) { - _localctx.exception = re; - this._errHandler.reportError(this, re); - this._errHandler.recover(this, re); - } else { - throw re; - } - } finally { - this.exitRule(); - } - return _localctx; - } - assemblyIf() { - let _localctx = new AssemblyIfContext(this._ctx, this.state); - this.enterRule(_localctx, 174, _SolidityParser.RULE_assemblyIf); - try { - this.enterOuterAlt(_localctx, 1); - { - this.state = 1054; - this.match(_SolidityParser.T__42); - this.state = 1055; - this.assemblyExpression(); - this.state = 1056; - this.assemblyBlock(); - } - } catch (re) { - if (re instanceof import_RecognitionException.RecognitionException) { - _localctx.exception = re; - this._errHandler.reportError(this, re); - this._errHandler.recover(this, re); - } else { - throw re; - } - } finally { - this.exitRule(); - } - return _localctx; - } - assemblyLiteral() { - let _localctx = new AssemblyLiteralContext(this._ctx, this.state); - this.enterRule(_localctx, 176, _SolidityParser.RULE_assemblyLiteral); - try { - this.state = 1062; - this._errHandler.sync(this); - switch (this._input.LA(1)) { - case _SolidityParser.StringLiteralFragment: - this.enterOuterAlt(_localctx, 1); - { - this.state = 1058; - this.stringLiteral(); - } - break; - case _SolidityParser.DecimalNumber: - this.enterOuterAlt(_localctx, 2); - { - this.state = 1059; - this.match(_SolidityParser.DecimalNumber); - } - break; - case _SolidityParser.HexNumber: - this.enterOuterAlt(_localctx, 3); - { - this.state = 1060; - this.match(_SolidityParser.HexNumber); - } - break; - case _SolidityParser.HexLiteralFragment: - this.enterOuterAlt(_localctx, 4); - { - this.state = 1061; - this.hexLiteral(); - } - break; - default: - throw new 
import_NoViableAltException.NoViableAltException(this); - } - } catch (re) { - if (re instanceof import_RecognitionException.RecognitionException) { - _localctx.exception = re; - this._errHandler.reportError(this, re); - this._errHandler.recover(this, re); - } else { - throw re; - } - } finally { - this.exitRule(); - } - return _localctx; - } - subAssembly() { - let _localctx = new SubAssemblyContext(this._ctx, this.state); - this.enterRule(_localctx, 178, _SolidityParser.RULE_subAssembly); - try { - this.enterOuterAlt(_localctx, 1); - { - this.state = 1064; - this.match(_SolidityParser.T__48); - this.state = 1065; - this.identifier(); - this.state = 1066; - this.assemblyBlock(); - } - } catch (re) { - if (re instanceof import_RecognitionException.RecognitionException) { - _localctx.exception = re; - this._errHandler.reportError(this, re); - this._errHandler.recover(this, re); - } else { - throw re; - } - } finally { - this.exitRule(); - } - return _localctx; - } - tupleExpression() { - let _localctx = new TupleExpressionContext(this._ctx, this.state); - this.enterRule(_localctx, 180, _SolidityParser.RULE_tupleExpression); - let _la; - try { - this.state = 1094; - this._errHandler.sync(this); - switch (this._input.LA(1)) { - case _SolidityParser.T__22: - this.enterOuterAlt(_localctx, 1); - { - this.state = 1068; - this.match(_SolidityParser.T__22); - { - this.state = 1070; - this._errHandler.sync(this); - _la = this._input.LA(1); - if ((_la & ~31) === 0 && (1 << _la & (1 << _SolidityParser.T__4 | 1 << _SolidityParser.T__13 | 1 << _SolidityParser.T__22 | 1 << _SolidityParser.T__24)) !== 0 || (_la - 34 & ~31) === 0 && (1 << _la - 34 & (1 << _SolidityParser.T__33 - 34 | 1 << _SolidityParser.T__35 - 34 | 1 << _SolidityParser.T__41 - 34 | 1 << _SolidityParser.T__53 - 34 | 1 << _SolidityParser.T__54 - 34 | 1 << _SolidityParser.T__55 - 34 | 1 << _SolidityParser.T__56 - 34 | 1 << _SolidityParser.T__57 - 34 | 1 << _SolidityParser.T__58 - 34 | 1 << _SolidityParser.T__59 - 34 
| 1 << _SolidityParser.T__60 - 34 | 1 << _SolidityParser.T__62 - 34 | 1 << _SolidityParser.T__63 - 34 | 1 << _SolidityParser.T__64 - 34)) !== 0 || (_la - 66 & ~31) === 0 && (1 << _la - 66 & (1 << _SolidityParser.T__65 - 66 | 1 << _SolidityParser.T__66 - 66 | 1 << _SolidityParser.T__95 - 66)) !== 0 || (_la - 98 & ~31) === 0 && (1 << _la - 98 & (1 << _SolidityParser.Int - 98 | 1 << _SolidityParser.Uint - 98 | 1 << _SolidityParser.Byte - 98 | 1 << _SolidityParser.Fixed - 98 | 1 << _SolidityParser.Ufixed - 98 | 1 << _SolidityParser.BooleanLiteral - 98 | 1 << _SolidityParser.DecimalNumber - 98 | 1 << _SolidityParser.HexNumber - 98 | 1 << _SolidityParser.HexLiteralFragment - 98 | 1 << _SolidityParser.LeaveKeyword - 98 | 1 << _SolidityParser.PayableKeyword - 98 | 1 << _SolidityParser.TypeKeyword - 98 | 1 << _SolidityParser.ConstructorKeyword - 98 | 1 << _SolidityParser.ReceiveKeyword - 98 | 1 << _SolidityParser.Identifier - 98 | 1 << _SolidityParser.StringLiteralFragment - 98)) !== 0) { - { - this.state = 1069; - this.expression(0); - } - } - this.state = 1078; - this._errHandler.sync(this); - _la = this._input.LA(1); - while (_la === _SolidityParser.T__15) { - { - { - this.state = 1072; - this.match(_SolidityParser.T__15); - this.state = 1074; - this._errHandler.sync(this); - _la = this._input.LA(1); - if ((_la & ~31) === 0 && (1 << _la & (1 << _SolidityParser.T__4 | 1 << _SolidityParser.T__13 | 1 << _SolidityParser.T__22 | 1 << _SolidityParser.T__24)) !== 0 || (_la - 34 & ~31) === 0 && (1 << _la - 34 & (1 << _SolidityParser.T__33 - 34 | 1 << _SolidityParser.T__35 - 34 | 1 << _SolidityParser.T__41 - 34 | 1 << _SolidityParser.T__53 - 34 | 1 << _SolidityParser.T__54 - 34 | 1 << _SolidityParser.T__55 - 34 | 1 << _SolidityParser.T__56 - 34 | 1 << _SolidityParser.T__57 - 34 | 1 << _SolidityParser.T__58 - 34 | 1 << _SolidityParser.T__59 - 34 | 1 << _SolidityParser.T__60 - 34 | 1 << _SolidityParser.T__62 - 34 | 1 << _SolidityParser.T__63 - 34 | 1 << _SolidityParser.T__64 - 34)) 
!== 0 || (_la - 66 & ~31) === 0 && (1 << _la - 66 & (1 << _SolidityParser.T__65 - 66 | 1 << _SolidityParser.T__66 - 66 | 1 << _SolidityParser.T__95 - 66)) !== 0 || (_la - 98 & ~31) === 0 && (1 << _la - 98 & (1 << _SolidityParser.Int - 98 | 1 << _SolidityParser.Uint - 98 | 1 << _SolidityParser.Byte - 98 | 1 << _SolidityParser.Fixed - 98 | 1 << _SolidityParser.Ufixed - 98 | 1 << _SolidityParser.BooleanLiteral - 98 | 1 << _SolidityParser.DecimalNumber - 98 | 1 << _SolidityParser.HexNumber - 98 | 1 << _SolidityParser.HexLiteralFragment - 98 | 1 << _SolidityParser.LeaveKeyword - 98 | 1 << _SolidityParser.PayableKeyword - 98 | 1 << _SolidityParser.TypeKeyword - 98 | 1 << _SolidityParser.ConstructorKeyword - 98 | 1 << _SolidityParser.ReceiveKeyword - 98 | 1 << _SolidityParser.Identifier - 98 | 1 << _SolidityParser.StringLiteralFragment - 98)) !== 0) { - { - this.state = 1073; - this.expression(0); - } - } - } - } - this.state = 1080; - this._errHandler.sync(this); - _la = this._input.LA(1); - } - } - this.state = 1081; - this.match(_SolidityParser.T__23); - } - break; - case _SolidityParser.T__33: - this.enterOuterAlt(_localctx, 2); - { - this.state = 1082; - this.match(_SolidityParser.T__33); - this.state = 1091; - this._errHandler.sync(this); - _la = this._input.LA(1); - if ((_la & ~31) === 0 && (1 << _la & (1 << _SolidityParser.T__4 | 1 << _SolidityParser.T__13 | 1 << _SolidityParser.T__22 | 1 << _SolidityParser.T__24)) !== 0 || (_la - 34 & ~31) === 0 && (1 << _la - 34 & (1 << _SolidityParser.T__33 - 34 | 1 << _SolidityParser.T__35 - 34 | 1 << _SolidityParser.T__41 - 34 | 1 << _SolidityParser.T__53 - 34 | 1 << _SolidityParser.T__54 - 34 | 1 << _SolidityParser.T__55 - 34 | 1 << _SolidityParser.T__56 - 34 | 1 << _SolidityParser.T__57 - 34 | 1 << _SolidityParser.T__58 - 34 | 1 << _SolidityParser.T__59 - 34 | 1 << _SolidityParser.T__60 - 34 | 1 << _SolidityParser.T__62 - 34 | 1 << _SolidityParser.T__63 - 34 | 1 << _SolidityParser.T__64 - 34)) !== 0 || (_la - 66 & ~31) === 
0 && (1 << _la - 66 & (1 << _SolidityParser.T__65 - 66 | 1 << _SolidityParser.T__66 - 66 | 1 << _SolidityParser.T__95 - 66)) !== 0 || (_la - 98 & ~31) === 0 && (1 << _la - 98 & (1 << _SolidityParser.Int - 98 | 1 << _SolidityParser.Uint - 98 | 1 << _SolidityParser.Byte - 98 | 1 << _SolidityParser.Fixed - 98 | 1 << _SolidityParser.Ufixed - 98 | 1 << _SolidityParser.BooleanLiteral - 98 | 1 << _SolidityParser.DecimalNumber - 98 | 1 << _SolidityParser.HexNumber - 98 | 1 << _SolidityParser.HexLiteralFragment - 98 | 1 << _SolidityParser.LeaveKeyword - 98 | 1 << _SolidityParser.PayableKeyword - 98 | 1 << _SolidityParser.TypeKeyword - 98 | 1 << _SolidityParser.ConstructorKeyword - 98 | 1 << _SolidityParser.ReceiveKeyword - 98 | 1 << _SolidityParser.Identifier - 98 | 1 << _SolidityParser.StringLiteralFragment - 98)) !== 0) { - { - this.state = 1083; - this.expression(0); - this.state = 1088; - this._errHandler.sync(this); - _la = this._input.LA(1); - while (_la === _SolidityParser.T__15) { - { - { - this.state = 1084; - this.match(_SolidityParser.T__15); - this.state = 1085; - this.expression(0); - } - } - this.state = 1090; - this._errHandler.sync(this); - _la = this._input.LA(1); - } - } - } - this.state = 1093; - this.match(_SolidityParser.T__34); - } - break; - default: - throw new import_NoViableAltException.NoViableAltException(this); - } - } catch (re) { - if (re instanceof import_RecognitionException.RecognitionException) { - _localctx.exception = re; - this._errHandler.reportError(this, re); - this._errHandler.recover(this, re); - } else { - throw re; - } - } finally { - this.exitRule(); - } - return _localctx; - } - typeNameExpression() { - let _localctx = new TypeNameExpressionContext(this._ctx, this.state); - this.enterRule(_localctx, 182, _SolidityParser.RULE_typeNameExpression); - try { - this.state = 1098; - this._errHandler.sync(this); - switch (this._input.LA(1)) { - case _SolidityParser.T__35: - case _SolidityParser.T__54: - case _SolidityParser.T__55: - 
case _SolidityParser.T__56: - case _SolidityParser.T__57: - case _SolidityParser.Int: - case _SolidityParser.Uint: - case _SolidityParser.Byte: - case _SolidityParser.Fixed: - case _SolidityParser.Ufixed: - this.enterOuterAlt(_localctx, 1); - { - this.state = 1096; - this.elementaryTypeName(); - } - break; - case _SolidityParser.T__13: - case _SolidityParser.T__24: - case _SolidityParser.T__41: - case _SolidityParser.T__53: - case _SolidityParser.T__95: - case _SolidityParser.LeaveKeyword: - case _SolidityParser.PayableKeyword: - case _SolidityParser.ConstructorKeyword: - case _SolidityParser.ReceiveKeyword: - case _SolidityParser.Identifier: - this.enterOuterAlt(_localctx, 2); - { - this.state = 1097; - this.userDefinedTypeName(); - } - break; - default: - throw new import_NoViableAltException.NoViableAltException(this); - } - } catch (re) { - if (re instanceof import_RecognitionException.RecognitionException) { - _localctx.exception = re; - this._errHandler.reportError(this, re); - this._errHandler.recover(this, re); - } else { - throw re; - } - } finally { - this.exitRule(); - } - return _localctx; - } - numberLiteral() { - let _localctx = new NumberLiteralContext(this._ctx, this.state); - this.enterRule(_localctx, 184, _SolidityParser.RULE_numberLiteral); - let _la; - try { - this.enterOuterAlt(_localctx, 1); - { - this.state = 1100; - _la = this._input.LA(1); - if (!(_la === _SolidityParser.DecimalNumber || _la === _SolidityParser.HexNumber)) { - this._errHandler.recoverInline(this); - } else { - if (this._input.LA(1) === import_Token.Token.EOF) { - this.matchedEOF = true; - } - this._errHandler.reportMatch(this); - this.consume(); - } - this.state = 1102; - this._errHandler.sync(this); - switch (this.interpreter.adaptivePredict(this._input, 121, this._ctx)) { - case 1: - { - this.state = 1101; - this.match(_SolidityParser.NumberUnit); - } - break; - } - } - } catch (re) { - if (re instanceof import_RecognitionException.RecognitionException) { - 
_localctx.exception = re; - this._errHandler.reportError(this, re); - this._errHandler.recover(this, re); - } else { - throw re; - } - } finally { - this.exitRule(); - } - return _localctx; - } - identifier() { - let _localctx = new IdentifierContext(this._ctx, this.state); - this.enterRule(_localctx, 186, _SolidityParser.RULE_identifier); - let _la; - try { - this.enterOuterAlt(_localctx, 1); - { - this.state = 1104; - _la = this._input.LA(1); - if (!(_la === _SolidityParser.T__13 || _la === _SolidityParser.T__24 || _la === _SolidityParser.T__41 || _la === _SolidityParser.T__53 || (_la - 96 & ~31) === 0 && (1 << _la - 96 & (1 << _SolidityParser.T__95 - 96 | 1 << _SolidityParser.LeaveKeyword - 96 | 1 << _SolidityParser.PayableKeyword - 96 | 1 << _SolidityParser.ConstructorKeyword - 96 | 1 << _SolidityParser.ReceiveKeyword - 96)) !== 0 || _la === _SolidityParser.Identifier)) { - this._errHandler.recoverInline(this); - } else { - if (this._input.LA(1) === import_Token.Token.EOF) { - this.matchedEOF = true; - } - this._errHandler.reportMatch(this); - this.consume(); - } - } - } catch (re) { - if (re instanceof import_RecognitionException.RecognitionException) { - _localctx.exception = re; - this._errHandler.reportError(this, re); - this._errHandler.recover(this, re); - } else { - throw re; - } - } finally { - this.exitRule(); - } - return _localctx; - } - hexLiteral() { - let _localctx = new HexLiteralContext(this._ctx, this.state); - this.enterRule(_localctx, 188, _SolidityParser.RULE_hexLiteral); - try { - let _alt; - this.enterOuterAlt(_localctx, 1); - { - this.state = 1107; - this._errHandler.sync(this); - _alt = 1; - do { - switch (_alt) { - case 1: - { - { - this.state = 1106; - this.match(_SolidityParser.HexLiteralFragment); - } - } - break; - default: - throw new import_NoViableAltException.NoViableAltException(this); - } - this.state = 1109; - this._errHandler.sync(this); - _alt = this.interpreter.adaptivePredict(this._input, 122, this._ctx); - } while (_alt 
!== 2 && _alt !== import_ATN.ATN.INVALID_ALT_NUMBER); - } - } catch (re) { - if (re instanceof import_RecognitionException.RecognitionException) { - _localctx.exception = re; - this._errHandler.reportError(this, re); - this._errHandler.recover(this, re); - } else { - throw re; - } - } finally { - this.exitRule(); - } - return _localctx; - } - overrideSpecifier() { - let _localctx = new OverrideSpecifierContext(this._ctx, this.state); - this.enterRule(_localctx, 190, _SolidityParser.RULE_overrideSpecifier); - let _la; - try { - this.enterOuterAlt(_localctx, 1); - { - this.state = 1111; - this.match(_SolidityParser.T__96); - this.state = 1123; - this._errHandler.sync(this); - _la = this._input.LA(1); - if (_la === _SolidityParser.T__22) { - { - this.state = 1112; - this.match(_SolidityParser.T__22); - this.state = 1113; - this.userDefinedTypeName(); - this.state = 1118; - this._errHandler.sync(this); - _la = this._input.LA(1); - while (_la === _SolidityParser.T__15) { - { - { - this.state = 1114; - this.match(_SolidityParser.T__15); - this.state = 1115; - this.userDefinedTypeName(); - } - } - this.state = 1120; - this._errHandler.sync(this); - _la = this._input.LA(1); - } - this.state = 1121; - this.match(_SolidityParser.T__23); - } - } - } - } catch (re) { - if (re instanceof import_RecognitionException.RecognitionException) { - _localctx.exception = re; - this._errHandler.reportError(this, re); - this._errHandler.recover(this, re); - } else { - throw re; - } - } finally { - this.exitRule(); - } - return _localctx; - } - stringLiteral() { - let _localctx = new StringLiteralContext(this._ctx, this.state); - this.enterRule(_localctx, 192, _SolidityParser.RULE_stringLiteral); - try { - let _alt; - this.enterOuterAlt(_localctx, 1); - { - this.state = 1126; - this._errHandler.sync(this); - _alt = 1; - do { - switch (_alt) { - case 1: - { - { - this.state = 1125; - this.match(_SolidityParser.StringLiteralFragment); - } - } - break; - default: - throw new 
import_NoViableAltException.NoViableAltException(this); - } - this.state = 1128; - this._errHandler.sync(this); - _alt = this.interpreter.adaptivePredict(this._input, 125, this._ctx); - } while (_alt !== 2 && _alt !== import_ATN.ATN.INVALID_ALT_NUMBER); - } - } catch (re) { - if (re instanceof import_RecognitionException.RecognitionException) { - _localctx.exception = re; - this._errHandler.reportError(this, re); - this._errHandler.recover(this, re); - } else { - throw re; - } - } finally { - this.exitRule(); - } - return _localctx; - } - sempred(_localctx, ruleIndex, predIndex) { - switch (ruleIndex) { - case 35: - return this.typeName_sempred(_localctx, predIndex); - case 64: - return this.expression_sempred(_localctx, predIndex); - } - return true; - } - typeName_sempred(_localctx, predIndex) { - switch (predIndex) { - case 0: - return this.precpred(this._ctx, 3); - } - return true; - } - expression_sempred(_localctx, predIndex) { - switch (predIndex) { - case 1: - return this.precpred(this._ctx, 14); - case 2: - return this.precpred(this._ctx, 13); - case 3: - return this.precpred(this._ctx, 12); - case 4: - return this.precpred(this._ctx, 11); - case 5: - return this.precpred(this._ctx, 10); - case 6: - return this.precpred(this._ctx, 9); - case 7: - return this.precpred(this._ctx, 8); - case 8: - return this.precpred(this._ctx, 7); - case 9: - return this.precpred(this._ctx, 6); - case 10: - return this.precpred(this._ctx, 5); - case 11: - return this.precpred(this._ctx, 4); - case 12: - return this.precpred(this._ctx, 3); - case 13: - return this.precpred(this._ctx, 2); - case 14: - return this.precpred(this._ctx, 27); - case 15: - return this.precpred(this._ctx, 25); - case 16: - return this.precpred(this._ctx, 24); - case 17: - return this.precpred(this._ctx, 23); - case 18: - return this.precpred(this._ctx, 22); - case 19: - return this.precpred(this._ctx, 21); - } - return true; - } - static get _ATN() { - if (!_SolidityParser.__ATN) { - 
_SolidityParser.__ATN = new import_ATNDeserializer2.ATNDeserializer().deserialize(Utils2.toCharArray(_SolidityParser._serializedATN)); - } - return _SolidityParser.__ATN; - } -}; -var SolidityParser = _SolidityParser; -SolidityParser.T__0 = 1; -SolidityParser.T__1 = 2; -SolidityParser.T__2 = 3; -SolidityParser.T__3 = 4; -SolidityParser.T__4 = 5; -SolidityParser.T__5 = 6; -SolidityParser.T__6 = 7; -SolidityParser.T__7 = 8; -SolidityParser.T__8 = 9; -SolidityParser.T__9 = 10; -SolidityParser.T__10 = 11; -SolidityParser.T__11 = 12; -SolidityParser.T__12 = 13; -SolidityParser.T__13 = 14; -SolidityParser.T__14 = 15; -SolidityParser.T__15 = 16; -SolidityParser.T__16 = 17; -SolidityParser.T__17 = 18; -SolidityParser.T__18 = 19; -SolidityParser.T__19 = 20; -SolidityParser.T__20 = 21; -SolidityParser.T__21 = 22; -SolidityParser.T__22 = 23; -SolidityParser.T__23 = 24; -SolidityParser.T__24 = 25; -SolidityParser.T__25 = 26; -SolidityParser.T__26 = 27; -SolidityParser.T__27 = 28; -SolidityParser.T__28 = 29; -SolidityParser.T__29 = 30; -SolidityParser.T__30 = 31; -SolidityParser.T__31 = 32; -SolidityParser.T__32 = 33; -SolidityParser.T__33 = 34; -SolidityParser.T__34 = 35; -SolidityParser.T__35 = 36; -SolidityParser.T__36 = 37; -SolidityParser.T__37 = 38; -SolidityParser.T__38 = 39; -SolidityParser.T__39 = 40; -SolidityParser.T__40 = 41; -SolidityParser.T__41 = 42; -SolidityParser.T__42 = 43; -SolidityParser.T__43 = 44; -SolidityParser.T__44 = 45; -SolidityParser.T__45 = 46; -SolidityParser.T__46 = 47; -SolidityParser.T__47 = 48; -SolidityParser.T__48 = 49; -SolidityParser.T__49 = 50; -SolidityParser.T__50 = 51; -SolidityParser.T__51 = 52; -SolidityParser.T__52 = 53; -SolidityParser.T__53 = 54; -SolidityParser.T__54 = 55; -SolidityParser.T__55 = 56; -SolidityParser.T__56 = 57; -SolidityParser.T__57 = 58; -SolidityParser.T__58 = 59; -SolidityParser.T__59 = 60; -SolidityParser.T__60 = 61; -SolidityParser.T__61 = 62; -SolidityParser.T__62 = 63; -SolidityParser.T__63 = 64; 
-SolidityParser.T__64 = 65; -SolidityParser.T__65 = 66; -SolidityParser.T__66 = 67; -SolidityParser.T__67 = 68; -SolidityParser.T__68 = 69; -SolidityParser.T__69 = 70; -SolidityParser.T__70 = 71; -SolidityParser.T__71 = 72; -SolidityParser.T__72 = 73; -SolidityParser.T__73 = 74; -SolidityParser.T__74 = 75; -SolidityParser.T__75 = 76; -SolidityParser.T__76 = 77; -SolidityParser.T__77 = 78; -SolidityParser.T__78 = 79; -SolidityParser.T__79 = 80; -SolidityParser.T__80 = 81; -SolidityParser.T__81 = 82; -SolidityParser.T__82 = 83; -SolidityParser.T__83 = 84; -SolidityParser.T__84 = 85; -SolidityParser.T__85 = 86; -SolidityParser.T__86 = 87; -SolidityParser.T__87 = 88; -SolidityParser.T__88 = 89; -SolidityParser.T__89 = 90; -SolidityParser.T__90 = 91; -SolidityParser.T__91 = 92; -SolidityParser.T__92 = 93; -SolidityParser.T__93 = 94; -SolidityParser.T__94 = 95; -SolidityParser.T__95 = 96; -SolidityParser.T__96 = 97; -SolidityParser.Int = 98; -SolidityParser.Uint = 99; -SolidityParser.Byte = 100; -SolidityParser.Fixed = 101; -SolidityParser.Ufixed = 102; -SolidityParser.BooleanLiteral = 103; -SolidityParser.DecimalNumber = 104; -SolidityParser.HexNumber = 105; -SolidityParser.NumberUnit = 106; -SolidityParser.HexLiteralFragment = 107; -SolidityParser.ReservedKeyword = 108; -SolidityParser.AnonymousKeyword = 109; -SolidityParser.BreakKeyword = 110; -SolidityParser.ConstantKeyword = 111; -SolidityParser.ImmutableKeyword = 112; -SolidityParser.ContinueKeyword = 113; -SolidityParser.LeaveKeyword = 114; -SolidityParser.ExternalKeyword = 115; -SolidityParser.IndexedKeyword = 116; -SolidityParser.InternalKeyword = 117; -SolidityParser.PayableKeyword = 118; -SolidityParser.PrivateKeyword = 119; -SolidityParser.PublicKeyword = 120; -SolidityParser.VirtualKeyword = 121; -SolidityParser.PureKeyword = 122; -SolidityParser.TypeKeyword = 123; -SolidityParser.ViewKeyword = 124; -SolidityParser.ConstructorKeyword = 125; -SolidityParser.FallbackKeyword = 126; 
-SolidityParser.ReceiveKeyword = 127; -SolidityParser.Identifier = 128; -SolidityParser.StringLiteralFragment = 129; -SolidityParser.VersionLiteral = 130; -SolidityParser.WS = 131; -SolidityParser.COMMENT = 132; -SolidityParser.LINE_COMMENT = 133; -SolidityParser.RULE_sourceUnit = 0; -SolidityParser.RULE_pragmaDirective = 1; -SolidityParser.RULE_pragmaName = 2; -SolidityParser.RULE_pragmaValue = 3; -SolidityParser.RULE_version = 4; -SolidityParser.RULE_versionOperator = 5; -SolidityParser.RULE_versionConstraint = 6; -SolidityParser.RULE_importDeclaration = 7; -SolidityParser.RULE_importDirective = 8; -SolidityParser.RULE_importPath = 9; -SolidityParser.RULE_contractDefinition = 10; -SolidityParser.RULE_inheritanceSpecifier = 11; -SolidityParser.RULE_contractPart = 12; -SolidityParser.RULE_stateVariableDeclaration = 13; -SolidityParser.RULE_fileLevelConstant = 14; -SolidityParser.RULE_customErrorDefinition = 15; -SolidityParser.RULE_typeDefinition = 16; -SolidityParser.RULE_usingForDeclaration = 17; -SolidityParser.RULE_structDefinition = 18; -SolidityParser.RULE_modifierDefinition = 19; -SolidityParser.RULE_modifierInvocation = 20; -SolidityParser.RULE_functionDefinition = 21; -SolidityParser.RULE_functionDescriptor = 22; -SolidityParser.RULE_returnParameters = 23; -SolidityParser.RULE_modifierList = 24; -SolidityParser.RULE_eventDefinition = 25; -SolidityParser.RULE_enumValue = 26; -SolidityParser.RULE_enumDefinition = 27; -SolidityParser.RULE_parameterList = 28; -SolidityParser.RULE_parameter = 29; -SolidityParser.RULE_eventParameterList = 30; -SolidityParser.RULE_eventParameter = 31; -SolidityParser.RULE_functionTypeParameterList = 32; -SolidityParser.RULE_functionTypeParameter = 33; -SolidityParser.RULE_variableDeclaration = 34; -SolidityParser.RULE_typeName = 35; -SolidityParser.RULE_userDefinedTypeName = 36; -SolidityParser.RULE_mappingKey = 37; -SolidityParser.RULE_mapping = 38; -SolidityParser.RULE_functionTypeName = 39; -SolidityParser.RULE_storageLocation 
= 40; -SolidityParser.RULE_stateMutability = 41; -SolidityParser.RULE_block = 42; -SolidityParser.RULE_statement = 43; -SolidityParser.RULE_expressionStatement = 44; -SolidityParser.RULE_ifStatement = 45; -SolidityParser.RULE_tryStatement = 46; -SolidityParser.RULE_catchClause = 47; -SolidityParser.RULE_whileStatement = 48; -SolidityParser.RULE_simpleStatement = 49; -SolidityParser.RULE_uncheckedStatement = 50; -SolidityParser.RULE_forStatement = 51; -SolidityParser.RULE_inlineAssemblyStatement = 52; -SolidityParser.RULE_doWhileStatement = 53; -SolidityParser.RULE_continueStatement = 54; -SolidityParser.RULE_breakStatement = 55; -SolidityParser.RULE_returnStatement = 56; -SolidityParser.RULE_throwStatement = 57; -SolidityParser.RULE_emitStatement = 58; -SolidityParser.RULE_revertStatement = 59; -SolidityParser.RULE_variableDeclarationStatement = 60; -SolidityParser.RULE_variableDeclarationList = 61; -SolidityParser.RULE_identifierList = 62; -SolidityParser.RULE_elementaryTypeName = 63; -SolidityParser.RULE_expression = 64; -SolidityParser.RULE_primaryExpression = 65; -SolidityParser.RULE_expressionList = 66; -SolidityParser.RULE_nameValueList = 67; -SolidityParser.RULE_nameValue = 68; -SolidityParser.RULE_functionCallArguments = 69; -SolidityParser.RULE_functionCall = 70; -SolidityParser.RULE_assemblyBlock = 71; -SolidityParser.RULE_assemblyItem = 72; -SolidityParser.RULE_assemblyExpression = 73; -SolidityParser.RULE_assemblyMember = 74; -SolidityParser.RULE_assemblyCall = 75; -SolidityParser.RULE_assemblyLocalDefinition = 76; -SolidityParser.RULE_assemblyAssignment = 77; -SolidityParser.RULE_assemblyIdentifierOrList = 78; -SolidityParser.RULE_assemblyIdentifierList = 79; -SolidityParser.RULE_assemblyStackAssignment = 80; -SolidityParser.RULE_labelDefinition = 81; -SolidityParser.RULE_assemblySwitch = 82; -SolidityParser.RULE_assemblyCase = 83; -SolidityParser.RULE_assemblyFunctionDefinition = 84; -SolidityParser.RULE_assemblyFunctionReturns = 85; 
-SolidityParser.RULE_assemblyFor = 86; -SolidityParser.RULE_assemblyIf = 87; -SolidityParser.RULE_assemblyLiteral = 88; -SolidityParser.RULE_subAssembly = 89; -SolidityParser.RULE_tupleExpression = 90; -SolidityParser.RULE_typeNameExpression = 91; -SolidityParser.RULE_numberLiteral = 92; -SolidityParser.RULE_identifier = 93; -SolidityParser.RULE_hexLiteral = 94; -SolidityParser.RULE_overrideSpecifier = 95; -SolidityParser.RULE_stringLiteral = 96; -SolidityParser.ruleNames = [ - "sourceUnit", - "pragmaDirective", - "pragmaName", - "pragmaValue", - "version", - "versionOperator", - "versionConstraint", - "importDeclaration", - "importDirective", - "importPath", - "contractDefinition", - "inheritanceSpecifier", - "contractPart", - "stateVariableDeclaration", - "fileLevelConstant", - "customErrorDefinition", - "typeDefinition", - "usingForDeclaration", - "structDefinition", - "modifierDefinition", - "modifierInvocation", - "functionDefinition", - "functionDescriptor", - "returnParameters", - "modifierList", - "eventDefinition", - "enumValue", - "enumDefinition", - "parameterList", - "parameter", - "eventParameterList", - "eventParameter", - "functionTypeParameterList", - "functionTypeParameter", - "variableDeclaration", - "typeName", - "userDefinedTypeName", - "mappingKey", - "mapping", - "functionTypeName", - "storageLocation", - "stateMutability", - "block", - "statement", - "expressionStatement", - "ifStatement", - "tryStatement", - "catchClause", - "whileStatement", - "simpleStatement", - "uncheckedStatement", - "forStatement", - "inlineAssemblyStatement", - "doWhileStatement", - "continueStatement", - "breakStatement", - "returnStatement", - "throwStatement", - "emitStatement", - "revertStatement", - "variableDeclarationStatement", - "variableDeclarationList", - "identifierList", - "elementaryTypeName", - "expression", - "primaryExpression", - "expressionList", - "nameValueList", - "nameValue", - "functionCallArguments", - "functionCall", - "assemblyBlock", - 
"assemblyItem", - "assemblyExpression", - "assemblyMember", - "assemblyCall", - "assemblyLocalDefinition", - "assemblyAssignment", - "assemblyIdentifierOrList", - "assemblyIdentifierList", - "assemblyStackAssignment", - "labelDefinition", - "assemblySwitch", - "assemblyCase", - "assemblyFunctionDefinition", - "assemblyFunctionReturns", - "assemblyFor", - "assemblyIf", - "assemblyLiteral", - "subAssembly", - "tupleExpression", - "typeNameExpression", - "numberLiteral", - "identifier", - "hexLiteral", - "overrideSpecifier", - "stringLiteral" -]; -SolidityParser._LITERAL_NAMES = [ - void 0, - "'pragma'", - "';'", - "'||'", - "'^'", - "'~'", - "'>='", - "'>'", - "'<'", - "'<='", - "'='", - "'as'", - "'import'", - "'*'", - "'from'", - "'{'", - "','", - "'}'", - "'abstract'", - "'contract'", - "'interface'", - "'library'", - "'is'", - "'('", - "')'", - "'error'", - "'using'", - "'for'", - "'struct'", - "'modifier'", - "'function'", - "'returns'", - "'event'", - "'enum'", - "'['", - "']'", - "'address'", - "'.'", - "'mapping'", - "'=>'", - "'memory'", - "'storage'", - "'calldata'", - "'if'", - "'else'", - "'try'", - "'catch'", - "'while'", - "'unchecked'", - "'assembly'", - "'do'", - "'return'", - "'throw'", - "'emit'", - "'revert'", - "'var'", - "'bool'", - "'string'", - "'byte'", - "'++'", - "'--'", - "'new'", - "':'", - "'+'", - "'-'", - "'after'", - "'delete'", - "'!'", - "'**'", - "'/'", - "'%'", - "'<<'", - "'>>'", - "'&'", - "'|'", - "'=='", - "'!='", - "'&&'", - "'?'", - "'|='", - "'^='", - "'&='", - "'<<='", - "'>>='", - "'+='", - "'-='", - "'*='", - "'/='", - "'%='", - "'let'", - "':='", - "'=:'", - "'switch'", - "'case'", - "'default'", - "'->'", - "'callback'", - "'override'", - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - "'anonymous'", - "'break'", - "'constant'", - "'immutable'", - "'continue'", - "'leave'", - "'external'", - "'indexed'", - "'internal'", - "'payable'", - "'private'", - 
"'public'", - "'virtual'", - "'pure'", - "'type'", - "'view'", - "'constructor'", - "'fallback'", - "'receive'" -]; -SolidityParser._SYMBOLIC_NAMES = [ - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - void 0, - "Int", - "Uint", - "Byte", - "Fixed", - "Ufixed", - "BooleanLiteral", - "DecimalNumber", - "HexNumber", - "NumberUnit", - "HexLiteralFragment", - "ReservedKeyword", - "AnonymousKeyword", - "BreakKeyword", - "ConstantKeyword", - "ImmutableKeyword", - "ContinueKeyword", - "LeaveKeyword", - "ExternalKeyword", - "IndexedKeyword", - "InternalKeyword", - "PayableKeyword", - "PrivateKeyword", - "PublicKeyword", - "VirtualKeyword", - "PureKeyword", - "TypeKeyword", - "ViewKeyword", - "ConstructorKeyword", - "FallbackKeyword", - "ReceiveKeyword", - "Identifier", - "StringLiteralFragment", - "VersionLiteral", - "WS", - "COMMENT", - "LINE_COMMENT" -]; -SolidityParser.VOCABULARY = new import_VocabularyImpl2.VocabularyImpl(_SolidityParser._LITERAL_NAMES, _SolidityParser._SYMBOLIC_NAMES, []); -SolidityParser._serializedATNSegments = 3; 
-SolidityParser._serializedATNSegment0 = '\uC91D\uCABA\u058D\uAFBA\u4F53\u0607\uEA8B\uC241\x87\u046D     \x07 \x07\b \b \n \n\v \v\f \f\r \r                   ! !" "# #$ $% %& &\' \'( () )* *+ +, ,- -. ./ /0 01 12 23 34 45 56 67 78 89 9: :; ;< <= => >? ?@ @A AB BC CD DE EF FG GH HI IJ JK KL LM MN NO OP PQ QR RS ST TU UV VW WX XY YZ Z[ [\\ \\] ]^ ^_ _` `a ab b\x07\xCE\n\f\xD1\v\xDE\n\xE2\n\x07\xE5\n\f\xE8\v\x07\x07\b\b\xED\n\b\b\b\b\xF1\n\b\b\b\xF4\n\b    \xF9\n \n\n\n\n\n\xFF\n\n\n\n\n\n\n\n\u0106\n\n\n\n\n\u010A\n\n\n\n\n\n\n\n\n\n\n\x07\n\u0115\n\n\f\n\n\u0118\v\n\n\n\n\n\n\n\u011F\n\n\v\v\f\f\u0124\n\f\f\f\f\f\f\f\x07\f\u012C\n\f\f\f\f\u012F\v\f\f\u0131\n\f\f\f\x07\f\u0135\n\f\f\f\f\u0138\v\f\f\f\r\r\r\r\u013F\n\r\r\r\u0142\n\r\u014D\n\x07\u0156\n\f\u0159\v\u015E\n\u0179\n\x07\u0185\n\f\u0188\v\u018A\n\u0191\n\x07\u0195\n\f\u0198\v\u019C\n\u01A1\n\u01A4\n\u01AA\n\u01AE\n\u01B2\n\u01B7\n\x07\u01C4\n\f\u01C7\v\u01CD\n\u01D7\n\x07\u01DB\n\f\u01DE\v\x07\u01E6\n\f\u01E9\v\u01EB\n\u01F1\n\u01F4\n    \x07 \u01FA\n \f  \u01FD\v  \u01FF\n   
!!!\u0205\n!!!\u0208\n!""""\x07"\u020E\n"\f""\u0211\v""\u0213\n"""###\u0219\n#$$$\u021D\n$$$%%%%%%%%\u0228\n%%%%%\u022D\n%%\x07%\u0230\n%\f%%\u0233\v%&&&\x07&\u0238\n&\f&&\u023B\v&\'\'\'\u023F\n\'((((((()))))\x07)\u024D\n)\f))\u0250\v))))\u0254\n)**++,,\x07,\u025C\n,\f,,\u025F\v,,,----------------\u0272\n-...////////\u027E\n/0000\u0283\n0000\u0287\n0\r00\u0288111\u028D\n111\u0290\n111222222333\u029C\n344455555\u02A5\n5555\u02A9\n555\u02AC\n5555666\u02B3\n66677777777888999:::\u02C7\n:::;;;<<<<====>>>>>>>>\u02DD\n>>>>\u02E1\n>>>??\u02E6\n????\u02EA\n?\x07?\u02EC\n?\f??\u02EF\v?@@@\u02F3\n@@\x07@\u02F6\n@\f@@\u02F9\v@@@\u02FC\n@@@AABBBBBBBBBBBBBBBBBBB\u0314\nBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBBB\u034A\nBBBB\u034E\nBBBBBBBBBBBBBBB\x07B\u035E\nB\fBB\u0361\vBCCCCCCCC\u036A\nCCCCCCCC\u0372\nCC\u0374\nCDDD\x07D\u0379\nD\fDD\u037C\vDEEE\x07E\u0381\nE\fEE\u0384\vEEE\u0387\nEFFFFGGG\u038F\nGGGG\u0393\nGG\u0395\nGHHHHHII\x07I\u039E\nI\fII\u03A1\vIIIJJJJJJJJJJJJJJJJJJJ\u03B7\nJKKKK\u03BC\nKLLLLMMMMM\u03C6\nMMMM\u03CA\nMMM\x07M\u03CE\nM\fMM\u03D1\vMMM\u03D4\nMNNNNN\u03DA\nNOOOOPPPPPPP\u03E6\nPQQQ\x07Q\u03EB\nQ\fQQ\u03EE\vQRRRSSSTTT\x07T\u03F9\nT\fTT\u03FC\vTUUUUUUU\u0404\nUVVVVV\u040A\nVVVV\u040E\nVVVWWWXXXX\u0418\nXXXXX\u041D\nXXXYYYYZZZZZ\u0429\nZ[[[[\\\\\\\u0431\n\\\\\\\\\u0435\n\\\x07\\\u0437\n\\\f\\\\\u043A\v\\\\\\\\\\\\\x07\\\u0441\n\\\f\\\\\u0444\v\\\\\u0446\n\\\\\\\u0449\n\\]]]\u044D\n]^^^\u0451\n^__``\u0456\n`\r``\u0457aaaaa\x07a\u045F\na\faa\u0462\vaaaa\u0466\nabb\u0469\nb\rbb\u046AbH\x82c\b\n\f "$&(*,.02468:<>@BDFHJLNPRTVXZ\\^`bdfhjlnprtvxz|~\x80\x82\x84\x86\x88\x8A\x8C\x8E\x90\x92\x94\x96\x98\x9A\x9C\x9E\xA0\xA2\xA4\xA6\xA8\xAA\xAC\xAE\xB0\xB2\xB4\xB6\xB8\xBA\xBC\xBE\xC0\xC2\f*,qqxx||~~&&9ABCDGHIJ\b\vMN\f\fQZjk\v,,88bbttxx\x7F\x7F\x81\x82\u04EA\xCF\xD4\xD9\b\xDD\n\xDF\f\xE9\xF3\xF5\u011E\u0120\u0123\u013B\u014C\u014E\u0161 
\u0168"\u016D$\u0173&\u017C(\u018D*\u019D,\u01A5.\u01B60\u01B82\u01C54\u01C86\u01D08\u01D2:\u01E1<\u01EE>\u01F5@\u0202B\u0209D\u0216F\u021AH\u0227J\u0234L\u023EN\u0240P\u0247R\u0255T\u0257V\u0259X\u0271Z\u0273\\\u0276^\u027F`\u028Ab\u0293d\u029Bf\u029Dh\u02A0j\u02B0l\u02B6n\u02BEp\u02C1r\u02C4t\u02CAv\u02CDx\u02D1z\u02DC|\u02E5~\u02F0\x80\u02FF\x82\u0313\x84\u0373\x86\u0375\x88\u037D\x8A\u0388\x8C\u0394\x8E\u0396\x90\u039B\x92\u03B6\x94\u03BB\x96\u03BD\x98\u03C5\x9A\u03D5\x9C\u03DB\x9E\u03E5\xA0\u03E7\xA2\u03EF\xA4\u03F2\xA6\u03F5\xA8\u0403\xAA\u0405\xAC\u0411\xAE\u0414\xB0\u0420\xB2\u0428\xB4\u042A\xB6\u0448\xB8\u044C\xBA\u044E\xBC\u0452\xBE\u0455\xC0\u0459\xC2\u0468\xC4\xCE\xC5\xCE\n\xC6\xCE\f\xC7\xCE8\xC8\xCE&\xC9\xCE,\xCA\xCE\xCB\xCE \xCC\xCE"\xCD\xC4\xCD\xC5\xCD\xC6\xCD\xC7\xCD\xC8\xCD\xC9\xCD\xCA\xCD\xCB\xCD\xCC\xCE\xD1\xCF\xCD\xCF\xD0\xD0\xD2\xD1\xCF\xD2\xD3\x07\xD3\xD4\xD5\x07\xD5\xD6\xD6\xD7\b\xD7\xD8\x07\xD8\xD9\xDA\xBC_\xDA\x07\xDB\xDE\n\xDC\xDE\x82B\xDD\xDB\xDD\xDC\xDE \xDF\xE6\b\xE0\xE2\x07\xE1\xE0\xE1\xE2\xE2\xE3\xE3\xE5\b\xE4\xE1\xE5\xE8\xE6\xE4\xE6\xE7\xE7\v\xE8\xE6\xE9\xEA \xEA\r\xEB\xED\f\x07\xEC\xEB\xEC\xED\xED\xEE\xEE\xF4\x07\x84\xEF\xF1\f\x07\xF0\xEF\xF0\xF1\xF1\xF2\xF2\xF4\x07j\xF3\xEC\xF3\xF0\xF4\xF5\xF8\xBC_\xF6\xF7\x07\r\xF7\xF9\xBC_\xF8\xF6\xF8\xF9\xF9\xFA\xFB\x07\xFB\xFE\v\xFC\xFD\x07\r\xFD\xFF\xBC_\xFE\xFC\xFE\xFF\xFF\u0100\u0100\u0101\x07\u0101\u011F\u0102\u0105\x07\u0103\u0106\x07\u0104\u0106\xBC_\u0105\u0103\u0105\u0104\u0106\u0109\u0107\u0108\x07\r\u0108\u010A\xBC_\u0109\u0107\u0109\u010A\u010A\u010B\u010B\u010C\x07\u010C\u010D\v\u010D\u010E\x07\u010E\u011F\u010F\u0110\x07\u0110\u0111\x07\u0111\u0116 \u0112\u0113\x07\u0113\u0115 \u0114\u0112\u0115\u0118\u0116\u0114\u0116\u0117\u0117\u0119\u0118\u0116\u0119\u011A\x07\u011A\u011B\x07\u011B\u011C\v\u011C\u011D\x07\u011D\u011F\u011E\xFA\u011E\u0102\u011E\u010F\u011F\u0120\u0121\x07\x83\u0121\u0122\u0124\x07\u0123\u0122\u0123\u0124\u0124\u0125\u0125\u0126 
\u0126\u0130\xBC_\u0127\u0128\x07\u0128\u012D\r\u0129\u012A\x07\u012A\u012C\r\u012B\u0129\u012C\u012F\u012D\u012B\u012D\u012E\u012E\u0131\u012F\u012D\u0130\u0127\u0130\u0131\u0131\u0132\u0132\u0136\x07\u0133\u0135\u0134\u0133\u0135\u0138\u0136\u0134\u0136\u0137\u0137\u0139\u0138\u0136\u0139\u013A\x07\u013A\u013B\u0141J&\u013C\u013E\x07\u013D\u013F\x86D\u013E\u013D\u013E\u013F\u013F\u0140\u0140\u0142\x07\u0141\u013C\u0141\u0142\u0142\u0143\u014D\u0144\u014D$\u0145\u014D&\u0146\u014D(\u0147\u014D,\u0148\u014D4\u0149\u014D8\u014A\u014D \u014B\u014D"\u014C\u0143\u014C\u0144\u014C\u0145\u014C\u0146\u014C\u0147\u014C\u0148\u014C\u0149\u014C\u014A\u014C\u014B\u014D\u014E\u0157H%\u014F\u0156\x07z\u0150\u0156\x07w\u0151\u0156\x07y\u0152\u0156\x07q\u0153\u0156\x07r\u0154\u0156\xC0a\u0155\u014F\u0155\u0150\u0155\u0151\u0155\u0152\u0155\u0153\u0155\u0154\u0156\u0159\u0157\u0155\u0157\u0158\u0158\u015A\u0159\u0157\u015A\u015D\xBC_\u015B\u015C\x07\f\u015C\u015E\x82B\u015D\u015B\u015D\u015E\u015E\u015F\u015F\u0160\x07\u0160\u0161\u0162H%\u0162\u0163\x07q\u0163\u0164\xBC_\u0164\u0165\x07\f\u0165\u0166\x82B\u0166\u0167\x07\u0167\u0168\u0169\x07\u0169\u016A\xBC_\u016A\u016B:\u016B\u016C\x07\u016C!\u016D\u016E\x07}\u016E\u016F\xBC_\u016F\u0170\x07\u0170\u0171\x80A\u0171\u0172\x07\u0172#\u0173\u0174\x07\u0174\u0175J&\u0175\u0178\x07\u0176\u0179\x07\u0177\u0179H%\u0178\u0176\u0178\u0177\u0179\u017A\u017A\u017B\x07\u017B%\u017C\u017D\x07\u017D\u017E\xBC_\u017E\u0189\x07\u017F\u0180F$\u0180\u0186\x07\u0181\u0182F$\u0182\u0183\x07\u0183\u0185\u0184\u0181\u0185\u0188\u0186\u0184\u0186\u0187\u0187\u018A\u0188\u0186\u0189\u017F\u0189\u018A\u018A\u018B\u018B\u018C\x07\u018C\'\u018D\u018E\x07\u018E\u0190\xBC_\u018F\u0191:\u0190\u018F\u0190\u0191\u0191\u0196\u0192\u0195\x07{\u0193\u0195\xC0a\u0194\u0192\u0194\u0193\u0195\u0198\u0196\u0194\u0196\u0197\u0197\u019B\u0198\u0196\u0199\u019C\x07\u019A\u019CV,\u019B\u0199\u019B\u019A\u019C)\u019D\u01A3\xBC_\u019E\u01A0\x07\u019F\u01A1\x86D\u01A0\u019F\
u01A0\u01A1\u01A1\u01A2\u01A2\u01A4\x07\u01A3\u019E\u01A3\u01A4\u01A4+\u01A5\u01A6.\u01A6\u01A7:\u01A7\u01A92\u01A8\u01AA0\u01A9\u01A8\u01A9\u01AA'; -SolidityParser._serializedATNSegment1 = "\u01AA\u01AD\u01AB\u01AE\x07\u01AC\u01AEV,\u01AD\u01AB\u01AD\u01AC\u01AE-\u01AF\u01B1\x07 \u01B0\u01B2\xBC_\u01B1\u01B0\u01B1\u01B2\u01B2\u01B7\u01B3\u01B7\x07\x7F\u01B4\u01B7\x07\x80\u01B5\u01B7\x07\x81\u01B6\u01AF\u01B6\u01B3\u01B6\u01B4\u01B6\u01B5\u01B7/\u01B8\u01B9\x07!\u01B9\u01BA:\u01BA1\u01BB\u01C4\x07u\u01BC\u01C4\x07z\u01BD\u01C4\x07w\u01BE\u01C4\x07y\u01BF\u01C4\x07{\u01C0\u01C4T+\u01C1\u01C4*\u01C2\u01C4\xC0a\u01C3\u01BB\u01C3\u01BC\u01C3\u01BD\u01C3\u01BE\u01C3\u01BF\u01C3\u01C0\u01C3\u01C1\u01C3\u01C2\u01C4\u01C7\u01C5\u01C3\u01C5\u01C6\u01C63\u01C7\u01C5\u01C8\u01C9\x07\"\u01C9\u01CA\xBC_\u01CA\u01CC> \u01CB\u01CD\x07o\u01CC\u01CB\u01CC\u01CD\u01CD\u01CE\u01CE\u01CF\x07\u01CF5\u01D0\u01D1\xBC_\u01D17\u01D2\u01D3\x07#\u01D3\u01D4\xBC_\u01D4\u01D6\x07\u01D5\u01D76\u01D6\u01D5\u01D6\u01D7\u01D7\u01DC\u01D8\u01D9\x07\u01D9\u01DB6\u01DA\u01D8\u01DB\u01DE\u01DC\u01DA\u01DC\u01DD\u01DD\u01DF\u01DE\u01DC\u01DF\u01E0\x07\u01E09\u01E1\u01EA\x07\u01E2\u01E7<\u01E3\u01E4\x07\u01E4\u01E6<\u01E5\u01E3\u01E6\u01E9\u01E7\u01E5\u01E7\u01E8\u01E8\u01EB\u01E9\u01E7\u01EA\u01E2\u01EA\u01EB\u01EB\u01EC\u01EC\u01ED\x07\u01ED;\u01EE\u01F0H%\u01EF\u01F1R*\u01F0\u01EF\u01F0\u01F1\u01F1\u01F3\u01F2\u01F4\xBC_\u01F3\u01F2\u01F3\u01F4\u01F4=\u01F5\u01FE\x07\u01F6\u01FB@!\u01F7\u01F8\x07\u01F8\u01FA@!\u01F9\u01F7\u01FA\u01FD\u01FB\u01F9\u01FB\u01FC\u01FC\u01FF\u01FD\u01FB\u01FE\u01F6\u01FE\u01FF\u01FF\u0200\u0200\u0201\x07\u0201?\u0202\u0204H%\u0203\u0205\x07v\u0204\u0203\u0204\u0205\u0205\u0207\u0206\u0208\xBC_\u0207\u0206\u0207\u0208\u0208A\u0209\u0212\x07\u020A\u020FD#\u020B\u020C\x07\u020C\u020ED#\u020D\u020B\u020E\u0211\u020F\u020D\u020F\u0210\u0210\u0213\u0211\u020F\u0212\u020A\u0212\u0213\u0213\u0214\u0214\u0215\x07\u0215C\u0216\u0218H%\u0217\u0219R*\u0218\u0217\u0218\u0219\u0219E\u021
A\u021CH%\u021B\u021DR*\u021C\u021B\u021C\u021D\u021D\u021E\u021E\u021F\xBC_\u021FG\u0220\u0221\b%\u0221\u0228\x80A\u0222\u0228J&\u0223\u0228N(\u0224\u0228P)\u0225\u0226\x07&\u0226\u0228\x07x\u0227\u0220\u0227\u0222\u0227\u0223\u0227\u0224\u0227\u0225\u0228\u0231\u0229\u022A\f\u022A\u022C\x07$\u022B\u022D\x82B\u022C\u022B\u022C\u022D\u022D\u022E\u022E\u0230\x07%\u022F\u0229\u0230\u0233\u0231\u022F\u0231\u0232\u0232I\u0233\u0231\u0234\u0239\xBC_\u0235\u0236\x07'\u0236\u0238\xBC_\u0237\u0235\u0238\u023B\u0239\u0237\u0239\u023A\u023AK\u023B\u0239\u023C\u023F\x80A\u023D\u023FJ&\u023E\u023C\u023E\u023D\u023FM\u0240\u0241\x07(\u0241\u0242\x07\u0242\u0243L'\u0243\u0244\x07)\u0244\u0245H%\u0245\u0246\x07\u0246O\u0247\u0248\x07 \u0248\u024EB\"\u0249\u024D\x07w\u024A\u024D\x07u\u024B\u024DT+\u024C\u0249\u024C\u024A\u024C\u024B\u024D\u0250\u024E\u024C\u024E\u024F\u024F\u0253\u0250\u024E\u0251\u0252\x07!\u0252\u0254B\"\u0253\u0251\u0253\u0254\u0254Q\u0255\u0256 \u0256S\u0257\u0258 \u0258U\u0259\u025D\x07\u025A\u025CX-\u025B\u025A\u025C\u025F\u025D\u025B\u025D\u025E\u025E\u0260\u025F\u025D\u0260\u0261\x07\u0261W\u0262\u0272\\/\u0263\u0272^0\u0264\u0272b2\u0265\u0272h5\u0266\u0272V,\u0267\u0272j6\u0268\u0272l7\u0269\u0272n8\u026A\u0272p9\u026B\u0272r:\u026C\u0272t;\u026D\u0272v<\u026E\u0272d3\u026F\u0272f4\u0270\u0272x=\u0271\u0262\u0271\u0263\u0271\u0264\u0271\u0265\u0271\u0266\u0271\u0267\u0271\u0268\u0271\u0269\u0271\u026A\u0271\u026B\u0271\u026C\u0271\u026D\u0271\u026E\u0271\u026F\u0271\u0270\u0272Y\u0273\u0274\x82B\u0274\u0275\x07\u0275[\u0276\u0277\x07-\u0277\u0278\x07\u0278\u0279\x82B\u0279\u027A\x07\u027A\u027DX-\u027B\u027C\x07.\u027C\u027EX-\u027D\u027B\u027D\u027E\u027E]\u027F\u0280\x07/\u0280\u0282\x82B\u0281\u02830\u0282\u0281\u0282\u0283\u0283\u0284\u0284\u0286V,\u0285\u0287`1\u0286\u0285\u0287\u0288\u0288\u0286\u0288\u0289\u0289_\u028A\u028F\x070\u028B\u028D\xBC_\u028C\u028B\u028C\u028D\u028D\u028E\u028E\u0290:\u028F\u028C\u028F\u0290\u0290\u0291\u0291\u0292V,\u029
2a\u0293\u0294\x071\u0294\u0295\x07\u0295\u0296\x82B\u0296\u0297\x07\u0297\u0298X-\u0298c\u0299\u029Cz>\u029A\u029CZ.\u029B\u0299\u029B\u029A\u029Ce\u029D\u029E\x072\u029E\u029FV,\u029Fg\u02A0\u02A1\x07\u02A1\u02A4\x07\u02A2\u02A5d3\u02A3\u02A5\x07\u02A4\u02A2\u02A4\u02A3\u02A5\u02A8\u02A6\u02A9Z.\u02A7\u02A9\x07\u02A8\u02A6\u02A8\u02A7\u02A9\u02AB\u02AA\u02AC\x82B\u02AB\u02AA\u02AB\u02AC\u02AC\u02AD\u02AD\u02AE\x07\u02AE\u02AFX-\u02AFi\u02B0\u02B2\x073\u02B1\u02B3\x07\x83\u02B2\u02B1\u02B2\u02B3\u02B3\u02B4\u02B4\u02B5\x90I\u02B5k\u02B6\u02B7\x074\u02B7\u02B8X-\u02B8\u02B9\x071\u02B9\u02BA\x07\u02BA\u02BB\x82B\u02BB\u02BC\x07\u02BC\u02BD\x07\u02BDm\u02BE\u02BF\x07s\u02BF\u02C0\x07\u02C0o\u02C1\u02C2\x07p\u02C2\u02C3\x07\u02C3q\u02C4\u02C6\x075\u02C5\u02C7\x82B\u02C6\u02C5\u02C6\u02C7\u02C7\u02C8\u02C8\u02C9\x07\u02C9s\u02CA\u02CB\x076\u02CB\u02CC\x07\u02CCu\u02CD\u02CE\x077\u02CE\u02CF\x8EH\u02CF\u02D0\x07\u02D0w\u02D1\u02D2\x078\u02D2\u02D3\x8EH\u02D3\u02D4\x07\u02D4y\u02D5\u02D6\x079\u02D6\u02DD~@\u02D7\u02DDF$\u02D8\u02D9\x07\u02D9\u02DA|?\u02DA\u02DB\x07\u02DB\u02DD\u02DC\u02D5\u02DC\u02D7\u02DC\u02D8\u02DD\u02E0\u02DE\u02DF\x07\f\u02DF\u02E1\x82B\u02E0\u02DE\u02E0\u02E1\u02E1\u02E2\u02E2\u02E3\x07\u02E3{\u02E4\u02E6F$\u02E5\u02E4\u02E5\u02E6\u02E6\u02ED\u02E7\u02E9\x07\u02E8\u02EAF$\u02E9\u02E8\u02E9\u02EA\u02EA\u02EC\u02EB\u02E7\u02EC\u02EF\u02ED\u02EB\u02ED\u02EE\u02EE}\u02EF\u02ED\u02F0\u02F7\x07\u02F1\u02F3\xBC_\u02F2\u02F1\u02F2\u02F3\u02F3\u02F4\u02F4\u02F6\x07\u02F5\u02F2\u02F6\u02F9\u02F7\u02F5\u02F7\u02F8\u02F8\u02FB\u02F9\u02F7\u02FA\u02FC\xBC_\u02FB\u02FA\u02FB\u02FC\u02FC\u02FD\u02FD\u02FE\x07\u02FE\x7F\u02FF\u0300 \u0300\x81\u0301\u0302\bB\u0302\u0303\x07?\u0303\u0314H%\u0304\u0305\x07\u0305\u0306\x82B\u0306\u0307\x07\u0307\u0314\u0308\u0309 \x07\u0309\u0314\x82B\u030A\u030B \b\u030B\u0314\x82B\u030C\u030D 
\u030D\u0314\x82B\u030E\u030F\x07E\u030F\u0314\x82B\u0310\u0311\x07\x07\u0311\u0314\x82B\u0312\u0314\x84C\u0313\u0301\u0313\u0304\u0313\u0308\u0313\u030A\u0313\u030C\u0313\u030E\u0313\u0310\u0313\u0312\u0314\u035F\u0315\u0316\f\u0316\u0317\x07F\u0317\u035E\x82B\u0318\u0319\f\u0319\u031A \n\u031A\u035E\x82B\u031B\u031C\f\u031C\u031D \b\u031D\u035E\x82B\u031E\u031F\f\r\u031F\u0320 \v\u0320\u035E\x82B\u0321\u0322\f\f\u0322\u0323\x07K\u0323\u035E\x82B\r\u0324\u0325\f\v\u0325\u0326\x07\u0326\u035E\x82B\f\u0327\u0328\f\n\u0328\u0329\x07L\u0329\u035E\x82B\v\u032A\u032B\f \u032B\u032C \f\u032C\u035E\x82B\n\u032D\u032E\f\b\u032E\u032F \r\u032F\u035E\x82B \u0330\u0331\f\x07\u0331\u0332\x07O\u0332\u035E\x82B\b\u0333\u0334\f\u0334\u0335\x07\u0335\u035E\x82B\x07\u0336\u0337\f\u0337\u0338\x07P\u0338\u0339\x82B\u0339\u033A\x07@\u033A\u033B\x82B\u033B\u035E\u033C\u033D\f\u033D\u033E \u033E\u035E\x82B\u033F\u0340\f\u0340\u035E \x07\u0341\u0342\f\u0342\u0343\x07$\u0343\u0344\x82B\u0344\u0345\x07%\u0345\u035E\u0346\u0347\f\u0347\u0349\x07$\u0348\u034A\x82B\u0349\u0348\u0349\u034A\u034A\u034B\u034B\u034D\x07@\u034C\u034E\x82B\u034D\u034C\u034D\u034E\u034E\u034F\u034F\u035E\x07%\u0350\u0351\f\u0351\u0352\x07'\u0352\u035E\xBC_\u0353\u0354\f\u0354\u0355\x07\u0355\u0356\x88E\u0356\u0357\x07\u0357\u035E\u0358\u0359\f\u0359\u035A\x07\u035A\u035B\x8CG\u035B\u035C\x07\u035C\u035E\u035D\u0315\u035D\u0318\u035D\u031B\u035D\u031E\u035D\u0321\u035D\u0324\u035D\u0327\u035D\u032A\u035D\u032D\u035D\u0330\u035D\u0333\u035D\u0336\u035D\u033C\u035D\u033F\u035D\u0341\u035D\u0346\u035D\u0350\u035D\u0353\u035D\u0358\u035E\u0361\u035F\u035D\u035F\u0360\u0360\x83\u0361\u035F\u0362\u0374\x07i\u0363\u0374\xBA^\u0364\u0374\xBE`\u0365\u0374\xC2b\u0366\u0369\xBC_\u0367\u0368\x07$\u0368\u036A\x07%\u0369\u0367\u0369\u036A\u036A\u0374\u036B\u0374\x07}\u036C\u0374\x07x\u036D\u0374\xB6\\\u036E\u0371\xB8]\u036F\u0370\x07$\u0370\u0372\x07%\u0371\u036F\u0371\u0372\u0372\u0374\u0373\u0362\u0373\u0363\u0373\u0364\u0373\u03
65\u0373\u0366\u0373\u036B\u0373\u036C\u0373\u036D\u0373\u036E\u0374\x85\u0375\u037A\x82B\u0376\u0377\x07\u0377\u0379\x82B\u0378\u0376\u0379\u037C\u037A\u0378\u037A\u037B\u037B\x87\u037C\u037A\u037D\u0382\x8AF\u037E\u037F\x07\u037F\u0381\x8AF\u0380\u037E\u0381\u0384\u0382\u0380\u0382\u0383\u0383\u0386\u0384\u0382\u0385\u0387\x07\u0386\u0385\u0386\u0387\u0387\x89\u0388\u0389\xBC_\u0389\u038A\x07@\u038A\u038B\x82B\u038B\x8B\u038C\u038E\x07\u038D\u038F\x88E\u038E\u038D\u038E\u038F\u038F\u0390\u0390\u0395\x07\u0391\u0393\x86D\u0392\u0391\u0392\u0393\u0393\u0395\u0394\u038C\u0394\u0392\u0395\x8D\u0396\u0397\x82B\u0397\u0398\x07\u0398\u0399\x8CG\u0399\u039A\x07\u039A\x8F\u039B\u039F\x07\u039C\u039E\x92J\u039D\u039C\u039E\u03A1\u039F\u039D\u039F\u03A0\u03A0\u03A2\u03A1\u039F\u03A2\u03A3\x07\u03A3\x91\u03A4\u03B7\xBC_\u03A5\u03B7\x90I\u03A6\u03B7\x94K\u03A7\u03B7\x9AN\u03A8\u03B7\x9CO\u03A9\u03B7\xA2R\u03AA\u03B7\xA4S\u03AB\u03B7\xA6T\u03AC\u03B7\xAAV\u03AD\u03B7\xAEX\u03AE\u03B7\xB0Y\u03AF\u03B7\x07p\u03B0\u03B7\x07s\u03B1\u03B7\x07t\u03B2\u03B7\xB4[\u03B3\u03B7\xBA^\u03B4\u03B7\xC2b\u03B5\u03B7\xBE`\u03B6\u03A4\u03B6\u03A5\u03B6\u03A6\u03B6\u03A7\u03B6\u03A8\u03B6\u03A9\u03B6\u03AA\u03B6\u03AB\u03B6\u03AC\u03B6\u03AD\u03B6\u03AE\u03B6\u03AF\u03B6\u03B0\u03B6\u03B1\u03B6\u03B2\u03B6\u03B3\u03B6\u03B4\u03B6\u03B5\u03B7\x93\u03B8\u03BC\x98M\u03B9\u03BC\xB2Z\u03BA\u03BC\x96L\u03BB\u03B8\u03BB\u03B9\u03BB\u03BA\u03BC\x95\u03BD\u03BE\xBC_\u03BE\u03BF\x07'\u03BF\u03C0\xBC_\u03C0\x97\u03C1\u03C6\x075\u03C2\u03C6\x07&\u03C3\u03C6\x07<\u03C4\u03C6\xBC_\u03C5\u03C1\u03C5\u03C2\u03C5\u03C3\u03C5\u03C4\u03C6\u03D3\u03C7\u03C9\x07\u03C8\u03CA\x94K\u03C9\u03C8\u03C9\u03CA\u03CA\u03CF\u03CB\u03CC\x07\u03CC\u03CE\x94K\u03CD\u03CB\u03CE\u03D1\u03CF\u03CD\u03CF\u03D0\u03D0\u03D2\u03D1\u03CF\u03D2\u03D4\x07\u03D3\u03C7\u03D3\u03D4\u03D4\x99\u03D5\u03D6\x07[\u03D6\u03D9\x9EP\u03D7\u03D8\x07\\\u03D8\u03DA\x94K\u03D9\u03D7\u03D9\u03DA\u03DA\x9B\u03DB\u03DC\x9EP\u03DC\u03DD\x07\\\u03DD\u03DE\x94
K\u03DE\x9D\u03DF\u03E6\xBC_\u03E0\u03E6\x96L\u03E1\u03E2\x07\u03E2\u03E3\xA0Q\u03E3\u03E4\x07\u03E4\u03E6\u03E5\u03DF\u03E5\u03E0\u03E5\u03E1\u03E6\x9F\u03E7\u03EC\xBC_\u03E8\u03E9\x07\u03E9\u03EB\xBC_\u03EA\u03E8\u03EB\u03EE\u03EC\u03EA\u03EC\u03ED\u03ED\xA1\u03EE\u03EC\u03EF\u03F0\x07]\u03F0\u03F1\xBC_\u03F1\xA3\u03F2\u03F3\xBC_\u03F3\u03F4\x07@\u03F4\xA5\u03F5\u03F6\x07^\u03F6\u03FA\x94K\u03F7\u03F9\xA8U\u03F8\u03F7\u03F9\u03FC\u03FA\u03F8\u03FA\u03FB\u03FB\xA7\u03FC\u03FA\u03FD\u03FE\x07_\u03FE\u03FF\xB2Z\u03FF\u0400\x90I\u0400\u0404\u0401\u0402\x07`\u0402\u0404\x90I\u0403\u03FD\u0403\u0401\u0404\xA9\u0405\u0406\x07 \u0406\u0407\xBC_\u0407\u0409\x07\u0408\u040A\xA0Q\u0409\u0408\u0409\u040A\u040A\u040B\u040B\u040D\x07\u040C\u040E\xACW\u040D\u040C\u040D\u040E\u040E\u040F\u040F\u0410\x90I\u0410\xAB\u0411\u0412\x07a\u0412\u0413\xA0Q\u0413\xAD\u0414\u0417\x07\u0415\u0418\x90I\u0416\u0418\x94K\u0417\u0415\u0417\u0416\u0418\u0419\u0419\u041C\x94K\u041A\u041D\x90I\u041B\u041D\x94K\u041C\u041A\u041C\u041B\u041D\u041E\u041E\u041F\x90I\u041F\xAF\u0420\u0421\x07-\u0421\u0422\x94K\u0422\u0423\x90I\u0423\xB1\u0424\u0429\xC2b\u0425\u0429\x07j\u0426\u0429\x07k\u0427\u0429\xBE`\u0428\u0424\u0428\u0425\u0428\u0426\u0428\u0427\u0429\xB3\u042A\u042B\x073\u042B\u042C\xBC_\u042C\u042D\x90I\u042D\xB5\u042E\u0430\x07\u042F\u0431\x82B\u0430\u042F\u0430\u0431\u0431\u0438\u0432\u0434\x07\u0433\u0435\x82B\u0434\u0433\u0434\u0435\u0435\u0437\u0436\u0432\u0437\u043A\u0438\u0436\u0438\u0439\u0439\u043B\u043A\u0438\u043B\u0449\x07\u043C\u0445\x07$\u043D\u0442\x82B\u043E\u043F\x07\u043F\u0441\x82B\u0440\u043E\u0441\u0444\u0442\u0440\u0442\u0443\u0443\u0446\u0444\u0442\u0445\u043D\u0445\u0446\u0446\u0447\u0447\u0449\x07%\u0448\u042E"; -SolidityParser._serializedATNSegment2 = "\u0448\u043C\u0449\xB7\u044A\u044D\x80A\u044B\u044DJ&\u044C\u044A\u044C\u044B\u044D\xB9\u044E\u0450 \u044F\u0451\x07l\u0450\u044F\u0450\u0451\u0451\xBB\u0452\u0453 
\u0453\xBD\u0454\u0456\x07m\u0455\u0454\u0456\u0457\u0457\u0455\u0457\u0458\u0458\xBF\u0459\u0465\x07c\u045A\u045B\x07\u045B\u0460J&\u045C\u045D\x07\u045D\u045FJ&\u045E\u045C\u045F\u0462\u0460\u045E\u0460\u0461\u0461\u0463\u0462\u0460\u0463\u0464\x07\u0464\u0466\u0465\u045A\u0465\u0466\u0466\xC1\u0467\u0469\x07\x83\u0468\u0467\u0469\u046A\u046A\u0468\u046A\u046B\u046B\xC3\x80\xCD\xCF\xDD\xE1\xE6\xEC\xF0\xF3\xF8\xFE\u0105\u0109\u0116\u011E\u0123\u012D\u0130\u0136\u013E\u0141\u014C\u0155\u0157\u015D\u0178\u0186\u0189\u0190\u0194\u0196\u019B\u01A0\u01A3\u01A9\u01AD\u01B1\u01B6\u01C3\u01C5\u01CC\u01D6\u01DC\u01E7\u01EA\u01F0\u01F3\u01FB\u01FE\u0204\u0207\u020F\u0212\u0218\u021C\u0227\u022C\u0231\u0239\u023E\u024C\u024E\u0253\u025D\u0271\u027D\u0282\u0288\u028C\u028F\u029B\u02A4\u02A8\u02AB\u02B2\u02C6\u02DC\u02E0\u02E5\u02E9\u02ED\u02F2\u02F7\u02FB\u0313\u0349\u034D\u035D\u035F\u0369\u0371\u0373\u037A\u0382\u0386\u038E\u0392\u0394\u039F\u03B6\u03BB\u03C5\u03C9\u03CF\u03D3\u03D9\u03E5\u03EC\u03FA\u0403\u0409\u040D\u0417\u041C\u0428\u0430\u0434\u0438\u0442\u0445\u0448\u044C\u0450\u0457\u0460\u0465\u046A"; -SolidityParser._serializedATN = Utils2.join([ - _SolidityParser._serializedATNSegment0, - _SolidityParser._serializedATNSegment1, - _SolidityParser._serializedATNSegment2 -], ""); -var SourceUnitContext = class extends import_ParserRuleContext.ParserRuleContext { - EOF() { - return this.getToken(SolidityParser.EOF, 0); - } - pragmaDirective(i) { - if (i === void 0) { - return this.getRuleContexts(PragmaDirectiveContext); - } else { - return this.getRuleContext(i, PragmaDirectiveContext); - } - } - importDirective(i) { - if (i === void 0) { - return this.getRuleContexts(ImportDirectiveContext); - } else { - return this.getRuleContext(i, ImportDirectiveContext); - } - } - contractDefinition(i) { - if (i === void 0) { - return this.getRuleContexts(ContractDefinitionContext); - } else { - return this.getRuleContext(i, ContractDefinitionContext); - } - } - enumDefinition(i) 
{ - if (i === void 0) { - return this.getRuleContexts(EnumDefinitionContext); - } else { - return this.getRuleContext(i, EnumDefinitionContext); - } - } - structDefinition(i) { - if (i === void 0) { - return this.getRuleContexts(StructDefinitionContext); - } else { - return this.getRuleContext(i, StructDefinitionContext); - } - } - functionDefinition(i) { - if (i === void 0) { - return this.getRuleContexts(FunctionDefinitionContext); - } else { - return this.getRuleContext(i, FunctionDefinitionContext); - } - } - fileLevelConstant(i) { - if (i === void 0) { - return this.getRuleContexts(FileLevelConstantContext); - } else { - return this.getRuleContext(i, FileLevelConstantContext); - } - } - customErrorDefinition(i) { - if (i === void 0) { - return this.getRuleContexts(CustomErrorDefinitionContext); - } else { - return this.getRuleContext(i, CustomErrorDefinitionContext); - } - } - typeDefinition(i) { - if (i === void 0) { - return this.getRuleContexts(TypeDefinitionContext); - } else { - return this.getRuleContext(i, TypeDefinitionContext); - } - } - constructor(parent, invokingState) { - super(parent, invokingState); - } - get ruleIndex() { - return SolidityParser.RULE_sourceUnit; - } - enterRule(listener) { - if (listener.enterSourceUnit) { - listener.enterSourceUnit(this); - } - } - exitRule(listener) { - if (listener.exitSourceUnit) { - listener.exitSourceUnit(this); - } - } - accept(visitor) { - if (visitor.visitSourceUnit) { - return visitor.visitSourceUnit(this); - } else { - return visitor.visitChildren(this); - } - } -}; -var PragmaDirectiveContext = class extends import_ParserRuleContext.ParserRuleContext { - pragmaName() { - return this.getRuleContext(0, PragmaNameContext); - } - pragmaValue() { - return this.getRuleContext(0, PragmaValueContext); - } - constructor(parent, invokingState) { - super(parent, invokingState); - } - get ruleIndex() { - return SolidityParser.RULE_pragmaDirective; - } - enterRule(listener) { - if (listener.enterPragmaDirective) 
{ - listener.enterPragmaDirective(this); - } - } - exitRule(listener) { - if (listener.exitPragmaDirective) { - listener.exitPragmaDirective(this); - } - } - accept(visitor) { - if (visitor.visitPragmaDirective) { - return visitor.visitPragmaDirective(this); - } else { - return visitor.visitChildren(this); - } - } -}; -var PragmaNameContext = class extends import_ParserRuleContext.ParserRuleContext { - identifier() { - return this.getRuleContext(0, IdentifierContext); - } - constructor(parent, invokingState) { - super(parent, invokingState); - } - get ruleIndex() { - return SolidityParser.RULE_pragmaName; - } - enterRule(listener) { - if (listener.enterPragmaName) { - listener.enterPragmaName(this); - } - } - exitRule(listener) { - if (listener.exitPragmaName) { - listener.exitPragmaName(this); - } - } - accept(visitor) { - if (visitor.visitPragmaName) { - return visitor.visitPragmaName(this); - } else { - return visitor.visitChildren(this); - } - } -}; -var PragmaValueContext = class extends import_ParserRuleContext.ParserRuleContext { - version() { - return this.tryGetRuleContext(0, VersionContext); - } - expression() { - return this.tryGetRuleContext(0, ExpressionContext); - } - constructor(parent, invokingState) { - super(parent, invokingState); - } - get ruleIndex() { - return SolidityParser.RULE_pragmaValue; - } - enterRule(listener) { - if (listener.enterPragmaValue) { - listener.enterPragmaValue(this); - } - } - exitRule(listener) { - if (listener.exitPragmaValue) { - listener.exitPragmaValue(this); - } - } - accept(visitor) { - if (visitor.visitPragmaValue) { - return visitor.visitPragmaValue(this); - } else { - return visitor.visitChildren(this); - } - } -}; -var VersionContext = class extends import_ParserRuleContext.ParserRuleContext { - versionConstraint(i) { - if (i === void 0) { - return this.getRuleContexts(VersionConstraintContext); - } else { - return this.getRuleContext(i, VersionConstraintContext); - } - } - constructor(parent, invokingState) { 
- super(parent, invokingState); - } - get ruleIndex() { - return SolidityParser.RULE_version; - } - enterRule(listener) { - if (listener.enterVersion) { - listener.enterVersion(this); - } - } - exitRule(listener) { - if (listener.exitVersion) { - listener.exitVersion(this); - } - } - accept(visitor) { - if (visitor.visitVersion) { - return visitor.visitVersion(this); - } else { - return visitor.visitChildren(this); - } - } -}; -var VersionOperatorContext = class extends import_ParserRuleContext.ParserRuleContext { - constructor(parent, invokingState) { - super(parent, invokingState); - } - get ruleIndex() { - return SolidityParser.RULE_versionOperator; - } - enterRule(listener) { - if (listener.enterVersionOperator) { - listener.enterVersionOperator(this); - } - } - exitRule(listener) { - if (listener.exitVersionOperator) { - listener.exitVersionOperator(this); - } - } - accept(visitor) { - if (visitor.visitVersionOperator) { - return visitor.visitVersionOperator(this); - } else { - return visitor.visitChildren(this); - } - } -}; -var VersionConstraintContext = class extends import_ParserRuleContext.ParserRuleContext { - VersionLiteral() { - return this.tryGetToken(SolidityParser.VersionLiteral, 0); - } - versionOperator() { - return this.tryGetRuleContext(0, VersionOperatorContext); - } - DecimalNumber() { - return this.tryGetToken(SolidityParser.DecimalNumber, 0); - } - constructor(parent, invokingState) { - super(parent, invokingState); - } - get ruleIndex() { - return SolidityParser.RULE_versionConstraint; - } - enterRule(listener) { - if (listener.enterVersionConstraint) { - listener.enterVersionConstraint(this); - } - } - exitRule(listener) { - if (listener.exitVersionConstraint) { - listener.exitVersionConstraint(this); - } - } - accept(visitor) { - if (visitor.visitVersionConstraint) { - return visitor.visitVersionConstraint(this); - } else { - return visitor.visitChildren(this); - } - } -}; -var ImportDeclarationContext = class extends 
import_ParserRuleContext.ParserRuleContext { - identifier(i) { - if (i === void 0) { - return this.getRuleContexts(IdentifierContext); - } else { - return this.getRuleContext(i, IdentifierContext); - } - } - constructor(parent, invokingState) { - super(parent, invokingState); - } - get ruleIndex() { - return SolidityParser.RULE_importDeclaration; - } - enterRule(listener) { - if (listener.enterImportDeclaration) { - listener.enterImportDeclaration(this); - } - } - exitRule(listener) { - if (listener.exitImportDeclaration) { - listener.exitImportDeclaration(this); - } - } - accept(visitor) { - if (visitor.visitImportDeclaration) { - return visitor.visitImportDeclaration(this); - } else { - return visitor.visitChildren(this); - } - } -}; -var ImportDirectiveContext = class extends import_ParserRuleContext.ParserRuleContext { - importPath() { - return this.getRuleContext(0, ImportPathContext); - } - identifier(i) { - if (i === void 0) { - return this.getRuleContexts(IdentifierContext); - } else { - return this.getRuleContext(i, IdentifierContext); - } - } - importDeclaration(i) { - if (i === void 0) { - return this.getRuleContexts(ImportDeclarationContext); - } else { - return this.getRuleContext(i, ImportDeclarationContext); - } - } - constructor(parent, invokingState) { - super(parent, invokingState); - } - get ruleIndex() { - return SolidityParser.RULE_importDirective; - } - enterRule(listener) { - if (listener.enterImportDirective) { - listener.enterImportDirective(this); - } - } - exitRule(listener) { - if (listener.exitImportDirective) { - listener.exitImportDirective(this); - } - } - accept(visitor) { - if (visitor.visitImportDirective) { - return visitor.visitImportDirective(this); - } else { - return visitor.visitChildren(this); - } - } -}; -var ImportPathContext = class extends import_ParserRuleContext.ParserRuleContext { - StringLiteralFragment() { - return this.getToken(SolidityParser.StringLiteralFragment, 0); - } - constructor(parent, invokingState) { - 
super(parent, invokingState); - } - get ruleIndex() { - return SolidityParser.RULE_importPath; - } - enterRule(listener) { - if (listener.enterImportPath) { - listener.enterImportPath(this); - } - } - exitRule(listener) { - if (listener.exitImportPath) { - listener.exitImportPath(this); - } - } - accept(visitor) { - if (visitor.visitImportPath) { - return visitor.visitImportPath(this); - } else { - return visitor.visitChildren(this); - } - } -}; -var ContractDefinitionContext = class extends import_ParserRuleContext.ParserRuleContext { - identifier() { - return this.getRuleContext(0, IdentifierContext); - } - inheritanceSpecifier(i) { - if (i === void 0) { - return this.getRuleContexts(InheritanceSpecifierContext); - } else { - return this.getRuleContext(i, InheritanceSpecifierContext); - } - } - contractPart(i) { - if (i === void 0) { - return this.getRuleContexts(ContractPartContext); - } else { - return this.getRuleContext(i, ContractPartContext); - } - } - constructor(parent, invokingState) { - super(parent, invokingState); - } - get ruleIndex() { - return SolidityParser.RULE_contractDefinition; - } - enterRule(listener) { - if (listener.enterContractDefinition) { - listener.enterContractDefinition(this); - } - } - exitRule(listener) { - if (listener.exitContractDefinition) { - listener.exitContractDefinition(this); - } - } - accept(visitor) { - if (visitor.visitContractDefinition) { - return visitor.visitContractDefinition(this); - } else { - return visitor.visitChildren(this); - } - } -}; -var InheritanceSpecifierContext = class extends import_ParserRuleContext.ParserRuleContext { - userDefinedTypeName() { - return this.getRuleContext(0, UserDefinedTypeNameContext); - } - expressionList() { - return this.tryGetRuleContext(0, ExpressionListContext); - } - constructor(parent, invokingState) { - super(parent, invokingState); - } - get ruleIndex() { - return SolidityParser.RULE_inheritanceSpecifier; - } - enterRule(listener) { - if 
(listener.enterInheritanceSpecifier) { - listener.enterInheritanceSpecifier(this); - } - } - exitRule(listener) { - if (listener.exitInheritanceSpecifier) { - listener.exitInheritanceSpecifier(this); - } - } - accept(visitor) { - if (visitor.visitInheritanceSpecifier) { - return visitor.visitInheritanceSpecifier(this); - } else { - return visitor.visitChildren(this); - } - } -}; -var ContractPartContext = class extends import_ParserRuleContext.ParserRuleContext { - stateVariableDeclaration() { - return this.tryGetRuleContext(0, StateVariableDeclarationContext); - } - usingForDeclaration() { - return this.tryGetRuleContext(0, UsingForDeclarationContext); - } - structDefinition() { - return this.tryGetRuleContext(0, StructDefinitionContext); - } - modifierDefinition() { - return this.tryGetRuleContext(0, ModifierDefinitionContext); - } - functionDefinition() { - return this.tryGetRuleContext(0, FunctionDefinitionContext); - } - eventDefinition() { - return this.tryGetRuleContext(0, EventDefinitionContext); - } - enumDefinition() { - return this.tryGetRuleContext(0, EnumDefinitionContext); - } - customErrorDefinition() { - return this.tryGetRuleContext(0, CustomErrorDefinitionContext); - } - typeDefinition() { - return this.tryGetRuleContext(0, TypeDefinitionContext); - } - constructor(parent, invokingState) { - super(parent, invokingState); - } - get ruleIndex() { - return SolidityParser.RULE_contractPart; - } - enterRule(listener) { - if (listener.enterContractPart) { - listener.enterContractPart(this); - } - } - exitRule(listener) { - if (listener.exitContractPart) { - listener.exitContractPart(this); - } - } - accept(visitor) { - if (visitor.visitContractPart) { - return visitor.visitContractPart(this); - } else { - return visitor.visitChildren(this); - } - } -}; -var StateVariableDeclarationContext = class extends import_ParserRuleContext.ParserRuleContext { - typeName() { - return this.getRuleContext(0, TypeNameContext); - } - identifier() { - return 
this.getRuleContext(0, IdentifierContext); - } - PublicKeyword(i) { - if (i === void 0) { - return this.getTokens(SolidityParser.PublicKeyword); - } else { - return this.getToken(SolidityParser.PublicKeyword, i); - } - } - InternalKeyword(i) { - if (i === void 0) { - return this.getTokens(SolidityParser.InternalKeyword); - } else { - return this.getToken(SolidityParser.InternalKeyword, i); - } - } - PrivateKeyword(i) { - if (i === void 0) { - return this.getTokens(SolidityParser.PrivateKeyword); - } else { - return this.getToken(SolidityParser.PrivateKeyword, i); - } - } - ConstantKeyword(i) { - if (i === void 0) { - return this.getTokens(SolidityParser.ConstantKeyword); - } else { - return this.getToken(SolidityParser.ConstantKeyword, i); - } - } - ImmutableKeyword(i) { - if (i === void 0) { - return this.getTokens(SolidityParser.ImmutableKeyword); - } else { - return this.getToken(SolidityParser.ImmutableKeyword, i); - } - } - overrideSpecifier(i) { - if (i === void 0) { - return this.getRuleContexts(OverrideSpecifierContext); - } else { - return this.getRuleContext(i, OverrideSpecifierContext); - } - } - expression() { - return this.tryGetRuleContext(0, ExpressionContext); - } - constructor(parent, invokingState) { - super(parent, invokingState); - } - get ruleIndex() { - return SolidityParser.RULE_stateVariableDeclaration; - } - enterRule(listener) { - if (listener.enterStateVariableDeclaration) { - listener.enterStateVariableDeclaration(this); - } - } - exitRule(listener) { - if (listener.exitStateVariableDeclaration) { - listener.exitStateVariableDeclaration(this); - } - } - accept(visitor) { - if (visitor.visitStateVariableDeclaration) { - return visitor.visitStateVariableDeclaration(this); - } else { - return visitor.visitChildren(this); - } - } -}; -var FileLevelConstantContext = class extends import_ParserRuleContext.ParserRuleContext { - typeName() { - return this.getRuleContext(0, TypeNameContext); - } - ConstantKeyword() { - return 
this.getToken(SolidityParser.ConstantKeyword, 0); - } - identifier() { - return this.getRuleContext(0, IdentifierContext); - } - expression() { - return this.getRuleContext(0, ExpressionContext); - } - constructor(parent, invokingState) { - super(parent, invokingState); - } - get ruleIndex() { - return SolidityParser.RULE_fileLevelConstant; - } - enterRule(listener) { - if (listener.enterFileLevelConstant) { - listener.enterFileLevelConstant(this); - } - } - exitRule(listener) { - if (listener.exitFileLevelConstant) { - listener.exitFileLevelConstant(this); - } - } - accept(visitor) { - if (visitor.visitFileLevelConstant) { - return visitor.visitFileLevelConstant(this); - } else { - return visitor.visitChildren(this); - } - } -}; -var CustomErrorDefinitionContext = class extends import_ParserRuleContext.ParserRuleContext { - identifier() { - return this.getRuleContext(0, IdentifierContext); - } - parameterList() { - return this.getRuleContext(0, ParameterListContext); - } - constructor(parent, invokingState) { - super(parent, invokingState); - } - get ruleIndex() { - return SolidityParser.RULE_customErrorDefinition; - } - enterRule(listener) { - if (listener.enterCustomErrorDefinition) { - listener.enterCustomErrorDefinition(this); - } - } - exitRule(listener) { - if (listener.exitCustomErrorDefinition) { - listener.exitCustomErrorDefinition(this); - } - } - accept(visitor) { - if (visitor.visitCustomErrorDefinition) { - return visitor.visitCustomErrorDefinition(this); - } else { - return visitor.visitChildren(this); - } - } -}; -var TypeDefinitionContext = class extends import_ParserRuleContext.ParserRuleContext { - TypeKeyword() { - return this.getToken(SolidityParser.TypeKeyword, 0); - } - identifier() { - return this.getRuleContext(0, IdentifierContext); - } - elementaryTypeName() { - return this.getRuleContext(0, ElementaryTypeNameContext); - } - constructor(parent, invokingState) { - super(parent, invokingState); - } - get ruleIndex() { - return 
SolidityParser.RULE_typeDefinition; - } - enterRule(listener) { - if (listener.enterTypeDefinition) { - listener.enterTypeDefinition(this); - } - } - exitRule(listener) { - if (listener.exitTypeDefinition) { - listener.exitTypeDefinition(this); - } - } - accept(visitor) { - if (visitor.visitTypeDefinition) { - return visitor.visitTypeDefinition(this); - } else { - return visitor.visitChildren(this); - } - } -}; -var UsingForDeclarationContext = class extends import_ParserRuleContext.ParserRuleContext { - userDefinedTypeName() { - return this.getRuleContext(0, UserDefinedTypeNameContext); - } - typeName() { - return this.tryGetRuleContext(0, TypeNameContext); - } - constructor(parent, invokingState) { - super(parent, invokingState); - } - get ruleIndex() { - return SolidityParser.RULE_usingForDeclaration; - } - enterRule(listener) { - if (listener.enterUsingForDeclaration) { - listener.enterUsingForDeclaration(this); - } - } - exitRule(listener) { - if (listener.exitUsingForDeclaration) { - listener.exitUsingForDeclaration(this); - } - } - accept(visitor) { - if (visitor.visitUsingForDeclaration) { - return visitor.visitUsingForDeclaration(this); - } else { - return visitor.visitChildren(this); - } - } -}; -var StructDefinitionContext = class extends import_ParserRuleContext.ParserRuleContext { - identifier() { - return this.getRuleContext(0, IdentifierContext); - } - variableDeclaration(i) { - if (i === void 0) { - return this.getRuleContexts(VariableDeclarationContext); - } else { - return this.getRuleContext(i, VariableDeclarationContext); - } - } - constructor(parent, invokingState) { - super(parent, invokingState); - } - get ruleIndex() { - return SolidityParser.RULE_structDefinition; - } - enterRule(listener) { - if (listener.enterStructDefinition) { - listener.enterStructDefinition(this); - } - } - exitRule(listener) { - if (listener.exitStructDefinition) { - listener.exitStructDefinition(this); - } - } - accept(visitor) { - if (visitor.visitStructDefinition) 
{ - return visitor.visitStructDefinition(this); - } else { - return visitor.visitChildren(this); - } - } -}; -var ModifierDefinitionContext = class extends import_ParserRuleContext.ParserRuleContext { - identifier() { - return this.getRuleContext(0, IdentifierContext); - } - block() { - return this.tryGetRuleContext(0, BlockContext); - } - parameterList() { - return this.tryGetRuleContext(0, ParameterListContext); - } - VirtualKeyword(i) { - if (i === void 0) { - return this.getTokens(SolidityParser.VirtualKeyword); - } else { - return this.getToken(SolidityParser.VirtualKeyword, i); - } - } - overrideSpecifier(i) { - if (i === void 0) { - return this.getRuleContexts(OverrideSpecifierContext); - } else { - return this.getRuleContext(i, OverrideSpecifierContext); - } - } - constructor(parent, invokingState) { - super(parent, invokingState); - } - get ruleIndex() { - return SolidityParser.RULE_modifierDefinition; - } - enterRule(listener) { - if (listener.enterModifierDefinition) { - listener.enterModifierDefinition(this); - } - } - exitRule(listener) { - if (listener.exitModifierDefinition) { - listener.exitModifierDefinition(this); - } - } - accept(visitor) { - if (visitor.visitModifierDefinition) { - return visitor.visitModifierDefinition(this); - } else { - return visitor.visitChildren(this); - } - } -}; -var ModifierInvocationContext = class extends import_ParserRuleContext.ParserRuleContext { - identifier() { - return this.getRuleContext(0, IdentifierContext); - } - expressionList() { - return this.tryGetRuleContext(0, ExpressionListContext); - } - constructor(parent, invokingState) { - super(parent, invokingState); - } - get ruleIndex() { - return SolidityParser.RULE_modifierInvocation; - } - enterRule(listener) { - if (listener.enterModifierInvocation) { - listener.enterModifierInvocation(this); - } - } - exitRule(listener) { - if (listener.exitModifierInvocation) { - listener.exitModifierInvocation(this); - } - } - accept(visitor) { - if 
(visitor.visitModifierInvocation) { - return visitor.visitModifierInvocation(this); - } else { - return visitor.visitChildren(this); - } - } -}; -var FunctionDefinitionContext = class extends import_ParserRuleContext.ParserRuleContext { - functionDescriptor() { - return this.getRuleContext(0, FunctionDescriptorContext); - } - parameterList() { - return this.getRuleContext(0, ParameterListContext); - } - modifierList() { - return this.getRuleContext(0, ModifierListContext); - } - block() { - return this.tryGetRuleContext(0, BlockContext); - } - returnParameters() { - return this.tryGetRuleContext(0, ReturnParametersContext); - } - constructor(parent, invokingState) { - super(parent, invokingState); - } - get ruleIndex() { - return SolidityParser.RULE_functionDefinition; - } - enterRule(listener) { - if (listener.enterFunctionDefinition) { - listener.enterFunctionDefinition(this); - } - } - exitRule(listener) { - if (listener.exitFunctionDefinition) { - listener.exitFunctionDefinition(this); - } - } - accept(visitor) { - if (visitor.visitFunctionDefinition) { - return visitor.visitFunctionDefinition(this); - } else { - return visitor.visitChildren(this); - } - } -}; -var FunctionDescriptorContext = class extends import_ParserRuleContext.ParserRuleContext { - identifier() { - return this.tryGetRuleContext(0, IdentifierContext); - } - ConstructorKeyword() { - return this.tryGetToken(SolidityParser.ConstructorKeyword, 0); - } - FallbackKeyword() { - return this.tryGetToken(SolidityParser.FallbackKeyword, 0); - } - ReceiveKeyword() { - return this.tryGetToken(SolidityParser.ReceiveKeyword, 0); - } - constructor(parent, invokingState) { - super(parent, invokingState); - } - get ruleIndex() { - return SolidityParser.RULE_functionDescriptor; - } - enterRule(listener) { - if (listener.enterFunctionDescriptor) { - listener.enterFunctionDescriptor(this); - } - } - exitRule(listener) { - if (listener.exitFunctionDescriptor) { - listener.exitFunctionDescriptor(this); - } - } - 
accept(visitor) { - if (visitor.visitFunctionDescriptor) { - return visitor.visitFunctionDescriptor(this); - } else { - return visitor.visitChildren(this); - } - } -}; -var ReturnParametersContext = class extends import_ParserRuleContext.ParserRuleContext { - parameterList() { - return this.getRuleContext(0, ParameterListContext); - } - constructor(parent, invokingState) { - super(parent, invokingState); - } - get ruleIndex() { - return SolidityParser.RULE_returnParameters; - } - enterRule(listener) { - if (listener.enterReturnParameters) { - listener.enterReturnParameters(this); - } - } - exitRule(listener) { - if (listener.exitReturnParameters) { - listener.exitReturnParameters(this); - } - } - accept(visitor) { - if (visitor.visitReturnParameters) { - return visitor.visitReturnParameters(this); - } else { - return visitor.visitChildren(this); - } - } -}; -var ModifierListContext = class extends import_ParserRuleContext.ParserRuleContext { - ExternalKeyword(i) { - if (i === void 0) { - return this.getTokens(SolidityParser.ExternalKeyword); - } else { - return this.getToken(SolidityParser.ExternalKeyword, i); - } - } - PublicKeyword(i) { - if (i === void 0) { - return this.getTokens(SolidityParser.PublicKeyword); - } else { - return this.getToken(SolidityParser.PublicKeyword, i); - } - } - InternalKeyword(i) { - if (i === void 0) { - return this.getTokens(SolidityParser.InternalKeyword); - } else { - return this.getToken(SolidityParser.InternalKeyword, i); - } - } - PrivateKeyword(i) { - if (i === void 0) { - return this.getTokens(SolidityParser.PrivateKeyword); - } else { - return this.getToken(SolidityParser.PrivateKeyword, i); - } - } - VirtualKeyword(i) { - if (i === void 0) { - return this.getTokens(SolidityParser.VirtualKeyword); - } else { - return this.getToken(SolidityParser.VirtualKeyword, i); - } - } - stateMutability(i) { - if (i === void 0) { - return this.getRuleContexts(StateMutabilityContext); - } else { - return this.getRuleContext(i, 
StateMutabilityContext); - } - } - modifierInvocation(i) { - if (i === void 0) { - return this.getRuleContexts(ModifierInvocationContext); - } else { - return this.getRuleContext(i, ModifierInvocationContext); - } - } - overrideSpecifier(i) { - if (i === void 0) { - return this.getRuleContexts(OverrideSpecifierContext); - } else { - return this.getRuleContext(i, OverrideSpecifierContext); - } - } - constructor(parent, invokingState) { - super(parent, invokingState); - } - get ruleIndex() { - return SolidityParser.RULE_modifierList; - } - enterRule(listener) { - if (listener.enterModifierList) { - listener.enterModifierList(this); - } - } - exitRule(listener) { - if (listener.exitModifierList) { - listener.exitModifierList(this); - } - } - accept(visitor) { - if (visitor.visitModifierList) { - return visitor.visitModifierList(this); - } else { - return visitor.visitChildren(this); - } - } -}; -var EventDefinitionContext = class extends import_ParserRuleContext.ParserRuleContext { - identifier() { - return this.getRuleContext(0, IdentifierContext); - } - eventParameterList() { - return this.getRuleContext(0, EventParameterListContext); - } - AnonymousKeyword() { - return this.tryGetToken(SolidityParser.AnonymousKeyword, 0); - } - constructor(parent, invokingState) { - super(parent, invokingState); - } - get ruleIndex() { - return SolidityParser.RULE_eventDefinition; - } - enterRule(listener) { - if (listener.enterEventDefinition) { - listener.enterEventDefinition(this); - } - } - exitRule(listener) { - if (listener.exitEventDefinition) { - listener.exitEventDefinition(this); - } - } - accept(visitor) { - if (visitor.visitEventDefinition) { - return visitor.visitEventDefinition(this); - } else { - return visitor.visitChildren(this); - } - } -}; -var EnumValueContext = class extends import_ParserRuleContext.ParserRuleContext { - identifier() { - return this.getRuleContext(0, IdentifierContext); - } - constructor(parent, invokingState) { - super(parent, invokingState); 
- } - get ruleIndex() { - return SolidityParser.RULE_enumValue; - } - enterRule(listener) { - if (listener.enterEnumValue) { - listener.enterEnumValue(this); - } - } - exitRule(listener) { - if (listener.exitEnumValue) { - listener.exitEnumValue(this); - } - } - accept(visitor) { - if (visitor.visitEnumValue) { - return visitor.visitEnumValue(this); - } else { - return visitor.visitChildren(this); - } - } -}; -var EnumDefinitionContext = class extends import_ParserRuleContext.ParserRuleContext { - identifier() { - return this.getRuleContext(0, IdentifierContext); - } - enumValue(i) { - if (i === void 0) { - return this.getRuleContexts(EnumValueContext); - } else { - return this.getRuleContext(i, EnumValueContext); - } - } - constructor(parent, invokingState) { - super(parent, invokingState); - } - get ruleIndex() { - return SolidityParser.RULE_enumDefinition; - } - enterRule(listener) { - if (listener.enterEnumDefinition) { - listener.enterEnumDefinition(this); - } - } - exitRule(listener) { - if (listener.exitEnumDefinition) { - listener.exitEnumDefinition(this); - } - } - accept(visitor) { - if (visitor.visitEnumDefinition) { - return visitor.visitEnumDefinition(this); - } else { - return visitor.visitChildren(this); - } - } -}; -var ParameterListContext = class extends import_ParserRuleContext.ParserRuleContext { - parameter(i) { - if (i === void 0) { - return this.getRuleContexts(ParameterContext); - } else { - return this.getRuleContext(i, ParameterContext); - } - } - constructor(parent, invokingState) { - super(parent, invokingState); - } - get ruleIndex() { - return SolidityParser.RULE_parameterList; - } - enterRule(listener) { - if (listener.enterParameterList) { - listener.enterParameterList(this); - } - } - exitRule(listener) { - if (listener.exitParameterList) { - listener.exitParameterList(this); - } - } - accept(visitor) { - if (visitor.visitParameterList) { - return visitor.visitParameterList(this); - } else { - return visitor.visitChildren(this); - } 
- } -}; -var ParameterContext = class extends import_ParserRuleContext.ParserRuleContext { - typeName() { - return this.getRuleContext(0, TypeNameContext); - } - storageLocation() { - return this.tryGetRuleContext(0, StorageLocationContext); - } - identifier() { - return this.tryGetRuleContext(0, IdentifierContext); - } - constructor(parent, invokingState) { - super(parent, invokingState); - } - get ruleIndex() { - return SolidityParser.RULE_parameter; - } - enterRule(listener) { - if (listener.enterParameter) { - listener.enterParameter(this); - } - } - exitRule(listener) { - if (listener.exitParameter) { - listener.exitParameter(this); - } - } - accept(visitor) { - if (visitor.visitParameter) { - return visitor.visitParameter(this); - } else { - return visitor.visitChildren(this); - } - } -}; -var EventParameterListContext = class extends import_ParserRuleContext.ParserRuleContext { - eventParameter(i) { - if (i === void 0) { - return this.getRuleContexts(EventParameterContext); - } else { - return this.getRuleContext(i, EventParameterContext); - } - } - constructor(parent, invokingState) { - super(parent, invokingState); - } - get ruleIndex() { - return SolidityParser.RULE_eventParameterList; - } - enterRule(listener) { - if (listener.enterEventParameterList) { - listener.enterEventParameterList(this); - } - } - exitRule(listener) { - if (listener.exitEventParameterList) { - listener.exitEventParameterList(this); - } - } - accept(visitor) { - if (visitor.visitEventParameterList) { - return visitor.visitEventParameterList(this); - } else { - return visitor.visitChildren(this); - } - } -}; -var EventParameterContext = class extends import_ParserRuleContext.ParserRuleContext { - typeName() { - return this.getRuleContext(0, TypeNameContext); - } - IndexedKeyword() { - return this.tryGetToken(SolidityParser.IndexedKeyword, 0); - } - identifier() { - return this.tryGetRuleContext(0, IdentifierContext); - } - constructor(parent, invokingState) { - super(parent, 
invokingState); - } - get ruleIndex() { - return SolidityParser.RULE_eventParameter; - } - enterRule(listener) { - if (listener.enterEventParameter) { - listener.enterEventParameter(this); - } - } - exitRule(listener) { - if (listener.exitEventParameter) { - listener.exitEventParameter(this); - } - } - accept(visitor) { - if (visitor.visitEventParameter) { - return visitor.visitEventParameter(this); - } else { - return visitor.visitChildren(this); - } - } -}; -var FunctionTypeParameterListContext = class extends import_ParserRuleContext.ParserRuleContext { - functionTypeParameter(i) { - if (i === void 0) { - return this.getRuleContexts(FunctionTypeParameterContext); - } else { - return this.getRuleContext(i, FunctionTypeParameterContext); - } - } - constructor(parent, invokingState) { - super(parent, invokingState); - } - get ruleIndex() { - return SolidityParser.RULE_functionTypeParameterList; - } - enterRule(listener) { - if (listener.enterFunctionTypeParameterList) { - listener.enterFunctionTypeParameterList(this); - } - } - exitRule(listener) { - if (listener.exitFunctionTypeParameterList) { - listener.exitFunctionTypeParameterList(this); - } - } - accept(visitor) { - if (visitor.visitFunctionTypeParameterList) { - return visitor.visitFunctionTypeParameterList(this); - } else { - return visitor.visitChildren(this); - } - } -}; -var FunctionTypeParameterContext = class extends import_ParserRuleContext.ParserRuleContext { - typeName() { - return this.getRuleContext(0, TypeNameContext); - } - storageLocation() { - return this.tryGetRuleContext(0, StorageLocationContext); - } - constructor(parent, invokingState) { - super(parent, invokingState); - } - get ruleIndex() { - return SolidityParser.RULE_functionTypeParameter; - } - enterRule(listener) { - if (listener.enterFunctionTypeParameter) { - listener.enterFunctionTypeParameter(this); - } - } - exitRule(listener) { - if (listener.exitFunctionTypeParameter) { - listener.exitFunctionTypeParameter(this); - } - } - 
accept(visitor) { - if (visitor.visitFunctionTypeParameter) { - return visitor.visitFunctionTypeParameter(this); - } else { - return visitor.visitChildren(this); - } - } -}; -var VariableDeclarationContext = class extends import_ParserRuleContext.ParserRuleContext { - typeName() { - return this.getRuleContext(0, TypeNameContext); - } - identifier() { - return this.getRuleContext(0, IdentifierContext); - } - storageLocation() { - return this.tryGetRuleContext(0, StorageLocationContext); - } - constructor(parent, invokingState) { - super(parent, invokingState); - } - get ruleIndex() { - return SolidityParser.RULE_variableDeclaration; - } - enterRule(listener) { - if (listener.enterVariableDeclaration) { - listener.enterVariableDeclaration(this); - } - } - exitRule(listener) { - if (listener.exitVariableDeclaration) { - listener.exitVariableDeclaration(this); - } - } - accept(visitor) { - if (visitor.visitVariableDeclaration) { - return visitor.visitVariableDeclaration(this); - } else { - return visitor.visitChildren(this); - } - } -}; -var TypeNameContext = class extends import_ParserRuleContext.ParserRuleContext { - elementaryTypeName() { - return this.tryGetRuleContext(0, ElementaryTypeNameContext); - } - userDefinedTypeName() { - return this.tryGetRuleContext(0, UserDefinedTypeNameContext); - } - mapping() { - return this.tryGetRuleContext(0, MappingContext); - } - typeName() { - return this.tryGetRuleContext(0, TypeNameContext); - } - expression() { - return this.tryGetRuleContext(0, ExpressionContext); - } - functionTypeName() { - return this.tryGetRuleContext(0, FunctionTypeNameContext); - } - PayableKeyword() { - return this.tryGetToken(SolidityParser.PayableKeyword, 0); - } - constructor(parent, invokingState) { - super(parent, invokingState); - } - get ruleIndex() { - return SolidityParser.RULE_typeName; - } - enterRule(listener) { - if (listener.enterTypeName) { - listener.enterTypeName(this); - } - } - exitRule(listener) { - if (listener.exitTypeName) { - 
listener.exitTypeName(this); - } - } - accept(visitor) { - if (visitor.visitTypeName) { - return visitor.visitTypeName(this); - } else { - return visitor.visitChildren(this); - } - } -}; -var UserDefinedTypeNameContext = class extends import_ParserRuleContext.ParserRuleContext { - identifier(i) { - if (i === void 0) { - return this.getRuleContexts(IdentifierContext); - } else { - return this.getRuleContext(i, IdentifierContext); - } - } - constructor(parent, invokingState) { - super(parent, invokingState); - } - get ruleIndex() { - return SolidityParser.RULE_userDefinedTypeName; - } - enterRule(listener) { - if (listener.enterUserDefinedTypeName) { - listener.enterUserDefinedTypeName(this); - } - } - exitRule(listener) { - if (listener.exitUserDefinedTypeName) { - listener.exitUserDefinedTypeName(this); - } - } - accept(visitor) { - if (visitor.visitUserDefinedTypeName) { - return visitor.visitUserDefinedTypeName(this); - } else { - return visitor.visitChildren(this); - } - } -}; -var MappingKeyContext = class extends import_ParserRuleContext.ParserRuleContext { - elementaryTypeName() { - return this.tryGetRuleContext(0, ElementaryTypeNameContext); - } - userDefinedTypeName() { - return this.tryGetRuleContext(0, UserDefinedTypeNameContext); - } - constructor(parent, invokingState) { - super(parent, invokingState); - } - get ruleIndex() { - return SolidityParser.RULE_mappingKey; - } - enterRule(listener) { - if (listener.enterMappingKey) { - listener.enterMappingKey(this); - } - } - exitRule(listener) { - if (listener.exitMappingKey) { - listener.exitMappingKey(this); - } - } - accept(visitor) { - if (visitor.visitMappingKey) { - return visitor.visitMappingKey(this); - } else { - return visitor.visitChildren(this); - } - } -}; -var MappingContext = class extends import_ParserRuleContext.ParserRuleContext { - mappingKey() { - return this.getRuleContext(0, MappingKeyContext); - } - typeName() { - return this.getRuleContext(0, TypeNameContext); - } - 
constructor(parent, invokingState) { - super(parent, invokingState); - } - get ruleIndex() { - return SolidityParser.RULE_mapping; - } - enterRule(listener) { - if (listener.enterMapping) { - listener.enterMapping(this); - } - } - exitRule(listener) { - if (listener.exitMapping) { - listener.exitMapping(this); - } - } - accept(visitor) { - if (visitor.visitMapping) { - return visitor.visitMapping(this); - } else { - return visitor.visitChildren(this); - } - } -}; -var FunctionTypeNameContext = class extends import_ParserRuleContext.ParserRuleContext { - functionTypeParameterList(i) { - if (i === void 0) { - return this.getRuleContexts(FunctionTypeParameterListContext); - } else { - return this.getRuleContext(i, FunctionTypeParameterListContext); - } - } - InternalKeyword(i) { - if (i === void 0) { - return this.getTokens(SolidityParser.InternalKeyword); - } else { - return this.getToken(SolidityParser.InternalKeyword, i); - } - } - ExternalKeyword(i) { - if (i === void 0) { - return this.getTokens(SolidityParser.ExternalKeyword); - } else { - return this.getToken(SolidityParser.ExternalKeyword, i); - } - } - stateMutability(i) { - if (i === void 0) { - return this.getRuleContexts(StateMutabilityContext); - } else { - return this.getRuleContext(i, StateMutabilityContext); - } - } - constructor(parent, invokingState) { - super(parent, invokingState); - } - get ruleIndex() { - return SolidityParser.RULE_functionTypeName; - } - enterRule(listener) { - if (listener.enterFunctionTypeName) { - listener.enterFunctionTypeName(this); - } - } - exitRule(listener) { - if (listener.exitFunctionTypeName) { - listener.exitFunctionTypeName(this); - } - } - accept(visitor) { - if (visitor.visitFunctionTypeName) { - return visitor.visitFunctionTypeName(this); - } else { - return visitor.visitChildren(this); - } - } -}; -var StorageLocationContext = class extends import_ParserRuleContext.ParserRuleContext { - constructor(parent, invokingState) { - super(parent, invokingState); - } - 
get ruleIndex() { - return SolidityParser.RULE_storageLocation; - } - enterRule(listener) { - if (listener.enterStorageLocation) { - listener.enterStorageLocation(this); - } - } - exitRule(listener) { - if (listener.exitStorageLocation) { - listener.exitStorageLocation(this); - } - } - accept(visitor) { - if (visitor.visitStorageLocation) { - return visitor.visitStorageLocation(this); - } else { - return visitor.visitChildren(this); - } - } -}; -var StateMutabilityContext = class extends import_ParserRuleContext.ParserRuleContext { - PureKeyword() { - return this.tryGetToken(SolidityParser.PureKeyword, 0); - } - ConstantKeyword() { - return this.tryGetToken(SolidityParser.ConstantKeyword, 0); - } - ViewKeyword() { - return this.tryGetToken(SolidityParser.ViewKeyword, 0); - } - PayableKeyword() { - return this.tryGetToken(SolidityParser.PayableKeyword, 0); - } - constructor(parent, invokingState) { - super(parent, invokingState); - } - get ruleIndex() { - return SolidityParser.RULE_stateMutability; - } - enterRule(listener) { - if (listener.enterStateMutability) { - listener.enterStateMutability(this); - } - } - exitRule(listener) { - if (listener.exitStateMutability) { - listener.exitStateMutability(this); - } - } - accept(visitor) { - if (visitor.visitStateMutability) { - return visitor.visitStateMutability(this); - } else { - return visitor.visitChildren(this); - } - } -}; -var BlockContext = class extends import_ParserRuleContext.ParserRuleContext { - statement(i) { - if (i === void 0) { - return this.getRuleContexts(StatementContext); - } else { - return this.getRuleContext(i, StatementContext); - } - } - constructor(parent, invokingState) { - super(parent, invokingState); - } - get ruleIndex() { - return SolidityParser.RULE_block; - } - enterRule(listener) { - if (listener.enterBlock) { - listener.enterBlock(this); - } - } - exitRule(listener) { - if (listener.exitBlock) { - listener.exitBlock(this); - } - } - accept(visitor) { - if (visitor.visitBlock) { - 
return visitor.visitBlock(this); - } else { - return visitor.visitChildren(this); - } - } -}; -var StatementContext = class extends import_ParserRuleContext.ParserRuleContext { - ifStatement() { - return this.tryGetRuleContext(0, IfStatementContext); - } - tryStatement() { - return this.tryGetRuleContext(0, TryStatementContext); - } - whileStatement() { - return this.tryGetRuleContext(0, WhileStatementContext); - } - forStatement() { - return this.tryGetRuleContext(0, ForStatementContext); - } - block() { - return this.tryGetRuleContext(0, BlockContext); - } - inlineAssemblyStatement() { - return this.tryGetRuleContext(0, InlineAssemblyStatementContext); - } - doWhileStatement() { - return this.tryGetRuleContext(0, DoWhileStatementContext); - } - continueStatement() { - return this.tryGetRuleContext(0, ContinueStatementContext); - } - breakStatement() { - return this.tryGetRuleContext(0, BreakStatementContext); - } - returnStatement() { - return this.tryGetRuleContext(0, ReturnStatementContext); - } - throwStatement() { - return this.tryGetRuleContext(0, ThrowStatementContext); - } - emitStatement() { - return this.tryGetRuleContext(0, EmitStatementContext); - } - simpleStatement() { - return this.tryGetRuleContext(0, SimpleStatementContext); - } - uncheckedStatement() { - return this.tryGetRuleContext(0, UncheckedStatementContext); - } - revertStatement() { - return this.tryGetRuleContext(0, RevertStatementContext); - } - constructor(parent, invokingState) { - super(parent, invokingState); - } - get ruleIndex() { - return SolidityParser.RULE_statement; - } - enterRule(listener) { - if (listener.enterStatement) { - listener.enterStatement(this); - } - } - exitRule(listener) { - if (listener.exitStatement) { - listener.exitStatement(this); - } - } - accept(visitor) { - if (visitor.visitStatement) { - return visitor.visitStatement(this); - } else { - return visitor.visitChildren(this); - } - } -}; -var ExpressionStatementContext = class extends 
import_ParserRuleContext.ParserRuleContext { - expression() { - return this.getRuleContext(0, ExpressionContext); - } - constructor(parent, invokingState) { - super(parent, invokingState); - } - get ruleIndex() { - return SolidityParser.RULE_expressionStatement; - } - enterRule(listener) { - if (listener.enterExpressionStatement) { - listener.enterExpressionStatement(this); - } - } - exitRule(listener) { - if (listener.exitExpressionStatement) { - listener.exitExpressionStatement(this); - } - } - accept(visitor) { - if (visitor.visitExpressionStatement) { - return visitor.visitExpressionStatement(this); - } else { - return visitor.visitChildren(this); - } - } -}; -var IfStatementContext = class extends import_ParserRuleContext.ParserRuleContext { - expression() { - return this.getRuleContext(0, ExpressionContext); - } - statement(i) { - if (i === void 0) { - return this.getRuleContexts(StatementContext); - } else { - return this.getRuleContext(i, StatementContext); - } - } - constructor(parent, invokingState) { - super(parent, invokingState); - } - get ruleIndex() { - return SolidityParser.RULE_ifStatement; - } - enterRule(listener) { - if (listener.enterIfStatement) { - listener.enterIfStatement(this); - } - } - exitRule(listener) { - if (listener.exitIfStatement) { - listener.exitIfStatement(this); - } - } - accept(visitor) { - if (visitor.visitIfStatement) { - return visitor.visitIfStatement(this); - } else { - return visitor.visitChildren(this); - } - } -}; -var TryStatementContext = class extends import_ParserRuleContext.ParserRuleContext { - expression() { - return this.getRuleContext(0, ExpressionContext); - } - block() { - return this.getRuleContext(0, BlockContext); - } - returnParameters() { - return this.tryGetRuleContext(0, ReturnParametersContext); - } - catchClause(i) { - if (i === void 0) { - return this.getRuleContexts(CatchClauseContext); - } else { - return this.getRuleContext(i, CatchClauseContext); - } - } - constructor(parent, invokingState) { 
- super(parent, invokingState); - } - get ruleIndex() { - return SolidityParser.RULE_tryStatement; - } - enterRule(listener) { - if (listener.enterTryStatement) { - listener.enterTryStatement(this); - } - } - exitRule(listener) { - if (listener.exitTryStatement) { - listener.exitTryStatement(this); - } - } - accept(visitor) { - if (visitor.visitTryStatement) { - return visitor.visitTryStatement(this); - } else { - return visitor.visitChildren(this); - } - } -}; -var CatchClauseContext = class extends import_ParserRuleContext.ParserRuleContext { - block() { - return this.getRuleContext(0, BlockContext); - } - parameterList() { - return this.tryGetRuleContext(0, ParameterListContext); - } - identifier() { - return this.tryGetRuleContext(0, IdentifierContext); - } - constructor(parent, invokingState) { - super(parent, invokingState); - } - get ruleIndex() { - return SolidityParser.RULE_catchClause; - } - enterRule(listener) { - if (listener.enterCatchClause) { - listener.enterCatchClause(this); - } - } - exitRule(listener) { - if (listener.exitCatchClause) { - listener.exitCatchClause(this); - } - } - accept(visitor) { - if (visitor.visitCatchClause) { - return visitor.visitCatchClause(this); - } else { - return visitor.visitChildren(this); - } - } -}; -var WhileStatementContext = class extends import_ParserRuleContext.ParserRuleContext { - expression() { - return this.getRuleContext(0, ExpressionContext); - } - statement() { - return this.getRuleContext(0, StatementContext); - } - constructor(parent, invokingState) { - super(parent, invokingState); - } - get ruleIndex() { - return SolidityParser.RULE_whileStatement; - } - enterRule(listener) { - if (listener.enterWhileStatement) { - listener.enterWhileStatement(this); - } - } - exitRule(listener) { - if (listener.exitWhileStatement) { - listener.exitWhileStatement(this); - } - } - accept(visitor) { - if (visitor.visitWhileStatement) { - return visitor.visitWhileStatement(this); - } else { - return 
visitor.visitChildren(this); - } - } -}; -var SimpleStatementContext = class extends import_ParserRuleContext.ParserRuleContext { - variableDeclarationStatement() { - return this.tryGetRuleContext(0, VariableDeclarationStatementContext); - } - expressionStatement() { - return this.tryGetRuleContext(0, ExpressionStatementContext); - } - constructor(parent, invokingState) { - super(parent, invokingState); - } - get ruleIndex() { - return SolidityParser.RULE_simpleStatement; - } - enterRule(listener) { - if (listener.enterSimpleStatement) { - listener.enterSimpleStatement(this); - } - } - exitRule(listener) { - if (listener.exitSimpleStatement) { - listener.exitSimpleStatement(this); - } - } - accept(visitor) { - if (visitor.visitSimpleStatement) { - return visitor.visitSimpleStatement(this); - } else { - return visitor.visitChildren(this); - } - } -}; -var UncheckedStatementContext = class extends import_ParserRuleContext.ParserRuleContext { - block() { - return this.getRuleContext(0, BlockContext); - } - constructor(parent, invokingState) { - super(parent, invokingState); - } - get ruleIndex() { - return SolidityParser.RULE_uncheckedStatement; - } - enterRule(listener) { - if (listener.enterUncheckedStatement) { - listener.enterUncheckedStatement(this); - } - } - exitRule(listener) { - if (listener.exitUncheckedStatement) { - listener.exitUncheckedStatement(this); - } - } - accept(visitor) { - if (visitor.visitUncheckedStatement) { - return visitor.visitUncheckedStatement(this); - } else { - return visitor.visitChildren(this); - } - } -}; -var ForStatementContext = class extends import_ParserRuleContext.ParserRuleContext { - statement() { - return this.getRuleContext(0, StatementContext); - } - simpleStatement() { - return this.tryGetRuleContext(0, SimpleStatementContext); - } - expressionStatement() { - return this.tryGetRuleContext(0, ExpressionStatementContext); - } - expression() { - return this.tryGetRuleContext(0, ExpressionContext); - } - constructor(parent, 
invokingState) { - super(parent, invokingState); - } - get ruleIndex() { - return SolidityParser.RULE_forStatement; - } - enterRule(listener) { - if (listener.enterForStatement) { - listener.enterForStatement(this); - } - } - exitRule(listener) { - if (listener.exitForStatement) { - listener.exitForStatement(this); - } - } - accept(visitor) { - if (visitor.visitForStatement) { - return visitor.visitForStatement(this); - } else { - return visitor.visitChildren(this); - } - } -}; -var InlineAssemblyStatementContext = class extends import_ParserRuleContext.ParserRuleContext { - assemblyBlock() { - return this.getRuleContext(0, AssemblyBlockContext); - } - StringLiteralFragment() { - return this.tryGetToken(SolidityParser.StringLiteralFragment, 0); - } - constructor(parent, invokingState) { - super(parent, invokingState); - } - get ruleIndex() { - return SolidityParser.RULE_inlineAssemblyStatement; - } - enterRule(listener) { - if (listener.enterInlineAssemblyStatement) { - listener.enterInlineAssemblyStatement(this); - } - } - exitRule(listener) { - if (listener.exitInlineAssemblyStatement) { - listener.exitInlineAssemblyStatement(this); - } - } - accept(visitor) { - if (visitor.visitInlineAssemblyStatement) { - return visitor.visitInlineAssemblyStatement(this); - } else { - return visitor.visitChildren(this); - } - } -}; -var DoWhileStatementContext = class extends import_ParserRuleContext.ParserRuleContext { - statement() { - return this.getRuleContext(0, StatementContext); - } - expression() { - return this.getRuleContext(0, ExpressionContext); - } - constructor(parent, invokingState) { - super(parent, invokingState); - } - get ruleIndex() { - return SolidityParser.RULE_doWhileStatement; - } - enterRule(listener) { - if (listener.enterDoWhileStatement) { - listener.enterDoWhileStatement(this); - } - } - exitRule(listener) { - if (listener.exitDoWhileStatement) { - listener.exitDoWhileStatement(this); - } - } - accept(visitor) { - if (visitor.visitDoWhileStatement) 
{ - return visitor.visitDoWhileStatement(this); - } else { - return visitor.visitChildren(this); - } - } -}; -var ContinueStatementContext = class extends import_ParserRuleContext.ParserRuleContext { - ContinueKeyword() { - return this.getToken(SolidityParser.ContinueKeyword, 0); - } - constructor(parent, invokingState) { - super(parent, invokingState); - } - get ruleIndex() { - return SolidityParser.RULE_continueStatement; - } - enterRule(listener) { - if (listener.enterContinueStatement) { - listener.enterContinueStatement(this); - } - } - exitRule(listener) { - if (listener.exitContinueStatement) { - listener.exitContinueStatement(this); - } - } - accept(visitor) { - if (visitor.visitContinueStatement) { - return visitor.visitContinueStatement(this); - } else { - return visitor.visitChildren(this); - } - } -}; -var BreakStatementContext = class extends import_ParserRuleContext.ParserRuleContext { - BreakKeyword() { - return this.getToken(SolidityParser.BreakKeyword, 0); - } - constructor(parent, invokingState) { - super(parent, invokingState); - } - get ruleIndex() { - return SolidityParser.RULE_breakStatement; - } - enterRule(listener) { - if (listener.enterBreakStatement) { - listener.enterBreakStatement(this); - } - } - exitRule(listener) { - if (listener.exitBreakStatement) { - listener.exitBreakStatement(this); - } - } - accept(visitor) { - if (visitor.visitBreakStatement) { - return visitor.visitBreakStatement(this); - } else { - return visitor.visitChildren(this); - } - } -}; -var ReturnStatementContext = class extends import_ParserRuleContext.ParserRuleContext { - expression() { - return this.tryGetRuleContext(0, ExpressionContext); - } - constructor(parent, invokingState) { - super(parent, invokingState); - } - get ruleIndex() { - return SolidityParser.RULE_returnStatement; - } - enterRule(listener) { - if (listener.enterReturnStatement) { - listener.enterReturnStatement(this); - } - } - exitRule(listener) { - if (listener.exitReturnStatement) { - 
listener.exitReturnStatement(this); - } - } - accept(visitor) { - if (visitor.visitReturnStatement) { - return visitor.visitReturnStatement(this); - } else { - return visitor.visitChildren(this); - } - } -}; -var ThrowStatementContext = class extends import_ParserRuleContext.ParserRuleContext { - constructor(parent, invokingState) { - super(parent, invokingState); - } - get ruleIndex() { - return SolidityParser.RULE_throwStatement; - } - enterRule(listener) { - if (listener.enterThrowStatement) { - listener.enterThrowStatement(this); - } - } - exitRule(listener) { - if (listener.exitThrowStatement) { - listener.exitThrowStatement(this); - } - } - accept(visitor) { - if (visitor.visitThrowStatement) { - return visitor.visitThrowStatement(this); - } else { - return visitor.visitChildren(this); - } - } -}; -var EmitStatementContext = class extends import_ParserRuleContext.ParserRuleContext { - functionCall() { - return this.getRuleContext(0, FunctionCallContext); - } - constructor(parent, invokingState) { - super(parent, invokingState); - } - get ruleIndex() { - return SolidityParser.RULE_emitStatement; - } - enterRule(listener) { - if (listener.enterEmitStatement) { - listener.enterEmitStatement(this); - } - } - exitRule(listener) { - if (listener.exitEmitStatement) { - listener.exitEmitStatement(this); - } - } - accept(visitor) { - if (visitor.visitEmitStatement) { - return visitor.visitEmitStatement(this); - } else { - return visitor.visitChildren(this); - } - } -}; -var RevertStatementContext = class extends import_ParserRuleContext.ParserRuleContext { - functionCall() { - return this.getRuleContext(0, FunctionCallContext); - } - constructor(parent, invokingState) { - super(parent, invokingState); - } - get ruleIndex() { - return SolidityParser.RULE_revertStatement; - } - enterRule(listener) { - if (listener.enterRevertStatement) { - listener.enterRevertStatement(this); - } - } - exitRule(listener) { - if (listener.exitRevertStatement) { - 
listener.exitRevertStatement(this); - } - } - accept(visitor) { - if (visitor.visitRevertStatement) { - return visitor.visitRevertStatement(this); - } else { - return visitor.visitChildren(this); - } - } -}; -var VariableDeclarationStatementContext = class extends import_ParserRuleContext.ParserRuleContext { - identifierList() { - return this.tryGetRuleContext(0, IdentifierListContext); - } - variableDeclaration() { - return this.tryGetRuleContext(0, VariableDeclarationContext); - } - variableDeclarationList() { - return this.tryGetRuleContext(0, VariableDeclarationListContext); - } - expression() { - return this.tryGetRuleContext(0, ExpressionContext); - } - constructor(parent, invokingState) { - super(parent, invokingState); - } - get ruleIndex() { - return SolidityParser.RULE_variableDeclarationStatement; - } - enterRule(listener) { - if (listener.enterVariableDeclarationStatement) { - listener.enterVariableDeclarationStatement(this); - } - } - exitRule(listener) { - if (listener.exitVariableDeclarationStatement) { - listener.exitVariableDeclarationStatement(this); - } - } - accept(visitor) { - if (visitor.visitVariableDeclarationStatement) { - return visitor.visitVariableDeclarationStatement(this); - } else { - return visitor.visitChildren(this); - } - } -}; -var VariableDeclarationListContext = class extends import_ParserRuleContext.ParserRuleContext { - variableDeclaration(i) { - if (i === void 0) { - return this.getRuleContexts(VariableDeclarationContext); - } else { - return this.getRuleContext(i, VariableDeclarationContext); - } - } - constructor(parent, invokingState) { - super(parent, invokingState); - } - get ruleIndex() { - return SolidityParser.RULE_variableDeclarationList; - } - enterRule(listener) { - if (listener.enterVariableDeclarationList) { - listener.enterVariableDeclarationList(this); - } - } - exitRule(listener) { - if (listener.exitVariableDeclarationList) { - listener.exitVariableDeclarationList(this); - } - } - accept(visitor) { - if 
(visitor.visitVariableDeclarationList) { - return visitor.visitVariableDeclarationList(this); - } else { - return visitor.visitChildren(this); - } - } -}; -var IdentifierListContext = class extends import_ParserRuleContext.ParserRuleContext { - identifier(i) { - if (i === void 0) { - return this.getRuleContexts(IdentifierContext); - } else { - return this.getRuleContext(i, IdentifierContext); - } - } - constructor(parent, invokingState) { - super(parent, invokingState); - } - get ruleIndex() { - return SolidityParser.RULE_identifierList; - } - enterRule(listener) { - if (listener.enterIdentifierList) { - listener.enterIdentifierList(this); - } - } - exitRule(listener) { - if (listener.exitIdentifierList) { - listener.exitIdentifierList(this); - } - } - accept(visitor) { - if (visitor.visitIdentifierList) { - return visitor.visitIdentifierList(this); - } else { - return visitor.visitChildren(this); - } - } -}; -var ElementaryTypeNameContext = class extends import_ParserRuleContext.ParserRuleContext { - Int() { - return this.tryGetToken(SolidityParser.Int, 0); - } - Uint() { - return this.tryGetToken(SolidityParser.Uint, 0); - } - Byte() { - return this.tryGetToken(SolidityParser.Byte, 0); - } - Fixed() { - return this.tryGetToken(SolidityParser.Fixed, 0); - } - Ufixed() { - return this.tryGetToken(SolidityParser.Ufixed, 0); - } - constructor(parent, invokingState) { - super(parent, invokingState); - } - get ruleIndex() { - return SolidityParser.RULE_elementaryTypeName; - } - enterRule(listener) { - if (listener.enterElementaryTypeName) { - listener.enterElementaryTypeName(this); - } - } - exitRule(listener) { - if (listener.exitElementaryTypeName) { - listener.exitElementaryTypeName(this); - } - } - accept(visitor) { - if (visitor.visitElementaryTypeName) { - return visitor.visitElementaryTypeName(this); - } else { - return visitor.visitChildren(this); - } - } -}; -var ExpressionContext = class extends import_ParserRuleContext.ParserRuleContext { - expression(i) { - 
if (i === void 0) { - return this.getRuleContexts(ExpressionContext); - } else { - return this.getRuleContext(i, ExpressionContext); - } - } - typeName() { - return this.tryGetRuleContext(0, TypeNameContext); - } - identifier() { - return this.tryGetRuleContext(0, IdentifierContext); - } - nameValueList() { - return this.tryGetRuleContext(0, NameValueListContext); - } - functionCallArguments() { - return this.tryGetRuleContext(0, FunctionCallArgumentsContext); - } - primaryExpression() { - return this.tryGetRuleContext(0, PrimaryExpressionContext); - } - constructor(parent, invokingState) { - super(parent, invokingState); - } - get ruleIndex() { - return SolidityParser.RULE_expression; - } - enterRule(listener) { - if (listener.enterExpression) { - listener.enterExpression(this); - } - } - exitRule(listener) { - if (listener.exitExpression) { - listener.exitExpression(this); - } - } - accept(visitor) { - if (visitor.visitExpression) { - return visitor.visitExpression(this); - } else { - return visitor.visitChildren(this); - } - } -}; -var PrimaryExpressionContext = class extends import_ParserRuleContext.ParserRuleContext { - BooleanLiteral() { - return this.tryGetToken(SolidityParser.BooleanLiteral, 0); - } - numberLiteral() { - return this.tryGetRuleContext(0, NumberLiteralContext); - } - hexLiteral() { - return this.tryGetRuleContext(0, HexLiteralContext); - } - stringLiteral() { - return this.tryGetRuleContext(0, StringLiteralContext); - } - identifier() { - return this.tryGetRuleContext(0, IdentifierContext); - } - TypeKeyword() { - return this.tryGetToken(SolidityParser.TypeKeyword, 0); - } - PayableKeyword() { - return this.tryGetToken(SolidityParser.PayableKeyword, 0); - } - tupleExpression() { - return this.tryGetRuleContext(0, TupleExpressionContext); - } - typeNameExpression() { - return this.tryGetRuleContext(0, TypeNameExpressionContext); - } - constructor(parent, invokingState) { - super(parent, invokingState); - } - get ruleIndex() { - return 
SolidityParser.RULE_primaryExpression; - } - enterRule(listener) { - if (listener.enterPrimaryExpression) { - listener.enterPrimaryExpression(this); - } - } - exitRule(listener) { - if (listener.exitPrimaryExpression) { - listener.exitPrimaryExpression(this); - } - } - accept(visitor) { - if (visitor.visitPrimaryExpression) { - return visitor.visitPrimaryExpression(this); - } else { - return visitor.visitChildren(this); - } - } -}; -var ExpressionListContext = class extends import_ParserRuleContext.ParserRuleContext { - expression(i) { - if (i === void 0) { - return this.getRuleContexts(ExpressionContext); - } else { - return this.getRuleContext(i, ExpressionContext); - } - } - constructor(parent, invokingState) { - super(parent, invokingState); - } - get ruleIndex() { - return SolidityParser.RULE_expressionList; - } - enterRule(listener) { - if (listener.enterExpressionList) { - listener.enterExpressionList(this); - } - } - exitRule(listener) { - if (listener.exitExpressionList) { - listener.exitExpressionList(this); - } - } - accept(visitor) { - if (visitor.visitExpressionList) { - return visitor.visitExpressionList(this); - } else { - return visitor.visitChildren(this); - } - } -}; -var NameValueListContext = class extends import_ParserRuleContext.ParserRuleContext { - nameValue(i) { - if (i === void 0) { - return this.getRuleContexts(NameValueContext); - } else { - return this.getRuleContext(i, NameValueContext); - } - } - constructor(parent, invokingState) { - super(parent, invokingState); - } - get ruleIndex() { - return SolidityParser.RULE_nameValueList; - } - enterRule(listener) { - if (listener.enterNameValueList) { - listener.enterNameValueList(this); - } - } - exitRule(listener) { - if (listener.exitNameValueList) { - listener.exitNameValueList(this); - } - } - accept(visitor) { - if (visitor.visitNameValueList) { - return visitor.visitNameValueList(this); - } else { - return visitor.visitChildren(this); - } - } -}; -var NameValueContext = class extends 
import_ParserRuleContext.ParserRuleContext { - identifier() { - return this.getRuleContext(0, IdentifierContext); - } - expression() { - return this.getRuleContext(0, ExpressionContext); - } - constructor(parent, invokingState) { - super(parent, invokingState); - } - get ruleIndex() { - return SolidityParser.RULE_nameValue; - } - enterRule(listener) { - if (listener.enterNameValue) { - listener.enterNameValue(this); - } - } - exitRule(listener) { - if (listener.exitNameValue) { - listener.exitNameValue(this); - } - } - accept(visitor) { - if (visitor.visitNameValue) { - return visitor.visitNameValue(this); - } else { - return visitor.visitChildren(this); - } - } -}; -var FunctionCallArgumentsContext = class extends import_ParserRuleContext.ParserRuleContext { - nameValueList() { - return this.tryGetRuleContext(0, NameValueListContext); - } - expressionList() { - return this.tryGetRuleContext(0, ExpressionListContext); - } - constructor(parent, invokingState) { - super(parent, invokingState); - } - get ruleIndex() { - return SolidityParser.RULE_functionCallArguments; - } - enterRule(listener) { - if (listener.enterFunctionCallArguments) { - listener.enterFunctionCallArguments(this); - } - } - exitRule(listener) { - if (listener.exitFunctionCallArguments) { - listener.exitFunctionCallArguments(this); - } - } - accept(visitor) { - if (visitor.visitFunctionCallArguments) { - return visitor.visitFunctionCallArguments(this); - } else { - return visitor.visitChildren(this); - } - } -}; -var FunctionCallContext = class extends import_ParserRuleContext.ParserRuleContext { - expression() { - return this.getRuleContext(0, ExpressionContext); - } - functionCallArguments() { - return this.getRuleContext(0, FunctionCallArgumentsContext); - } - constructor(parent, invokingState) { - super(parent, invokingState); - } - get ruleIndex() { - return SolidityParser.RULE_functionCall; - } - enterRule(listener) { - if (listener.enterFunctionCall) { - listener.enterFunctionCall(this); - } 
- } - exitRule(listener) { - if (listener.exitFunctionCall) { - listener.exitFunctionCall(this); - } - } - accept(visitor) { - if (visitor.visitFunctionCall) { - return visitor.visitFunctionCall(this); - } else { - return visitor.visitChildren(this); - } - } -}; -var AssemblyBlockContext = class extends import_ParserRuleContext.ParserRuleContext { - assemblyItem(i) { - if (i === void 0) { - return this.getRuleContexts(AssemblyItemContext); - } else { - return this.getRuleContext(i, AssemblyItemContext); - } - } - constructor(parent, invokingState) { - super(parent, invokingState); - } - get ruleIndex() { - return SolidityParser.RULE_assemblyBlock; - } - enterRule(listener) { - if (listener.enterAssemblyBlock) { - listener.enterAssemblyBlock(this); - } - } - exitRule(listener) { - if (listener.exitAssemblyBlock) { - listener.exitAssemblyBlock(this); - } - } - accept(visitor) { - if (visitor.visitAssemblyBlock) { - return visitor.visitAssemblyBlock(this); - } else { - return visitor.visitChildren(this); - } - } -}; -var AssemblyItemContext = class extends import_ParserRuleContext.ParserRuleContext { - identifier() { - return this.tryGetRuleContext(0, IdentifierContext); - } - assemblyBlock() { - return this.tryGetRuleContext(0, AssemblyBlockContext); - } - assemblyExpression() { - return this.tryGetRuleContext(0, AssemblyExpressionContext); - } - assemblyLocalDefinition() { - return this.tryGetRuleContext(0, AssemblyLocalDefinitionContext); - } - assemblyAssignment() { - return this.tryGetRuleContext(0, AssemblyAssignmentContext); - } - assemblyStackAssignment() { - return this.tryGetRuleContext(0, AssemblyStackAssignmentContext); - } - labelDefinition() { - return this.tryGetRuleContext(0, LabelDefinitionContext); - } - assemblySwitch() { - return this.tryGetRuleContext(0, AssemblySwitchContext); - } - assemblyFunctionDefinition() { - return this.tryGetRuleContext(0, AssemblyFunctionDefinitionContext); - } - assemblyFor() { - return this.tryGetRuleContext(0, 
AssemblyForContext); - } - assemblyIf() { - return this.tryGetRuleContext(0, AssemblyIfContext); - } - BreakKeyword() { - return this.tryGetToken(SolidityParser.BreakKeyword, 0); - } - ContinueKeyword() { - return this.tryGetToken(SolidityParser.ContinueKeyword, 0); - } - LeaveKeyword() { - return this.tryGetToken(SolidityParser.LeaveKeyword, 0); - } - subAssembly() { - return this.tryGetRuleContext(0, SubAssemblyContext); - } - numberLiteral() { - return this.tryGetRuleContext(0, NumberLiteralContext); - } - stringLiteral() { - return this.tryGetRuleContext(0, StringLiteralContext); - } - hexLiteral() { - return this.tryGetRuleContext(0, HexLiteralContext); - } - constructor(parent, invokingState) { - super(parent, invokingState); - } - get ruleIndex() { - return SolidityParser.RULE_assemblyItem; - } - enterRule(listener) { - if (listener.enterAssemblyItem) { - listener.enterAssemblyItem(this); - } - } - exitRule(listener) { - if (listener.exitAssemblyItem) { - listener.exitAssemblyItem(this); - } - } - accept(visitor) { - if (visitor.visitAssemblyItem) { - return visitor.visitAssemblyItem(this); - } else { - return visitor.visitChildren(this); - } - } -}; -var AssemblyExpressionContext = class extends import_ParserRuleContext.ParserRuleContext { - assemblyCall() { - return this.tryGetRuleContext(0, AssemblyCallContext); - } - assemblyLiteral() { - return this.tryGetRuleContext(0, AssemblyLiteralContext); - } - assemblyMember() { - return this.tryGetRuleContext(0, AssemblyMemberContext); - } - constructor(parent, invokingState) { - super(parent, invokingState); - } - get ruleIndex() { - return SolidityParser.RULE_assemblyExpression; - } - enterRule(listener) { - if (listener.enterAssemblyExpression) { - listener.enterAssemblyExpression(this); - } - } - exitRule(listener) { - if (listener.exitAssemblyExpression) { - listener.exitAssemblyExpression(this); - } - } - accept(visitor) { - if (visitor.visitAssemblyExpression) { - return 
visitor.visitAssemblyExpression(this); - } else { - return visitor.visitChildren(this); - } - } -}; -var AssemblyMemberContext = class extends import_ParserRuleContext.ParserRuleContext { - identifier(i) { - if (i === void 0) { - return this.getRuleContexts(IdentifierContext); - } else { - return this.getRuleContext(i, IdentifierContext); - } - } - constructor(parent, invokingState) { - super(parent, invokingState); - } - get ruleIndex() { - return SolidityParser.RULE_assemblyMember; - } - enterRule(listener) { - if (listener.enterAssemblyMember) { - listener.enterAssemblyMember(this); - } - } - exitRule(listener) { - if (listener.exitAssemblyMember) { - listener.exitAssemblyMember(this); - } - } - accept(visitor) { - if (visitor.visitAssemblyMember) { - return visitor.visitAssemblyMember(this); - } else { - return visitor.visitChildren(this); - } - } -}; -var AssemblyCallContext = class extends import_ParserRuleContext.ParserRuleContext { - identifier() { - return this.tryGetRuleContext(0, IdentifierContext); - } - assemblyExpression(i) { - if (i === void 0) { - return this.getRuleContexts(AssemblyExpressionContext); - } else { - return this.getRuleContext(i, AssemblyExpressionContext); - } - } - constructor(parent, invokingState) { - super(parent, invokingState); - } - get ruleIndex() { - return SolidityParser.RULE_assemblyCall; - } - enterRule(listener) { - if (listener.enterAssemblyCall) { - listener.enterAssemblyCall(this); - } - } - exitRule(listener) { - if (listener.exitAssemblyCall) { - listener.exitAssemblyCall(this); - } - } - accept(visitor) { - if (visitor.visitAssemblyCall) { - return visitor.visitAssemblyCall(this); - } else { - return visitor.visitChildren(this); - } - } -}; -var AssemblyLocalDefinitionContext = class extends import_ParserRuleContext.ParserRuleContext { - assemblyIdentifierOrList() { - return this.getRuleContext(0, AssemblyIdentifierOrListContext); - } - assemblyExpression() { - return this.tryGetRuleContext(0, 
AssemblyExpressionContext); - } - constructor(parent, invokingState) { - super(parent, invokingState); - } - get ruleIndex() { - return SolidityParser.RULE_assemblyLocalDefinition; - } - enterRule(listener) { - if (listener.enterAssemblyLocalDefinition) { - listener.enterAssemblyLocalDefinition(this); - } - } - exitRule(listener) { - if (listener.exitAssemblyLocalDefinition) { - listener.exitAssemblyLocalDefinition(this); - } - } - accept(visitor) { - if (visitor.visitAssemblyLocalDefinition) { - return visitor.visitAssemblyLocalDefinition(this); - } else { - return visitor.visitChildren(this); - } - } -}; -var AssemblyAssignmentContext = class extends import_ParserRuleContext.ParserRuleContext { - assemblyIdentifierOrList() { - return this.getRuleContext(0, AssemblyIdentifierOrListContext); - } - assemblyExpression() { - return this.getRuleContext(0, AssemblyExpressionContext); - } - constructor(parent, invokingState) { - super(parent, invokingState); - } - get ruleIndex() { - return SolidityParser.RULE_assemblyAssignment; - } - enterRule(listener) { - if (listener.enterAssemblyAssignment) { - listener.enterAssemblyAssignment(this); - } - } - exitRule(listener) { - if (listener.exitAssemblyAssignment) { - listener.exitAssemblyAssignment(this); - } - } - accept(visitor) { - if (visitor.visitAssemblyAssignment) { - return visitor.visitAssemblyAssignment(this); - } else { - return visitor.visitChildren(this); - } - } -}; -var AssemblyIdentifierOrListContext = class extends import_ParserRuleContext.ParserRuleContext { - identifier() { - return this.tryGetRuleContext(0, IdentifierContext); - } - assemblyMember() { - return this.tryGetRuleContext(0, AssemblyMemberContext); - } - assemblyIdentifierList() { - return this.tryGetRuleContext(0, AssemblyIdentifierListContext); - } - constructor(parent, invokingState) { - super(parent, invokingState); - } - get ruleIndex() { - return SolidityParser.RULE_assemblyIdentifierOrList; - } - enterRule(listener) { - if 
(listener.enterAssemblyIdentifierOrList) { - listener.enterAssemblyIdentifierOrList(this); - } - } - exitRule(listener) { - if (listener.exitAssemblyIdentifierOrList) { - listener.exitAssemblyIdentifierOrList(this); - } - } - accept(visitor) { - if (visitor.visitAssemblyIdentifierOrList) { - return visitor.visitAssemblyIdentifierOrList(this); - } else { - return visitor.visitChildren(this); - } - } -}; -var AssemblyIdentifierListContext = class extends import_ParserRuleContext.ParserRuleContext { - identifier(i) { - if (i === void 0) { - return this.getRuleContexts(IdentifierContext); - } else { - return this.getRuleContext(i, IdentifierContext); - } - } - constructor(parent, invokingState) { - super(parent, invokingState); - } - get ruleIndex() { - return SolidityParser.RULE_assemblyIdentifierList; - } - enterRule(listener) { - if (listener.enterAssemblyIdentifierList) { - listener.enterAssemblyIdentifierList(this); - } - } - exitRule(listener) { - if (listener.exitAssemblyIdentifierList) { - listener.exitAssemblyIdentifierList(this); - } - } - accept(visitor) { - if (visitor.visitAssemblyIdentifierList) { - return visitor.visitAssemblyIdentifierList(this); - } else { - return visitor.visitChildren(this); - } - } -}; -var AssemblyStackAssignmentContext = class extends import_ParserRuleContext.ParserRuleContext { - identifier() { - return this.getRuleContext(0, IdentifierContext); - } - constructor(parent, invokingState) { - super(parent, invokingState); - } - get ruleIndex() { - return SolidityParser.RULE_assemblyStackAssignment; - } - enterRule(listener) { - if (listener.enterAssemblyStackAssignment) { - listener.enterAssemblyStackAssignment(this); - } - } - exitRule(listener) { - if (listener.exitAssemblyStackAssignment) { - listener.exitAssemblyStackAssignment(this); - } - } - accept(visitor) { - if (visitor.visitAssemblyStackAssignment) { - return visitor.visitAssemblyStackAssignment(this); - } else { - return visitor.visitChildren(this); - } - } -}; -var 
LabelDefinitionContext = class extends import_ParserRuleContext.ParserRuleContext { - identifier() { - return this.getRuleContext(0, IdentifierContext); - } - constructor(parent, invokingState) { - super(parent, invokingState); - } - get ruleIndex() { - return SolidityParser.RULE_labelDefinition; - } - enterRule(listener) { - if (listener.enterLabelDefinition) { - listener.enterLabelDefinition(this); - } - } - exitRule(listener) { - if (listener.exitLabelDefinition) { - listener.exitLabelDefinition(this); - } - } - accept(visitor) { - if (visitor.visitLabelDefinition) { - return visitor.visitLabelDefinition(this); - } else { - return visitor.visitChildren(this); - } - } -}; -var AssemblySwitchContext = class extends import_ParserRuleContext.ParserRuleContext { - assemblyExpression() { - return this.getRuleContext(0, AssemblyExpressionContext); - } - assemblyCase(i) { - if (i === void 0) { - return this.getRuleContexts(AssemblyCaseContext); - } else { - return this.getRuleContext(i, AssemblyCaseContext); - } - } - constructor(parent, invokingState) { - super(parent, invokingState); - } - get ruleIndex() { - return SolidityParser.RULE_assemblySwitch; - } - enterRule(listener) { - if (listener.enterAssemblySwitch) { - listener.enterAssemblySwitch(this); - } - } - exitRule(listener) { - if (listener.exitAssemblySwitch) { - listener.exitAssemblySwitch(this); - } - } - accept(visitor) { - if (visitor.visitAssemblySwitch) { - return visitor.visitAssemblySwitch(this); - } else { - return visitor.visitChildren(this); - } - } -}; -var AssemblyCaseContext = class extends import_ParserRuleContext.ParserRuleContext { - assemblyLiteral() { - return this.tryGetRuleContext(0, AssemblyLiteralContext); - } - assemblyBlock() { - return this.getRuleContext(0, AssemblyBlockContext); - } - constructor(parent, invokingState) { - super(parent, invokingState); - } - get ruleIndex() { - return SolidityParser.RULE_assemblyCase; - } - enterRule(listener) { - if (listener.enterAssemblyCase) { 
- listener.enterAssemblyCase(this); - } - } - exitRule(listener) { - if (listener.exitAssemblyCase) { - listener.exitAssemblyCase(this); - } - } - accept(visitor) { - if (visitor.visitAssemblyCase) { - return visitor.visitAssemblyCase(this); - } else { - return visitor.visitChildren(this); - } - } -}; -var AssemblyFunctionDefinitionContext = class extends import_ParserRuleContext.ParserRuleContext { - identifier() { - return this.getRuleContext(0, IdentifierContext); - } - assemblyBlock() { - return this.getRuleContext(0, AssemblyBlockContext); - } - assemblyIdentifierList() { - return this.tryGetRuleContext(0, AssemblyIdentifierListContext); - } - assemblyFunctionReturns() { - return this.tryGetRuleContext(0, AssemblyFunctionReturnsContext); - } - constructor(parent, invokingState) { - super(parent, invokingState); - } - get ruleIndex() { - return SolidityParser.RULE_assemblyFunctionDefinition; - } - enterRule(listener) { - if (listener.enterAssemblyFunctionDefinition) { - listener.enterAssemblyFunctionDefinition(this); - } - } - exitRule(listener) { - if (listener.exitAssemblyFunctionDefinition) { - listener.exitAssemblyFunctionDefinition(this); - } - } - accept(visitor) { - if (visitor.visitAssemblyFunctionDefinition) { - return visitor.visitAssemblyFunctionDefinition(this); - } else { - return visitor.visitChildren(this); - } - } -}; -var AssemblyFunctionReturnsContext = class extends import_ParserRuleContext.ParserRuleContext { - assemblyIdentifierList() { - return this.tryGetRuleContext(0, AssemblyIdentifierListContext); - } - constructor(parent, invokingState) { - super(parent, invokingState); - } - get ruleIndex() { - return SolidityParser.RULE_assemblyFunctionReturns; - } - enterRule(listener) { - if (listener.enterAssemblyFunctionReturns) { - listener.enterAssemblyFunctionReturns(this); - } - } - exitRule(listener) { - if (listener.exitAssemblyFunctionReturns) { - listener.exitAssemblyFunctionReturns(this); - } - } - accept(visitor) { - if 
(visitor.visitAssemblyFunctionReturns) { - return visitor.visitAssemblyFunctionReturns(this); - } else { - return visitor.visitChildren(this); - } - } -}; -var AssemblyForContext = class extends import_ParserRuleContext.ParserRuleContext { - assemblyExpression(i) { - if (i === void 0) { - return this.getRuleContexts(AssemblyExpressionContext); - } else { - return this.getRuleContext(i, AssemblyExpressionContext); - } - } - assemblyBlock(i) { - if (i === void 0) { - return this.getRuleContexts(AssemblyBlockContext); - } else { - return this.getRuleContext(i, AssemblyBlockContext); - } - } - constructor(parent, invokingState) { - super(parent, invokingState); - } - get ruleIndex() { - return SolidityParser.RULE_assemblyFor; - } - enterRule(listener) { - if (listener.enterAssemblyFor) { - listener.enterAssemblyFor(this); - } - } - exitRule(listener) { - if (listener.exitAssemblyFor) { - listener.exitAssemblyFor(this); - } - } - accept(visitor) { - if (visitor.visitAssemblyFor) { - return visitor.visitAssemblyFor(this); - } else { - return visitor.visitChildren(this); - } - } -}; -var AssemblyIfContext = class extends import_ParserRuleContext.ParserRuleContext { - assemblyExpression() { - return this.getRuleContext(0, AssemblyExpressionContext); - } - assemblyBlock() { - return this.getRuleContext(0, AssemblyBlockContext); - } - constructor(parent, invokingState) { - super(parent, invokingState); - } - get ruleIndex() { - return SolidityParser.RULE_assemblyIf; - } - enterRule(listener) { - if (listener.enterAssemblyIf) { - listener.enterAssemblyIf(this); - } - } - exitRule(listener) { - if (listener.exitAssemblyIf) { - listener.exitAssemblyIf(this); - } - } - accept(visitor) { - if (visitor.visitAssemblyIf) { - return visitor.visitAssemblyIf(this); - } else { - return visitor.visitChildren(this); - } - } -}; -var AssemblyLiteralContext = class extends import_ParserRuleContext.ParserRuleContext { - stringLiteral() { - return this.tryGetRuleContext(0, 
StringLiteralContext); - } - DecimalNumber() { - return this.tryGetToken(SolidityParser.DecimalNumber, 0); - } - HexNumber() { - return this.tryGetToken(SolidityParser.HexNumber, 0); - } - hexLiteral() { - return this.tryGetRuleContext(0, HexLiteralContext); - } - constructor(parent, invokingState) { - super(parent, invokingState); - } - get ruleIndex() { - return SolidityParser.RULE_assemblyLiteral; - } - enterRule(listener) { - if (listener.enterAssemblyLiteral) { - listener.enterAssemblyLiteral(this); - } - } - exitRule(listener) { - if (listener.exitAssemblyLiteral) { - listener.exitAssemblyLiteral(this); - } - } - accept(visitor) { - if (visitor.visitAssemblyLiteral) { - return visitor.visitAssemblyLiteral(this); - } else { - return visitor.visitChildren(this); - } - } -}; -var SubAssemblyContext = class extends import_ParserRuleContext.ParserRuleContext { - identifier() { - return this.getRuleContext(0, IdentifierContext); - } - assemblyBlock() { - return this.getRuleContext(0, AssemblyBlockContext); - } - constructor(parent, invokingState) { - super(parent, invokingState); - } - get ruleIndex() { - return SolidityParser.RULE_subAssembly; - } - enterRule(listener) { - if (listener.enterSubAssembly) { - listener.enterSubAssembly(this); - } - } - exitRule(listener) { - if (listener.exitSubAssembly) { - listener.exitSubAssembly(this); - } - } - accept(visitor) { - if (visitor.visitSubAssembly) { - return visitor.visitSubAssembly(this); - } else { - return visitor.visitChildren(this); - } - } -}; -var TupleExpressionContext = class extends import_ParserRuleContext.ParserRuleContext { - expression(i) { - if (i === void 0) { - return this.getRuleContexts(ExpressionContext); - } else { - return this.getRuleContext(i, ExpressionContext); - } - } - constructor(parent, invokingState) { - super(parent, invokingState); - } - get ruleIndex() { - return SolidityParser.RULE_tupleExpression; - } - enterRule(listener) { - if (listener.enterTupleExpression) { - 
listener.enterTupleExpression(this); - } - } - exitRule(listener) { - if (listener.exitTupleExpression) { - listener.exitTupleExpression(this); - } - } - accept(visitor) { - if (visitor.visitTupleExpression) { - return visitor.visitTupleExpression(this); - } else { - return visitor.visitChildren(this); - } - } -}; -var TypeNameExpressionContext = class extends import_ParserRuleContext.ParserRuleContext { - elementaryTypeName() { - return this.tryGetRuleContext(0, ElementaryTypeNameContext); - } - userDefinedTypeName() { - return this.tryGetRuleContext(0, UserDefinedTypeNameContext); - } - constructor(parent, invokingState) { - super(parent, invokingState); - } - get ruleIndex() { - return SolidityParser.RULE_typeNameExpression; - } - enterRule(listener) { - if (listener.enterTypeNameExpression) { - listener.enterTypeNameExpression(this); - } - } - exitRule(listener) { - if (listener.exitTypeNameExpression) { - listener.exitTypeNameExpression(this); - } - } - accept(visitor) { - if (visitor.visitTypeNameExpression) { - return visitor.visitTypeNameExpression(this); - } else { - return visitor.visitChildren(this); - } - } -}; -var NumberLiteralContext = class extends import_ParserRuleContext.ParserRuleContext { - DecimalNumber() { - return this.tryGetToken(SolidityParser.DecimalNumber, 0); - } - HexNumber() { - return this.tryGetToken(SolidityParser.HexNumber, 0); - } - NumberUnit() { - return this.tryGetToken(SolidityParser.NumberUnit, 0); - } - constructor(parent, invokingState) { - super(parent, invokingState); - } - get ruleIndex() { - return SolidityParser.RULE_numberLiteral; - } - enterRule(listener) { - if (listener.enterNumberLiteral) { - listener.enterNumberLiteral(this); - } - } - exitRule(listener) { - if (listener.exitNumberLiteral) { - listener.exitNumberLiteral(this); - } - } - accept(visitor) { - if (visitor.visitNumberLiteral) { - return visitor.visitNumberLiteral(this); - } else { - return visitor.visitChildren(this); - } - } -}; -var 
IdentifierContext = class extends import_ParserRuleContext.ParserRuleContext { - ReceiveKeyword() { - return this.tryGetToken(SolidityParser.ReceiveKeyword, 0); - } - ConstructorKeyword() { - return this.tryGetToken(SolidityParser.ConstructorKeyword, 0); - } - PayableKeyword() { - return this.tryGetToken(SolidityParser.PayableKeyword, 0); - } - LeaveKeyword() { - return this.tryGetToken(SolidityParser.LeaveKeyword, 0); - } - Identifier() { - return this.tryGetToken(SolidityParser.Identifier, 0); - } - constructor(parent, invokingState) { - super(parent, invokingState); - } - get ruleIndex() { - return SolidityParser.RULE_identifier; - } - enterRule(listener) { - if (listener.enterIdentifier) { - listener.enterIdentifier(this); - } - } - exitRule(listener) { - if (listener.exitIdentifier) { - listener.exitIdentifier(this); - } - } - accept(visitor) { - if (visitor.visitIdentifier) { - return visitor.visitIdentifier(this); - } else { - return visitor.visitChildren(this); - } - } -}; -var HexLiteralContext = class extends import_ParserRuleContext.ParserRuleContext { - HexLiteralFragment(i) { - if (i === void 0) { - return this.getTokens(SolidityParser.HexLiteralFragment); - } else { - return this.getToken(SolidityParser.HexLiteralFragment, i); - } - } - constructor(parent, invokingState) { - super(parent, invokingState); - } - get ruleIndex() { - return SolidityParser.RULE_hexLiteral; - } - enterRule(listener) { - if (listener.enterHexLiteral) { - listener.enterHexLiteral(this); - } - } - exitRule(listener) { - if (listener.exitHexLiteral) { - listener.exitHexLiteral(this); - } - } - accept(visitor) { - if (visitor.visitHexLiteral) { - return visitor.visitHexLiteral(this); - } else { - return visitor.visitChildren(this); - } - } -}; -var OverrideSpecifierContext = class extends import_ParserRuleContext.ParserRuleContext { - userDefinedTypeName(i) { - if (i === void 0) { - return this.getRuleContexts(UserDefinedTypeNameContext); - } else { - return 
this.getRuleContext(i, UserDefinedTypeNameContext); - } - } - constructor(parent, invokingState) { - super(parent, invokingState); - } - get ruleIndex() { - return SolidityParser.RULE_overrideSpecifier; - } - enterRule(listener) { - if (listener.enterOverrideSpecifier) { - listener.enterOverrideSpecifier(this); - } - } - exitRule(listener) { - if (listener.exitOverrideSpecifier) { - listener.exitOverrideSpecifier(this); - } - } - accept(visitor) { - if (visitor.visitOverrideSpecifier) { - return visitor.visitOverrideSpecifier(this); - } else { - return visitor.visitChildren(this); - } - } -}; -var StringLiteralContext = class extends import_ParserRuleContext.ParserRuleContext { - StringLiteralFragment(i) { - if (i === void 0) { - return this.getTokens(SolidityParser.StringLiteralFragment); - } else { - return this.getToken(SolidityParser.StringLiteralFragment, i); - } - } - constructor(parent, invokingState) { - super(parent, invokingState); - } - get ruleIndex() { - return SolidityParser.RULE_stringLiteral; - } - enterRule(listener) { - if (listener.enterStringLiteral) { - listener.enterStringLiteral(this); - } - } - exitRule(listener) { - if (listener.exitStringLiteral) { - listener.exitStringLiteral(this); - } - } - accept(visitor) { - if (visitor.visitStringLiteral) { - return visitor.visitStringLiteral(this); - } else { - return visitor.visitChildren(this); - } - } -}; - -// src/ast-types.ts -var astNodeTypes = [ - "SourceUnit", - "PragmaDirective", - "ImportDirective", - "ContractDefinition", - "InheritanceSpecifier", - "StateVariableDeclaration", - "UsingForDeclaration", - "StructDefinition", - "ModifierDefinition", - "ModifierInvocation", - "FunctionDefinition", - "EventDefinition", - "CustomErrorDefinition", - "RevertStatement", - "EnumValue", - "EnumDefinition", - "VariableDeclaration", - "UserDefinedTypeName", - "Mapping", - "ArrayTypeName", - "FunctionTypeName", - "Block", - "ExpressionStatement", - "IfStatement", - "WhileStatement", - "ForStatement", - 
"InlineAssemblyStatement", - "DoWhileStatement", - "ContinueStatement", - "Break", - "Continue", - "BreakStatement", - "ReturnStatement", - "EmitStatement", - "ThrowStatement", - "VariableDeclarationStatement", - "ElementaryTypeName", - "FunctionCall", - "AssemblyBlock", - "AssemblyCall", - "AssemblyLocalDefinition", - "AssemblyAssignment", - "AssemblyStackAssignment", - "LabelDefinition", - "AssemblySwitch", - "AssemblyCase", - "AssemblyFunctionDefinition", - "AssemblyFunctionReturns", - "AssemblyFor", - "AssemblyIf", - "SubAssembly", - "TupleExpression", - "TypeNameExpression", - "NameValueExpression", - "BooleanLiteral", - "NumberLiteral", - "Identifier", - "BinaryOperation", - "UnaryOperation", - "NewExpression", - "Conditional", - "StringLiteral", - "HexLiteral", - "HexNumber", - "DecimalNumber", - "MemberAccess", - "IndexAccess", - "IndexRangeAccess", - "NameValueList", - "UncheckedStatement", - "TryStatement", - "CatchClause", - "FileLevelConstant", - "AssemblyMemberAccess", - "TypeDefinition" -]; -var binaryOpValues = [ - "+", - "-", - "*", - "/", - "**", - "%", - "<<", - ">>", - "&&", - "||", - ",,", - "&", - ",", - "^", - "<", - ">", - "<=", - ">=", - "==", - "!=", - "=", - ",=", - "^=", - "&=", - "<<=", - ">>=", - "+=", - "-=", - "*=", - "/=", - "%=", - "|", - "|=" -]; -var unaryOpValues = [ - "-", - "+", - "++", - "--", - "~", - "after", - "delete", - "!" -]; - -// src/ASTBuilder.ts -var import_AbstractParseTreeVisitor = __toModule(require_AbstractParseTreeVisitor()); -var import_ErrorNode = __toModule(require_ErrorNode()); -var ASTBuilder = class extends import_AbstractParseTreeVisitor.AbstractParseTreeVisitor { - constructor(options) { - super(); - this.options = options; - this.result = null; - } - defaultResult() { - throw new Error("Unknown node"); - } - aggregateResult() { - return {type: ""}; - } - visitSourceUnit(ctx) { - var _a; - const children = ((_a = ctx.children) != null ? 
_a : []).filter((x) => !(x instanceof import_ErrorNode.ErrorNode)); - const node = { - type: "SourceUnit", - children: children.slice(0, -1).map((child) => this.visit(child)) - }; - const result = this._addMeta(node, ctx); - this.result = result; - return result; - } - visitContractPart(ctx) { - return this.visit(ctx.getChild(0)); - } - visitContractDefinition(ctx) { - const name = this._toText(ctx.identifier()); - const kind = this._toText(ctx.getChild(0)); - this._currentContract = name; - const node = { - type: "ContractDefinition", - name, - baseContracts: ctx.inheritanceSpecifier().map((x) => this.visitInheritanceSpecifier(x)), - subNodes: ctx.contractPart().map((x) => this.visit(x)), - kind - }; - return this._addMeta(node, ctx); - } - visitStateVariableDeclaration(ctx) { - const type = this.visitTypeName(ctx.typeName()); - const iden = ctx.identifier(); - const name = this._toText(iden); - let expression = null; - const ctxExpression = ctx.expression(); - if (ctxExpression) { - expression = this.visitExpression(ctxExpression); - } - let visibility = "default"; - if (ctx.InternalKeyword().length > 0) { - visibility = "internal"; - } else if (ctx.PublicKeyword().length > 0) { - visibility = "public"; - } else if (ctx.PrivateKeyword().length > 0) { - visibility = "private"; - } - let isDeclaredConst = false; - if (ctx.ConstantKeyword().length > 0) { - isDeclaredConst = true; - } - let override; - const overrideSpecifier = ctx.overrideSpecifier(); - if (overrideSpecifier.length === 0) { - override = null; - } else { - override = overrideSpecifier[0].userDefinedTypeName().map((x) => this.visitUserDefinedTypeName(x)); - } - let isImmutable = false; - if (ctx.ImmutableKeyword().length > 0) { - isImmutable = true; - } - const decl = { - type: "VariableDeclaration", - typeName: type, - name, - identifier: this.visitIdentifier(iden), - expression, - visibility, - isStateVar: true, - isDeclaredConst, - isIndexed: false, - isImmutable, - override, - storageLocation: 
null - }; - const node = { - type: "StateVariableDeclaration", - variables: [this._addMeta(decl, ctx)], - initialValue: expression - }; - return this._addMeta(node, ctx); - } - visitVariableDeclaration(ctx) { - let storageLocation = null; - const ctxStorageLocation = ctx.storageLocation(); - if (ctxStorageLocation) { - storageLocation = this._toText(ctxStorageLocation); - } - const identifierCtx = ctx.identifier(); - const node = { - type: "VariableDeclaration", - typeName: this.visitTypeName(ctx.typeName()), - name: this._toText(identifierCtx), - identifier: this.visitIdentifier(identifierCtx), - storageLocation, - isStateVar: false, - isIndexed: false, - expression: null - }; - return this._addMeta(node, ctx); - } - visitVariableDeclarationStatement(ctx) { - let variables = []; - const ctxVariableDeclaration = ctx.variableDeclaration(); - const ctxIdentifierList = ctx.identifierList(); - const ctxVariableDeclarationList = ctx.variableDeclarationList(); - if (ctxVariableDeclaration !== void 0) { - variables = [this.visitVariableDeclaration(ctxVariableDeclaration)]; - } else if (ctxIdentifierList !== void 0) { - variables = this.buildIdentifierList(ctxIdentifierList); - } else if (ctxVariableDeclarationList) { - variables = this.buildVariableDeclarationList(ctxVariableDeclarationList); - } - let initialValue = null; - const ctxExpression = ctx.expression(); - if (ctxExpression) { - initialValue = this.visitExpression(ctxExpression); - } - const node = { - type: "VariableDeclarationStatement", - variables, - initialValue - }; - return this._addMeta(node, ctx); - } - visitStatement(ctx) { - return this.visit(ctx.getChild(0)); - } - visitSimpleStatement(ctx) { - return this.visit(ctx.getChild(0)); - } - visitEventDefinition(ctx) { - const parameters = ctx.eventParameterList().eventParameter().map((paramCtx) => { - const type = this.visitTypeName(paramCtx.typeName()); - let name = null; - const paramCtxIdentifier = paramCtx.identifier(); - if (paramCtxIdentifier) { - 
name = this._toText(paramCtxIdentifier); - } - const node2 = { - type: "VariableDeclaration", - typeName: type, - name, - identifier: paramCtxIdentifier !== void 0 ? this.visitIdentifier(paramCtxIdentifier) : null, - isStateVar: false, - isIndexed: paramCtx.IndexedKeyword() !== void 0, - storageLocation: null, - expression: null - }; - return this._addMeta(node2, paramCtx); - }); - const node = { - type: "EventDefinition", - name: this._toText(ctx.identifier()), - parameters, - isAnonymous: ctx.AnonymousKeyword() !== void 0 - }; - return this._addMeta(node, ctx); - } - visitBlock(ctx) { - const node = { - type: "Block", - statements: ctx.statement().map((x) => this.visitStatement(x)) - }; - return this._addMeta(node, ctx); - } - visitParameter(ctx) { - let storageLocation = null; - const ctxStorageLocation = ctx.storageLocation(); - if (ctxStorageLocation !== void 0) { - storageLocation = this._toText(ctxStorageLocation); - } - let name = null; - const ctxIdentifier = ctx.identifier(); - if (ctxIdentifier !== void 0) { - name = this._toText(ctxIdentifier); - } - const node = { - type: "VariableDeclaration", - typeName: this.visitTypeName(ctx.typeName()), - name, - identifier: ctxIdentifier !== void 0 ? 
this.visitIdentifier(ctxIdentifier) : null, - storageLocation, - isStateVar: false, - isIndexed: false, - expression: null - }; - return this._addMeta(node, ctx); - } - visitFunctionDefinition(ctx) { - let isConstructor = false; - let isFallback = false; - let isReceiveEther = false; - let isVirtual = false; - let name = null; - let parameters = []; - let returnParameters = null; - let visibility = "default"; - let block = null; - const ctxBlock = ctx.block(); - if (ctxBlock !== void 0) { - block = this.visitBlock(ctxBlock); - } - const modifiers = ctx.modifierList().modifierInvocation().map((mod) => this.visitModifierInvocation(mod)); - let stateMutability = null; - if (ctx.modifierList().stateMutability().length > 0) { - stateMutability = this._stateMutabilityToText(ctx.modifierList().stateMutability(0)); - } - const ctxReturnParameters = ctx.returnParameters(); - switch (this._toText(ctx.functionDescriptor().getChild(0))) { - case "constructor": - parameters = ctx.parameterList().parameter().map((x) => this.visit(x)); - if (ctx.modifierList().InternalKeyword().length > 0) { - visibility = "internal"; - } else if (ctx.modifierList().PublicKeyword().length > 0) { - visibility = "public"; - } else { - visibility = "default"; - } - isConstructor = true; - break; - case "fallback": - visibility = "external"; - isFallback = true; - break; - case "receive": - visibility = "external"; - isReceiveEther = true; - break; - case "function": { - const identifier = ctx.functionDescriptor().identifier(); - name = identifier !== void 0 ? this._toText(identifier) : ""; - parameters = ctx.parameterList().parameter().map((x) => this.visit(x)); - returnParameters = ctxReturnParameters !== void 0 ? 
this.visitReturnParameters(ctxReturnParameters) : null; - if (ctx.modifierList().ExternalKeyword().length > 0) { - visibility = "external"; - } else if (ctx.modifierList().InternalKeyword().length > 0) { - visibility = "internal"; - } else if (ctx.modifierList().PublicKeyword().length > 0) { - visibility = "public"; - } else if (ctx.modifierList().PrivateKeyword().length > 0) { - visibility = "private"; - } - isConstructor = name === this._currentContract; - isFallback = name === ""; - break; - } - } - if (ctx.modifierList().VirtualKeyword().length > 0) { - isVirtual = true; - } - let override; - const overrideSpecifier = ctx.modifierList().overrideSpecifier(); - if (overrideSpecifier.length === 0) { - override = null; - } else { - override = overrideSpecifier[0].userDefinedTypeName().map((x) => this.visitUserDefinedTypeName(x)); - } - const node = { - type: "FunctionDefinition", - name, - parameters, - returnParameters, - body: block, - visibility, - modifiers, - override, - isConstructor, - isReceiveEther, - isFallback, - isVirtual, - stateMutability - }; - return this._addMeta(node, ctx); - } - visitEnumDefinition(ctx) { - const node = { - type: "EnumDefinition", - name: this._toText(ctx.identifier()), - members: ctx.enumValue().map((x) => this.visitEnumValue(x)) - }; - return this._addMeta(node, ctx); - } - visitEnumValue(ctx) { - const node = { - type: "EnumValue", - name: this._toText(ctx.identifier()) - }; - return this._addMeta(node, ctx); - } - visitElementaryTypeName(ctx) { - const node = { - type: "ElementaryTypeName", - name: this._toText(ctx), - stateMutability: null - }; - return this._addMeta(node, ctx); - } - visitIdentifier(ctx) { - const node = { - type: "Identifier", - name: this._toText(ctx) - }; - return this._addMeta(node, ctx); - } - visitTypeName(ctx) { - var _a; - if (ctx.children !== void 0 && ctx.children.length > 2) { - let length = null; - if (ctx.children.length === 4) { - const expression = ctx.expression(); - if (expression === void 
0) { - throw new Error("Assertion error: a typeName with 4 children should have an expression"); - } - length = this.visitExpression(expression); - } - const ctxTypeName = ctx.typeName(); - const node = { - type: "ArrayTypeName", - baseTypeName: this.visitTypeName(ctxTypeName), - length - }; - return this._addMeta(node, ctx); - } - if (((_a = ctx.children) == null ? void 0 : _a.length) === 2) { - const node = { - type: "ElementaryTypeName", - name: this._toText(ctx.getChild(0)), - stateMutability: this._toText(ctx.getChild(1)) - }; - return this._addMeta(node, ctx); - } - if (ctx.elementaryTypeName() !== void 0) { - return this.visitElementaryTypeName(ctx.elementaryTypeName()); - } - if (ctx.userDefinedTypeName() !== void 0) { - return this.visitUserDefinedTypeName(ctx.userDefinedTypeName()); - } - if (ctx.mapping() !== void 0) { - return this.visitMapping(ctx.mapping()); - } - if (ctx.functionTypeName() !== void 0) { - return this.visitFunctionTypeName(ctx.functionTypeName()); - } - throw new Error("Assertion error: unhandled type name case"); - } - visitUserDefinedTypeName(ctx) { - const node = { - type: "UserDefinedTypeName", - namePath: this._toText(ctx) - }; - return this._addMeta(node, ctx); - } - visitUsingForDeclaration(ctx) { - let typeName = null; - const ctxTypeName = ctx.typeName(); - if (ctxTypeName !== void 0) { - typeName = this.visitTypeName(ctxTypeName); - } - const node = { - type: "UsingForDeclaration", - typeName, - libraryName: this._toText(ctx.userDefinedTypeName()) - }; - return this._addMeta(node, ctx); - } - visitPragmaDirective(ctx) { - const versionContext = ctx.pragmaValue().version(); - let value = this._toText(ctx.pragmaValue()); - if ((versionContext == null ? 
void 0 : versionContext.children) !== void 0) { - value = versionContext.children.map((x) => this._toText(x)).join(" "); - } - const node = { - type: "PragmaDirective", - name: this._toText(ctx.pragmaName()), - value - }; - return this._addMeta(node, ctx); - } - visitInheritanceSpecifier(ctx) { - const exprList = ctx.expressionList(); - const args = exprList !== void 0 ? exprList.expression().map((x) => this.visitExpression(x)) : []; - const node = { - type: "InheritanceSpecifier", - baseName: this.visitUserDefinedTypeName(ctx.userDefinedTypeName()), - arguments: args - }; - return this._addMeta(node, ctx); - } - visitModifierInvocation(ctx) { - const exprList = ctx.expressionList(); - let args; - if (exprList != null) { - args = exprList.expression().map((x) => this.visit(x)); - } else if (ctx.children !== void 0 && ctx.children.length > 1) { - args = []; - } else { - args = null; - } - const node = { - type: "ModifierInvocation", - name: this._toText(ctx.identifier()), - arguments: args - }; - return this._addMeta(node, ctx); - } - visitTypeNameExpression(ctx) { - const ctxElementaryTypeName = ctx.elementaryTypeName(); - const ctxUserDefinedTypeName = ctx.userDefinedTypeName(); - let typeName; - if (ctxElementaryTypeName !== void 0) { - typeName = this.visitElementaryTypeName(ctxElementaryTypeName); - } else if (ctxUserDefinedTypeName !== void 0) { - typeName = this.visitUserDefinedTypeName(ctxUserDefinedTypeName); - } else { - throw new Error("Assertion error: either elementaryTypeName or userDefinedTypeName should be defined"); - } - const node = { - type: "TypeNameExpression", - typeName - }; - return this._addMeta(node, ctx); - } - visitFunctionTypeName(ctx) { - const parameterTypes = ctx.functionTypeParameterList(0).functionTypeParameter().map((typeCtx) => this.visitFunctionTypeParameter(typeCtx)); - let returnTypes = []; - if (ctx.functionTypeParameterList().length > 1) { - returnTypes = ctx.functionTypeParameterList(1).functionTypeParameter().map((typeCtx) 
=> this.visitFunctionTypeParameter(typeCtx)); - } - let visibility = "default"; - if (ctx.InternalKeyword().length > 0) { - visibility = "internal"; - } else if (ctx.ExternalKeyword().length > 0) { - visibility = "external"; - } - let stateMutability = null; - if (ctx.stateMutability().length > 0) { - stateMutability = this._toText(ctx.stateMutability(0)); - } - const node = { - type: "FunctionTypeName", - parameterTypes, - returnTypes, - visibility, - stateMutability - }; - return this._addMeta(node, ctx); - } - visitFunctionTypeParameter(ctx) { - let storageLocation = null; - if (ctx.storageLocation()) { - storageLocation = this._toText(ctx.storageLocation()); - } - const node = { - type: "VariableDeclaration", - typeName: this.visitTypeName(ctx.typeName()), - name: null, - identifier: null, - storageLocation, - isStateVar: false, - isIndexed: false, - expression: null - }; - return this._addMeta(node, ctx); - } - visitThrowStatement(ctx) { - const node = { - type: "ThrowStatement" - }; - return this._addMeta(node, ctx); - } - visitReturnStatement(ctx) { - let expression = null; - const ctxExpression = ctx.expression(); - if (ctxExpression) { - expression = this.visitExpression(ctxExpression); - } - const node = { - type: "ReturnStatement", - expression - }; - return this._addMeta(node, ctx); - } - visitEmitStatement(ctx) { - const node = { - type: "EmitStatement", - eventCall: this.visitFunctionCall(ctx.functionCall()) - }; - return this._addMeta(node, ctx); - } - visitCustomErrorDefinition(ctx) { - const node = { - type: "CustomErrorDefinition", - name: this._toText(ctx.identifier()), - parameters: this.visitParameterList(ctx.parameterList()) - }; - return this._addMeta(node, ctx); - } - visitTypeDefinition(ctx) { - const node = { - type: "TypeDefinition", - name: this._toText(ctx.identifier()), - definition: this.visitElementaryTypeName(ctx.elementaryTypeName()) - }; - return this._addMeta(node, ctx); - } - visitRevertStatement(ctx) { - const node = { - type: 
"RevertStatement", - revertCall: this.visitFunctionCall(ctx.functionCall()) - }; - return this._addMeta(node, ctx); - } - visitFunctionCall(ctx) { - let args = []; - const names = []; - const identifiers = []; - const ctxArgs = ctx.functionCallArguments(); - const ctxArgsExpressionList = ctxArgs.expressionList(); - const ctxArgsNameValueList = ctxArgs.nameValueList(); - if (ctxArgsExpressionList) { - args = ctxArgsExpressionList.expression().map((exprCtx) => this.visitExpression(exprCtx)); - } else if (ctxArgsNameValueList) { - for (const nameValue of ctxArgsNameValueList.nameValue()) { - args.push(this.visitExpression(nameValue.expression())); - names.push(this._toText(nameValue.identifier())); - identifiers.push(this.visitIdentifier(nameValue.identifier())); - } - } - const node = { - type: "FunctionCall", - expression: this.visitExpression(ctx.expression()), - arguments: args, - names, - identifiers - }; - return this._addMeta(node, ctx); - } - visitStructDefinition(ctx) { - const node = { - type: "StructDefinition", - name: this._toText(ctx.identifier()), - members: ctx.variableDeclaration().map((x) => this.visitVariableDeclaration(x)) - }; - return this._addMeta(node, ctx); - } - visitWhileStatement(ctx) { - const node = { - type: "WhileStatement", - condition: this.visitExpression(ctx.expression()), - body: this.visitStatement(ctx.statement()) - }; - return this._addMeta(node, ctx); - } - visitDoWhileStatement(ctx) { - const node = { - type: "DoWhileStatement", - condition: this.visitExpression(ctx.expression()), - body: this.visitStatement(ctx.statement()) - }; - return this._addMeta(node, ctx); - } - visitIfStatement(ctx) { - const trueBody = this.visitStatement(ctx.statement(0)); - let falseBody = null; - if (ctx.statement().length > 1) { - falseBody = this.visitStatement(ctx.statement(1)); - } - const node = { - type: "IfStatement", - condition: this.visitExpression(ctx.expression()), - trueBody, - falseBody - }; - return this._addMeta(node, ctx); - } - 
visitTryStatement(ctx) { - let returnParameters = null; - const ctxReturnParameters = ctx.returnParameters(); - if (ctxReturnParameters !== void 0) { - returnParameters = this.visitReturnParameters(ctxReturnParameters); - } - const catchClauses = ctx.catchClause().map((exprCtx) => this.visitCatchClause(exprCtx)); - const node = { - type: "TryStatement", - expression: this.visitExpression(ctx.expression()), - returnParameters, - body: this.visitBlock(ctx.block()), - catchClauses - }; - return this._addMeta(node, ctx); - } - visitCatchClause(ctx) { - let parameters = null; - if (ctx.parameterList()) { - parameters = this.visitParameterList(ctx.parameterList()); - } - if (ctx.identifier() && this._toText(ctx.identifier()) !== "Error" && this._toText(ctx.identifier()) !== "Panic") { - throw new Error('Expected "Error" or "Panic" identifier in catch clause'); - } - let kind = null; - const ctxIdentifier = ctx.identifier(); - if (ctxIdentifier !== void 0) { - kind = this._toText(ctxIdentifier); - } - const node = { - type: "CatchClause", - isReasonStringType: kind === "Error", - kind, - parameters, - body: this.visitBlock(ctx.block()) - }; - return this._addMeta(node, ctx); - } - visitExpressionStatement(ctx) { - if (!ctx) { - return null; - } - const node = { - type: "ExpressionStatement", - expression: this.visitExpression(ctx.expression()) - }; - return this._addMeta(node, ctx); - } - visitNumberLiteral(ctx) { - var _a; - const number = this._toText(ctx.getChild(0)); - let subdenomination = null; - if (((_a = ctx.children) == null ? 
void 0 : _a.length) === 2) { - subdenomination = this._toText(ctx.getChild(1)); - } - const node = { - type: "NumberLiteral", - number, - subdenomination - }; - return this._addMeta(node, ctx); - } - visitMappingKey(ctx) { - if (ctx.elementaryTypeName()) { - return this.visitElementaryTypeName(ctx.elementaryTypeName()); - } else if (ctx.userDefinedTypeName()) { - return this.visitUserDefinedTypeName(ctx.userDefinedTypeName()); - } else { - throw new Error("Expected MappingKey to have either elementaryTypeName or userDefinedTypeName"); - } - } - visitMapping(ctx) { - const node = { - type: "Mapping", - keyType: this.visitMappingKey(ctx.mappingKey()), - valueType: this.visitTypeName(ctx.typeName()) - }; - return this._addMeta(node, ctx); - } - visitModifierDefinition(ctx) { - let parameters = null; - if (ctx.parameterList()) { - parameters = this.visitParameterList(ctx.parameterList()); - } - let isVirtual = false; - if (ctx.VirtualKeyword().length > 0) { - isVirtual = true; - } - let override; - const overrideSpecifier = ctx.overrideSpecifier(); - if (overrideSpecifier.length === 0) { - override = null; - } else { - override = overrideSpecifier[0].userDefinedTypeName().map((x) => this.visitUserDefinedTypeName(x)); - } - let body = null; - const blockCtx = ctx.block(); - if (blockCtx !== void 0) { - body = this.visitBlock(blockCtx); - } - const node = { - type: "ModifierDefinition", - name: this._toText(ctx.identifier()), - parameters, - body, - isVirtual, - override - }; - return this._addMeta(node, ctx); - } - visitUncheckedStatement(ctx) { - const node = { - type: "UncheckedStatement", - block: this.visitBlock(ctx.block()) - }; - return this._addMeta(node, ctx); - } - visitExpression(ctx) { - let op; - switch (ctx.children.length) { - case 1: { - const primaryExpressionCtx = ctx.tryGetRuleContext(0, PrimaryExpressionContext); - if (primaryExpressionCtx === void 0) { - throw new Error("Assertion error: primary expression should exist when children length is 1"); - 
} - return this.visitPrimaryExpression(primaryExpressionCtx); - } - case 2: - op = this._toText(ctx.getChild(0)); - if (op === "new") { - const node = { - type: "NewExpression", - typeName: this.visitTypeName(ctx.typeName()) - }; - return this._addMeta(node, ctx); - } - if (unaryOpValues.includes(op)) { - const node = { - type: "UnaryOperation", - operator: op, - subExpression: this.visitExpression(ctx.getRuleContext(0, ExpressionContext)), - isPrefix: true - }; - return this._addMeta(node, ctx); - } - op = this._toText(ctx.getChild(1)); - if (["++", "--"].includes(op)) { - const node = { - type: "UnaryOperation", - operator: op, - subExpression: this.visitExpression(ctx.getRuleContext(0, ExpressionContext)), - isPrefix: false - }; - return this._addMeta(node, ctx); - } - break; - case 3: - if (this._toText(ctx.getChild(0)) === "(" && this._toText(ctx.getChild(2)) === ")") { - const node = { - type: "TupleExpression", - components: [ - this.visitExpression(ctx.getRuleContext(0, ExpressionContext)) - ], - isArray: false - }; - return this._addMeta(node, ctx); - } - op = this._toText(ctx.getChild(1)); - if (op === ".") { - const node = { - type: "MemberAccess", - expression: this.visitExpression(ctx.expression(0)), - memberName: this._toText(ctx.identifier()) - }; - return this._addMeta(node, ctx); - } - if (isBinOp(op)) { - const node = { - type: "BinaryOperation", - operator: op, - left: this.visitExpression(ctx.expression(0)), - right: this.visitExpression(ctx.expression(1)) - }; - return this._addMeta(node, ctx); - } - break; - case 4: - if (this._toText(ctx.getChild(1)) === "(" && this._toText(ctx.getChild(3)) === ")") { - let args = []; - const names = []; - const identifiers = []; - const ctxArgs = ctx.functionCallArguments(); - if (ctxArgs.expressionList()) { - args = ctxArgs.expressionList().expression().map((exprCtx) => this.visitExpression(exprCtx)); - } else if (ctxArgs.nameValueList()) { - for (const nameValue of ctxArgs.nameValueList().nameValue()) { - 
args.push(this.visitExpression(nameValue.expression())); - names.push(this._toText(nameValue.identifier())); - identifiers.push(this.visitIdentifier(nameValue.identifier())); - } - } - const node = { - type: "FunctionCall", - expression: this.visitExpression(ctx.expression(0)), - arguments: args, - names, - identifiers - }; - return this._addMeta(node, ctx); - } - if (this._toText(ctx.getChild(1)) === "[" && this._toText(ctx.getChild(3)) === "]") { - if (ctx.getChild(2).text === ":") { - const node2 = { - type: "IndexRangeAccess", - base: this.visitExpression(ctx.expression(0)) - }; - return this._addMeta(node2, ctx); - } - const node = { - type: "IndexAccess", - base: this.visitExpression(ctx.expression(0)), - index: this.visitExpression(ctx.expression(1)) - }; - return this._addMeta(node, ctx); - } - if (this._toText(ctx.getChild(1)) === "{" && this._toText(ctx.getChild(3)) === "}") { - const node = { - type: "NameValueExpression", - expression: this.visitExpression(ctx.expression(0)), - arguments: this.visitNameValueList(ctx.nameValueList()) - }; - return this._addMeta(node, ctx); - } - break; - case 5: - if (this._toText(ctx.getChild(1)) === "?" 
&& this._toText(ctx.getChild(3)) === ":") { - const node = { - type: "Conditional", - condition: this.visitExpression(ctx.expression(0)), - trueExpression: this.visitExpression(ctx.expression(1)), - falseExpression: this.visitExpression(ctx.expression(2)) - }; - return this._addMeta(node, ctx); - } - if (this._toText(ctx.getChild(1)) === "[" && this._toText(ctx.getChild(2)) === ":" && this._toText(ctx.getChild(4)) === "]") { - const node = { - type: "IndexRangeAccess", - base: this.visitExpression(ctx.expression(0)), - indexEnd: this.visitExpression(ctx.expression(1)) - }; - return this._addMeta(node, ctx); - } else if (this._toText(ctx.getChild(1)) === "[" && this._toText(ctx.getChild(3)) === ":" && this._toText(ctx.getChild(4)) === "]") { - const node = { - type: "IndexRangeAccess", - base: this.visitExpression(ctx.expression(0)), - indexStart: this.visitExpression(ctx.expression(1)) - }; - return this._addMeta(node, ctx); - } - break; - case 6: - if (this._toText(ctx.getChild(1)) === "[" && this._toText(ctx.getChild(3)) === ":" && this._toText(ctx.getChild(5)) === "]") { - const node = { - type: "IndexRangeAccess", - base: this.visitExpression(ctx.expression(0)), - indexStart: this.visitExpression(ctx.expression(1)), - indexEnd: this.visitExpression(ctx.expression(2)) - }; - return this._addMeta(node, ctx); - } - break; - } - throw new Error("Unrecognized expression"); - } - visitNameValueList(ctx) { - const names = []; - const identifiers = []; - const args = []; - for (const nameValue of ctx.nameValue()) { - names.push(this._toText(nameValue.identifier())); - identifiers.push(this.visitIdentifier(nameValue.identifier())); - args.push(this.visitExpression(nameValue.expression())); - } - const node = { - type: "NameValueList", - names, - identifiers, - arguments: args - }; - return this._addMeta(node, ctx); - } - visitFileLevelConstant(ctx) { - const type = this.visitTypeName(ctx.typeName()); - const iden = ctx.identifier(); - const name = this._toText(iden); - 
const expression = this.visitExpression(ctx.expression()); - const node = { - type: "FileLevelConstant", - typeName: type, - name, - initialValue: expression, - isDeclaredConst: true, - isImmutable: false - }; - return this._addMeta(node, ctx); - } - visitForStatement(ctx) { - let conditionExpression = this.visitExpressionStatement(ctx.expressionStatement()); - if (conditionExpression) { - conditionExpression = conditionExpression.expression; - } - const node = { - type: "ForStatement", - initExpression: ctx.simpleStatement() ? this.visitSimpleStatement(ctx.simpleStatement()) : null, - conditionExpression, - loopExpression: { - type: "ExpressionStatement", - expression: ctx.expression() !== void 0 ? this.visitExpression(ctx.expression()) : null - }, - body: this.visitStatement(ctx.statement()) - }; - return this._addMeta(node, ctx); - } - visitHexLiteral(ctx) { - const parts = ctx.HexLiteralFragment().map((x) => this._toText(x)).map((x) => x.substring(4, x.length - 1)); - const node = { - type: "HexLiteral", - value: parts.join(""), - parts - }; - return this._addMeta(node, ctx); - } - visitPrimaryExpression(ctx) { - if (ctx.BooleanLiteral()) { - const node = { - type: "BooleanLiteral", - value: this._toText(ctx.BooleanLiteral()) === "true" - }; - return this._addMeta(node, ctx); - } - if (ctx.hexLiteral()) { - return this.visitHexLiteral(ctx.hexLiteral()); - } - if (ctx.stringLiteral()) { - const fragments = ctx.stringLiteral().StringLiteralFragment().map((stringLiteralFragmentCtx) => { - let text = this._toText(stringLiteralFragmentCtx); - const isUnicode = text.slice(0, 7) === "unicode"; - if (isUnicode) { - text = text.slice(7); - } - const singleQuotes = text[0] === "'"; - const textWithoutQuotes = text.substring(1, text.length - 1); - const value = singleQuotes ? 
textWithoutQuotes.replace(new RegExp("\\\\'", "g"), "'") : textWithoutQuotes.replace(new RegExp('\\\\"', "g"), '"'); - return {value, isUnicode}; - }); - const parts = fragments.map((x) => x.value); - const node = { - type: "StringLiteral", - value: parts.join(""), - parts, - isUnicode: fragments.map((x) => x.isUnicode) - }; - return this._addMeta(node, ctx); - } - if (ctx.numberLiteral()) { - return this.visitNumberLiteral(ctx.numberLiteral()); - } - if (ctx.TypeKeyword()) { - const node = { - type: "Identifier", - name: "type" - }; - return this._addMeta(node, ctx); - } - if (ctx.children.length == 3 && this._toText(ctx.getChild(1)) === "[" && this._toText(ctx.getChild(2)) === "]") { - let node = this.visit(ctx.getChild(0)); - if (node.type === "Identifier") { - node = { - type: "UserDefinedTypeName", - namePath: node.name - }; - } else if (node.type == "TypeNameExpression") { - node = node.typeName; - } else { - node = { - type: "ElementaryTypeName", - name: this._toText(ctx.getChild(0)) - }; - } - const typeName = { - type: "ArrayTypeName", - baseTypeName: this._addMeta(node, ctx), - length: null - }; - const result = { - type: "TypeNameExpression", - typeName: this._addMeta(typeName, ctx) - }; - return this._addMeta(result, ctx); - } - return this.visit(ctx.getChild(0)); - } - visitTupleExpression(ctx) { - const children = ctx.children.slice(1, -1); - const components = this._mapCommasToNulls(children).map((expr) => { - if (expr === null) { - return null; - } - return this.visit(expr); - }); - const node = { - type: "TupleExpression", - components, - isArray: this._toText(ctx.getChild(0)) === "[" - }; - return this._addMeta(node, ctx); - } - buildIdentifierList(ctx) { - const children = ctx.children.slice(1, -1); - const identifiers = ctx.identifier(); - let i = 0; - return this._mapCommasToNulls(children).map((idenOrNull) => { - if (!idenOrNull) { - return null; - } - const iden = identifiers[i]; - i++; - const node = { - type: "VariableDeclaration", - name: 
this._toText(iden), - identifier: this.visitIdentifier(iden), - isStateVar: false, - isIndexed: false, - typeName: null, - storageLocation: null, - expression: null - }; - return this._addMeta(node, iden); - }); - } - buildVariableDeclarationList(ctx) { - const variableDeclarations = ctx.variableDeclaration(); - let i = 0; - return this._mapCommasToNulls(ctx.children).map((declOrNull) => { - if (!declOrNull) { - return null; - } - const decl = variableDeclarations[i]; - i++; - let storageLocation = null; - if (decl.storageLocation()) { - storageLocation = this._toText(decl.storageLocation()); - } - const identifierCtx = decl.identifier(); - const result = { - type: "VariableDeclaration", - name: this._toText(identifierCtx), - identifier: this.visitIdentifier(identifierCtx), - typeName: this.visitTypeName(decl.typeName()), - storageLocation, - isStateVar: false, - isIndexed: false, - expression: null - }; - return this._addMeta(result, decl); - }); - } - visitImportDirective(ctx) { - const pathString = this._toText(ctx.importPath()); - let unitAlias = null; - let unitAliasIdentifier = null; - let symbolAliases = null; - let symbolAliasesIdentifiers = null; - if (ctx.importDeclaration().length > 0) { - symbolAliases = ctx.importDeclaration().map((decl) => { - const symbol = this._toText(decl.identifier(0)); - let alias = null; - if (decl.identifier().length > 1) { - alias = this._toText(decl.identifier(1)); - } - return [symbol, alias]; - }); - symbolAliasesIdentifiers = ctx.importDeclaration().map((decl) => { - const symbolIdentifier = this.visitIdentifier(decl.identifier(0)); - let aliasIdentifier = null; - if (decl.identifier().length > 1) { - aliasIdentifier = this.visitIdentifier(decl.identifier(1)); - } - return [symbolIdentifier, aliasIdentifier]; - }); - } else { - const identifierCtxList = ctx.identifier(); - if (identifierCtxList.length === 0) { - } else if (identifierCtxList.length === 1) { - const aliasIdentifierCtx = ctx.identifier(0); - unitAlias = 
this._toText(aliasIdentifierCtx); - unitAliasIdentifier = this.visitIdentifier(aliasIdentifierCtx); - } else if (identifierCtxList.length === 2) { - const aliasIdentifierCtx = ctx.identifier(1); - unitAlias = this._toText(aliasIdentifierCtx); - unitAliasIdentifier = this.visitIdentifier(aliasIdentifierCtx); - } else { - throw new Error("Assertion error: an import should have one or two identifiers"); - } - } - const path = pathString.substring(1, pathString.length - 1); - const pathLiteral = { - type: "StringLiteral", - value: path, - parts: [path], - isUnicode: [false] - }; - const node = { - type: "ImportDirective", - path, - pathLiteral: this._addMeta(pathLiteral, ctx.importPath()), - unitAlias, - unitAliasIdentifier, - symbolAliases, - symbolAliasesIdentifiers - }; - return this._addMeta(node, ctx); - } - buildEventParameterList(ctx) { - return ctx.eventParameter().map((paramCtx) => { - const type = this.visit(paramCtx.typeName()); - let name = null; - if (paramCtx.identifier()) { - name = this._toText(paramCtx.identifier()); - } - return { - type: "VariableDeclaration", - typeName: type, - name, - isStateVar: false, - isIndexed: !!paramCtx.IndexedKeyword(0) - }; - }); - } - visitReturnParameters(ctx) { - return this.visitParameterList(ctx.parameterList()); - } - visitParameterList(ctx) { - return ctx.parameter().map((paramCtx) => this.visitParameter(paramCtx)); - } - visitInlineAssemblyStatement(ctx) { - let language = null; - if (ctx.StringLiteralFragment()) { - language = this._toText(ctx.StringLiteralFragment()); - language = language.substring(1, language.length - 1); - } - const node = { - type: "InlineAssemblyStatement", - language, - body: this.visitAssemblyBlock(ctx.assemblyBlock()) - }; - return this._addMeta(node, ctx); - } - visitAssemblyBlock(ctx) { - const operations = ctx.assemblyItem().map((item) => this.visitAssemblyItem(item)); - const node = { - type: "AssemblyBlock", - operations - }; - return this._addMeta(node, ctx); - } - 
visitAssemblyItem(ctx) { - let text; - if (ctx.hexLiteral()) { - return this.visitHexLiteral(ctx.hexLiteral()); - } - if (ctx.stringLiteral()) { - text = this._toText(ctx.stringLiteral()); - const value = text.substring(1, text.length - 1); - const node = { - type: "StringLiteral", - value, - parts: [value], - isUnicode: [false] - }; - return this._addMeta(node, ctx); - } - if (ctx.BreakKeyword()) { - const node = { - type: "Break" - }; - return this._addMeta(node, ctx); - } - if (ctx.ContinueKeyword()) { - const node = { - type: "Continue" - }; - return this._addMeta(node, ctx); - } - return this.visit(ctx.getChild(0)); - } - visitAssemblyExpression(ctx) { - return this.visit(ctx.getChild(0)); - } - visitAssemblyCall(ctx) { - const functionName = this._toText(ctx.getChild(0)); - const args = ctx.assemblyExpression().map((assemblyExpr) => this.visitAssemblyExpression(assemblyExpr)); - const node = { - type: "AssemblyCall", - functionName, - arguments: args - }; - return this._addMeta(node, ctx); - } - visitAssemblyLiteral(ctx) { - let text; - if (ctx.stringLiteral()) { - text = this._toText(ctx); - const value = text.substring(1, text.length - 1); - const node = { - type: "StringLiteral", - value, - parts: [value], - isUnicode: [false] - }; - return this._addMeta(node, ctx); - } - if (ctx.DecimalNumber()) { - const node = { - type: "DecimalNumber", - value: this._toText(ctx) - }; - return this._addMeta(node, ctx); - } - if (ctx.HexNumber()) { - const node = { - type: "HexNumber", - value: this._toText(ctx) - }; - return this._addMeta(node, ctx); - } - if (ctx.hexLiteral()) { - return this.visitHexLiteral(ctx.hexLiteral()); - } - throw new Error("Should never reach here"); - } - visitAssemblySwitch(ctx) { - const node = { - type: "AssemblySwitch", - expression: this.visitAssemblyExpression(ctx.assemblyExpression()), - cases: ctx.assemblyCase().map((c) => this.visitAssemblyCase(c)) - }; - return this._addMeta(node, ctx); - } - visitAssemblyCase(ctx) { - let value = 
null; - if (this._toText(ctx.getChild(0)) === "case") { - value = this.visitAssemblyLiteral(ctx.assemblyLiteral()); - } - const node = { - type: "AssemblyCase", - block: this.visitAssemblyBlock(ctx.assemblyBlock()), - value, - default: value === null - }; - return this._addMeta(node, ctx); - } - visitAssemblyLocalDefinition(ctx) { - const ctxAssemblyIdentifierOrList = ctx.assemblyIdentifierOrList(); - let names; - if (ctxAssemblyIdentifierOrList.identifier()) { - names = [this.visitIdentifier(ctxAssemblyIdentifierOrList.identifier())]; - } else if (ctxAssemblyIdentifierOrList.assemblyMember()) { - names = [ - this.visitAssemblyMember(ctxAssemblyIdentifierOrList.assemblyMember()) - ]; - } else { - names = ctxAssemblyIdentifierOrList.assemblyIdentifierList().identifier().map((x) => this.visitIdentifier(x)); - } - let expression = null; - if (ctx.assemblyExpression() !== void 0) { - expression = this.visitAssemblyExpression(ctx.assemblyExpression()); - } - const node = { - type: "AssemblyLocalDefinition", - names, - expression - }; - return this._addMeta(node, ctx); - } - visitAssemblyFunctionDefinition(ctx) { - const ctxAssemblyIdentifierList = ctx.assemblyIdentifierList(); - const args = ctxAssemblyIdentifierList !== void 0 ? ctxAssemblyIdentifierList.identifier().map((x) => this.visitIdentifier(x)) : []; - const ctxAssemblyFunctionReturns = ctx.assemblyFunctionReturns(); - const returnArgs = ctxAssemblyFunctionReturns ? 
ctxAssemblyFunctionReturns.assemblyIdentifierList().identifier().map((x) => this.visitIdentifier(x)) : []; - const node = { - type: "AssemblyFunctionDefinition", - name: this._toText(ctx.identifier()), - arguments: args, - returnArguments: returnArgs, - body: this.visitAssemblyBlock(ctx.assemblyBlock()) - }; - return this._addMeta(node, ctx); - } - visitAssemblyAssignment(ctx) { - const ctxAssemblyIdentifierOrList = ctx.assemblyIdentifierOrList(); - let names; - if (ctxAssemblyIdentifierOrList.identifier()) { - names = [this.visitIdentifier(ctxAssemblyIdentifierOrList.identifier())]; - } else if (ctxAssemblyIdentifierOrList.assemblyMember()) { - names = [ - this.visitAssemblyMember(ctxAssemblyIdentifierOrList.assemblyMember()) - ]; - } else { - names = ctxAssemblyIdentifierOrList.assemblyIdentifierList().identifier().map((x) => this.visitIdentifier(x)); - } - const node = { - type: "AssemblyAssignment", - names, - expression: this.visitAssemblyExpression(ctx.assemblyExpression()) - }; - return this._addMeta(node, ctx); - } - visitAssemblyMember(ctx) { - const [accessed, member] = ctx.identifier(); - const node = { - type: "AssemblyMemberAccess", - expression: this.visitIdentifier(accessed), - memberName: this.visitIdentifier(member) - }; - return this._addMeta(node, ctx); - } - visitLabelDefinition(ctx) { - const node = { - type: "LabelDefinition", - name: this._toText(ctx.identifier()) - }; - return this._addMeta(node, ctx); - } - visitAssemblyStackAssignment(ctx) { - const node = { - type: "AssemblyStackAssignment", - name: this._toText(ctx.identifier()) - }; - return this._addMeta(node, ctx); - } - visitAssemblyFor(ctx) { - const node = { - type: "AssemblyFor", - pre: this.visit(ctx.getChild(1)), - condition: this.visit(ctx.getChild(2)), - post: this.visit(ctx.getChild(3)), - body: this.visit(ctx.getChild(4)) - }; - return this._addMeta(node, ctx); - } - visitAssemblyIf(ctx) { - const node = { - type: "AssemblyIf", - condition: 
this.visitAssemblyExpression(ctx.assemblyExpression()), - body: this.visitAssemblyBlock(ctx.assemblyBlock()) - }; - return this._addMeta(node, ctx); - } - visitContinueStatement(ctx) { - const node = { - type: "ContinueStatement" - }; - return this._addMeta(node, ctx); - } - visitBreakStatement(ctx) { - const node = { - type: "BreakStatement" - }; - return this._addMeta(node, ctx); - } - _toText(ctx) { - const text = ctx.text; - if (text === void 0) { - throw new Error("Assertion error: text should never be undefiend"); - } - return text; - } - _stateMutabilityToText(ctx) { - if (ctx.PureKeyword() !== void 0) { - return "pure"; - } - if (ctx.ConstantKeyword() !== void 0) { - return "constant"; - } - if (ctx.PayableKeyword() !== void 0) { - return "payable"; - } - if (ctx.ViewKeyword() !== void 0) { - return "view"; - } - throw new Error("Assertion error: non-exhaustive stateMutability check"); - } - _loc(ctx) { - const sourceLocation = { - start: { - line: ctx.start.line, - column: ctx.start.charPositionInLine - }, - end: { - line: ctx.stop ? ctx.stop.line : ctx.start.line, - column: ctx.stop ? ctx.stop.charPositionInLine : ctx.start.charPositionInLine - } - }; - return sourceLocation; - } - _range(ctx) { - var _a, _b; - return [ctx.start.startIndex, (_b = (_a = ctx.stop) == null ? void 0 : _a.stopIndex) != null ? 
_b : ctx.start.startIndex]; - } - _addMeta(node, ctx) { - const nodeWithMeta = { - type: node.type - }; - if (this.options.loc === true) { - node.loc = this._loc(ctx); - } - if (this.options.range === true) { - node.range = this._range(ctx); - } - return __objSpread(__objSpread({}, nodeWithMeta), node); - } - _mapCommasToNulls(children) { - if (children.length === 0) { - return []; - } - const values = []; - let comma = true; - for (const el of children) { - if (comma) { - if (this._toText(el) === ",") { - values.push(null); - } else { - values.push(el); - comma = false; - } - } else { - if (this._toText(el) !== ",") { - throw new Error("expected comma"); - } - comma = true; - } - } - if (comma) { - values.push(null); - } - return values; - } -}; -function isBinOp(op) { - return binaryOpValues.includes(op); -} - -// src/ErrorListener.ts -var import_antlr4 = __toModule(require_antlr4()); -var ErrorListener = class extends import_antlr4.default.error.ErrorListener { - constructor() { - super(); - this._errors = []; - } - syntaxError(recognizer, offendingSymbol, line, column, message) { - this._errors.push({message, line, column}); - } - getErrors() { - return this._errors; - } - hasErrors() { - return this._errors.length > 0; - } -}; -var ErrorListener_default = ErrorListener; - -// src/tokens.ts -var import_tokens_string = __toModule(require_tokens_string()); -var tokens = import_tokens_string.default; -var TYPE_TOKENS = [ - "var", - "bool", - "address", - "string", - "Int", - "Uint", - "Byte", - "Fixed", - "UFixed" -]; -function rsplit(str, value) { - const index = str.lastIndexOf(value); - return [str.substring(0, index), str.substring(index + 1, str.length)]; -} -function normalizeTokenType(value) { - if (value.endsWith("'")) { - value = value.substring(0, value.length - 1); - } - if (value.startsWith("'")) { - value = value.substring(1, value.length); - } - return value; -} -function getTokenType(value) { - if (value === "Identifier" || value === "from") { - 
return "Identifier"; - } else if (value === "TrueLiteral" || value === "FalseLiteral") { - return "Boolean"; - } else if (value === "VersionLiteral") { - return "Version"; - } else if (value === "StringLiteral") { - return "String"; - } else if (TYPE_TOKENS.includes(value)) { - return "Type"; - } else if (value === "NumberUnit") { - return "Subdenomination"; - } else if (value === "DecimalNumber") { - return "Numeric"; - } else if (value === "HexLiteral") { - return "Hex"; - } else if (value === "ReservedKeyword") { - return "Reserved"; - } else if (/^\W+$/.test(value)) { - return "Punctuator"; - } else { - return "Keyword"; - } -} -function getTokenTypeMap() { - return tokens.split("\n").map((line) => rsplit(line, "=")).reduce((acum, [value, key]) => { - acum[parseInt(key, 10)] = normalizeTokenType(value); - return acum; - }, {}); -} -function buildTokenList(tokensArg, options) { - const tokenTypes = getTokenTypeMap(); - const result = tokensArg.map((token) => { - var _a, _b; - const type = getTokenType(tokenTypes[token.type]); - const node = {type, value: token.text}; - if (options.range === true) { - node.range = [token.startIndex, token.stopIndex + 1]; - } - if (options.loc === true) { - node.loc = { - start: {line: token.line, column: token.charPositionInLine}, - end: {line: token.line, column: token.charPositionInLine + ((_b = (_a = token.text) == null ? void 0 : _a.length) != null ? 
_b : 0)} - }; - } - return node; - }); - return result; -} - -// src/parser.ts -var ParserError = class extends Error { - constructor(args) { - super(); - const {message, line, column} = args.errors[0]; - this.message = `${message} (${line}:${column})`; - this.errors = args.errors; - if (Error.captureStackTrace !== void 0) { - Error.captureStackTrace(this, this.constructor); - } else { - this.stack = new Error().stack; - } - } -}; -function tokenize(input, options = {}) { - const inputStream = new import_antlr4ts.ANTLRInputStream(input); - const lexer = new SolidityLexer(inputStream); - return buildTokenList(lexer.getAllTokens(), options); -} -function parse(input, options = {}) { - const inputStream = new import_antlr4ts.ANTLRInputStream(input); - const lexer = new SolidityLexer(inputStream); - const tokenStream = new import_antlr4ts.CommonTokenStream(lexer); - const parser = new SolidityParser(tokenStream); - const listener = new ErrorListener_default(); - lexer.removeErrorListeners(); - lexer.addErrorListener(listener); - parser.removeErrorListeners(); - parser.addErrorListener(listener); - parser.buildParseTree = true; - const sourceUnit = parser.sourceUnit(); - const astBuilder = new ASTBuilder(options); - astBuilder.visit(sourceUnit); - const ast = astBuilder.result; - if (ast === null) { - throw new Error("ast should never be null"); - } - let tokenList = []; - if (options.tokens === true) { - tokenList = buildTokenList(tokenStream.getTokens(), options); - } - if (options.tolerant !== true && listener.hasErrors()) { - throw new ParserError({errors: listener.getErrors()}); - } - if (options.tolerant === true && listener.hasErrors()) { - ast.errors = listener.getErrors(); - } - if (options.tokens === true) { - ast.tokens = tokenList; - } - return ast; -} -function _isASTNode(node) { - if (typeof node !== "object" || node === null) { - return false; - } - const nodeAsAny = node; - if (Object.prototype.hasOwnProperty.call(nodeAsAny, "type") && typeof 
nodeAsAny.type === "string") { - return astNodeTypes.includes(nodeAsAny.type); - } - return false; -} -function visit(node, visitor, nodeParent) { - if (Array.isArray(node)) { - node.forEach((child) => visit(child, visitor, nodeParent)); - } - if (!_isASTNode(node)) - return; - let cont = true; - if (visitor[node.type] !== void 0) { - cont = visitor[node.type](node, nodeParent); - } - if (cont === false) - return; - for (const prop in node) { - if (Object.prototype.hasOwnProperty.call(node, prop)) { - visit(node[prop], visitor, node); - } - } - const selector = node.type + ":exit"; - if (visitor[selector] !== void 0) { - visitor[selector](node, nodeParent); - } -} -// Annotate the CommonJS export names for ESM import in node: -0 && (module.exports = { - ParserError, - parse, - tokenize, - visit -}); -/*! - * Copyright 2016 The ANTLR Project. All rights reserved. - * Licensed under the BSD-3-Clause license. See LICENSE file in the project root for license information. - */ -/*! https://mths.be/codepointat v0.2.0 by @mathias */ -/*! 
https://mths.be/fromcodepoint v0.2.1 by @mathias */ -//# sourceMappingURL=index.cjs.js.map diff --git a/node_modules/@solidity-parser/parser/dist/index.cjs.js.map b/node_modules/@solidity-parser/parser/dist/index.cjs.js.map deleted file mode 100644 index 8a13879..0000000 --- a/node_modules/@solidity-parser/parser/dist/index.cjs.js.map +++ /dev/null @@ -1,7 +0,0 @@ -{ - "version": 3, - "sources": ["../src/ANTLRErrorListener.ts", "../src/ANTLRErrorStrategy.ts", "../src/Decorators.ts", "../src/IntStream.ts", "../src/ANTLRInputStream.ts", "../src/atn/ATNState.ts", "../src/atn/ATNStateType.ts", "../src/RecognitionException.ts", "../src/atn/Transition.ts", "../src/atn/AbstractPredicateTransition.ts", "../src/misc/MurmurHash.ts", "../src/misc/ObjectEqualityComparator.ts", "../src/misc/DefaultEqualityComparator.ts", "../src/misc/Array2DHashSet.ts", "../src/misc/ArrayEqualityComparator.ts", "../src/misc/Utils.ts", "../src/atn/SemanticContext.ts", "../src/atn/PredicateTransition.ts", "../src/FailedPredicateException.ts", "../src/InputMismatchException.ts", "../src/misc/Arrays.ts", "../src/misc/IntegerList.ts", "../src/misc/Interval.ts", "../src/Token.ts", "../src/CommonToken.ts", "../src/CommonTokenFactory.ts", "../src/misc/IntegerStack.ts", "../src/dfa/AcceptStateInfo.ts", "../src/misc/Array2DHashMap.ts", "../src/atn/DecisionState.ts", "../src/atn/PredictionContextCache.ts", "../src/atn/PredictionContext.ts", "../src/atn/ATNConfig.ts", "../src/misc/BitSet.ts", "../src/atn/ATNConfigSet.ts", "../src/dfa/DFAState.ts", "../src/atn/ATNSimulator.ts", "../src/ConsoleErrorListener.ts", "../src/ProxyErrorListener.ts", "../src/Recognizer.ts", "../src/VocabularyImpl.ts", "../src/dfa/DFASerializer.ts", "../src/dfa/LexerDFASerializer.ts", "../src/atn/StarLoopEntryState.ts", "../src/dfa/DFA.ts", "../src/atn/BasicState.ts", "../src/atn/InvalidState.ts", "../src/atn/SetTransition.ts", "../src/atn/NotSetTransition.ts", "../src/atn/RuleStopState.ts", "../src/atn/RuleTransition.ts", 
"../src/atn/WildcardTransition.ts", "../src/atn/LL1Analyzer.ts", "../src/atn/ATN.ts", "../src/atn/LexerIndexedCustomAction.ts", "../src/atn/LexerActionExecutor.ts", "../src/LexerNoViableAltException.ts", "../src/atn/OrderedATNConfigSet.ts", "../src/atn/LexerATNSimulator.ts", "../src/Lexer.ts", "../src/misc/IntervalSet.ts", "../src/atn/ATNDeserializationOptions.ts", "../src/atn/ActionTransition.ts", "../src/atn/AtomTransition.ts", "../src/atn/BlockStartState.ts", "../src/atn/BasicBlockStartState.ts", "../src/atn/BlockEndState.ts", "../src/atn/EpsilonTransition.ts", "../src/atn/LexerChannelAction.ts", "../src/atn/LexerCustomAction.ts", "../src/atn/LexerModeAction.ts", "../src/atn/LexerMoreAction.ts", "../src/atn/LexerPopModeAction.ts", "../src/atn/LexerPushModeAction.ts", "../src/atn/LexerSkipAction.ts", "../src/atn/LexerTypeAction.ts", "../src/atn/LoopEndState.ts", "../src/atn/ConflictInfo.ts", "../src/tree/TerminalNode.ts", "../src/tree/ErrorNode.ts", "../src/tree/RuleNode.ts", "../src/tree/Trees.ts", "../src/RuleContext.ts", "../src/ParserRuleContext.ts", "../src/atn/PredictionMode.ts", "../src/atn/SimulatorState.ts", "../src/atn/ParserATNSimulator.ts", "../src/atn/PlusBlockStartState.ts", "../src/atn/PlusLoopbackState.ts", "../src/atn/PrecedencePredicateTransition.ts", "../src/atn/RangeTransition.ts", "../src/atn/RuleStartState.ts", "../src/atn/StarBlockStartState.ts", "../src/atn/StarLoopbackState.ts", "../src/atn/TokensStartState.ts", "../src/misc/UUID.ts", "../src/atn/ATNDeserializer.ts", "../src/atn/ParseInfo.ts", "../src/ProxyParserErrorListener.ts", "../src/misc/Character.ts", "../src/CodePointBuffer.ts", "../src/CodePointCharStream.ts", "../src/CharStreams.ts", "../src/BufferedTokenStream.ts", "../src/CommonTokenStream.ts", "../src/ListTokenSource.ts", "../src/misc/MultiMap.ts", "../src/misc/ParseCancellationException.ts", "../src/InterpreterRuleContext.ts", "../src/ParserInterpreter.ts", "../src/tree/pattern/ParseTreeMatch.ts", 
"../src/tree/xpath/XPathLexer.ts", "../src/tree/xpath/XPathLexerErrorListener.ts", "../src/tree/xpath/XPathElement.ts", "../src/tree/xpath/XPathRuleAnywhereElement.ts", "../src/tree/xpath/XPathRuleElement.ts", "../src/tree/xpath/XPathTokenAnywhereElement.ts", "../src/tree/xpath/XPathTokenElement.ts", "../src/tree/xpath/XPathWildcardAnywhereElement.ts", "../src/tree/xpath/XPathWildcardElement.ts", "../src/tree/xpath/XPath.ts", "../src/tree/pattern/ParseTreePattern.ts", "../src/tree/pattern/RuleTagToken.ts", "../src/tree/pattern/Chunk.ts", "../src/tree/pattern/TagChunk.ts", "../src/tree/pattern/TextChunk.ts", "../src/tree/pattern/TokenTagToken.ts", "../src/tree/pattern/ParseTreePatternMatcher.ts", "../src/atn/DecisionEventInfo.ts", "../src/atn/AmbiguityInfo.ts", "../src/atn/ContextSensitivityInfo.ts", "../src/atn/DecisionInfo.ts", "../src/atn/ErrorInfo.ts", "../src/atn/LookaheadEventInfo.ts", "../src/atn/PredicateEvalInfo.ts", "../src/atn/ProfilingATNSimulator.ts", "../src/Parser.ts", "../src/NoViableAltException.ts", "../src/DefaultErrorStrategy.ts", "../src/BailErrorStrategy.ts", "../src/CharStream.ts", "../src/Dependents.ts", "../src/DiagnosticErrorListener.ts", "../src/LexerInterpreter.ts", "../src/ParserErrorListener.ts", "../src/RuleContextWithAltNum.ts", "../src/RuleDependency.ts", "../src/RuleVersion.ts", "../src/TokenFactory.ts", "../src/TokenSource.ts", "../src/TokenStream.ts", "../src/TokenStreamRewriter.ts", "../src/Vocabulary.ts", "../src/WritableToken.ts", "../src/index.ts", "../src/tree/AbstractParseTreeVisitor.ts", "../node_modules/antlr4/src/antlr4/Utils.js", "../node_modules/antlr4/src/antlr4/Token.js", "../node_modules/antlr4/src/antlr4/atn/ATNState.js", "../node_modules/antlr4/src/antlr4/atn/SemanticContext.js", "../node_modules/antlr4/src/antlr4/atn/ATNConfig.js", "../node_modules/antlr4/src/antlr4/IntervalSet.js", "../node_modules/antlr4/src/antlr4/atn/Transition.js", "../node_modules/antlr4/src/antlr4/tree/Tree.js", 
"../node_modules/antlr4/src/antlr4/tree/Trees.js", "../node_modules/antlr4/src/antlr4/RuleContext.js", "../node_modules/antlr4/src/antlr4/PredictionContext.js", "../node_modules/antlr4/src/antlr4/LL1Analyzer.js", "../node_modules/antlr4/src/antlr4/atn/ATN.js", "../node_modules/antlr4/src/antlr4/atn/ATNType.js", "../node_modules/antlr4/src/antlr4/atn/ATNDeserializationOptions.js", "../node_modules/antlr4/src/antlr4/atn/LexerAction.js", "../node_modules/antlr4/src/antlr4/atn/ATNDeserializer.js", "../node_modules/antlr4/src/antlr4/error/ErrorListener.js", "../node_modules/antlr4/src/antlr4/Recognizer.js", "../node_modules/antlr4/src/antlr4/CommonTokenFactory.js", "../node_modules/antlr4/src/antlr4/error/Errors.js", "../node_modules/antlr4/src/antlr4/Lexer.js", "../node_modules/antlr4/src/antlr4/atn/ATNConfigSet.js", "../node_modules/antlr4/src/antlr4/dfa/DFAState.js", "../node_modules/antlr4/src/antlr4/atn/ATNSimulator.js", "../node_modules/antlr4/src/antlr4/atn/LexerActionExecutor.js", "../node_modules/antlr4/src/antlr4/atn/LexerATNSimulator.js", "../node_modules/antlr4/src/antlr4/atn/PredictionMode.js", "../node_modules/antlr4/src/antlr4/ParserRuleContext.js", "../node_modules/antlr4/src/antlr4/atn/ParserATNSimulator.js", "../node_modules/antlr4/src/antlr4/atn/index.js", "../node_modules/antlr4/src/antlr4/polyfills/codepointat.js", "../node_modules/antlr4/src/antlr4/dfa/DFASerializer.js", "../node_modules/antlr4/src/antlr4/dfa/DFA.js", "../node_modules/antlr4/src/antlr4/dfa/index.js", "../node_modules/antlr4/src/antlr4/polyfills/fromcodepoint.js", "../node_modules/antlr4/src/antlr4/tree/index.js", "../node_modules/antlr4/src/antlr4/error/DiagnosticErrorListener.js", "../node_modules/antlr4/src/antlr4/error/ErrorStrategy.js", "../node_modules/antlr4/src/antlr4/error/index.js", "../node_modules/antlr4/src/antlr4/InputStream.js", "../node_modules/antlr4/src/antlr4/CharStreams.js", "../node_modules/antlr4/src/antlr4/FileStream.js", 
"../node_modules/antlr4/src/antlr4/BufferedTokenStream.js", "../node_modules/antlr4/src/antlr4/CommonTokenStream.js", "../node_modules/antlr4/src/antlr4/Parser.js", "../node_modules/antlr4/src/antlr4/index.js", "../src/tokens-string.js", "../src/index.ts", "../src/parser.ts", "../src/antlr/SolidityLexer.ts", "../src/antlr/SolidityParser.ts", "../src/ast-types.ts", "../src/ASTBuilder.ts", "../src/ErrorListener.ts", "../src/tokens.ts"], - "sourcesContent": ["/*!\r\n * Copyright 2016 The ANTLR Project. All rights reserved.\r\n * Licensed under the BSD-3-Clause license. See LICENSE file in the project root for license information.\r\n */\r\n\r\n// ConvertTo-TS run at 2016-10-04T11:26:48.7499997-07:00\r\n\r\n/** How to emit recognition errors. */\r\nimport { Recognizer } from \"./Recognizer\";\r\nimport { RecognitionException } from \"./RecognitionException\";\r\n\r\nexport interface ANTLRErrorListener {\r\n\t/**\r\n\t * Upon syntax error, notify any interested parties. This is not how to\r\n\t * recover from errors or compute error messages. {@link ANTLRErrorStrategy}\r\n\t * specifies how to recover from syntax errors and how to compute error\r\n\t * messages. This listener's job is simply to emit a computed message,\r\n\t * though it has enough information to create its own message in many cases.\r\n\t *\r\n\t * The {@link RecognitionException} is non-`undefined` for all syntax errors except\r\n\t * when we discover mismatched token errors that we can recover from\r\n\t * in-line, without returning from the surrounding rule (via the single\r\n\t * token insertion and deletion mechanism).\r\n\t *\r\n\t * @param recognizer\r\n\t * What parser got the error. From this\r\n\t * \t\t object, you can access the context as well\r\n\t * \t\t as the input stream.\r\n\t * @param offendingSymbol\r\n\t * The offending token in the input token\r\n\t * \t\t stream, unless recognizer is a lexer (then it's `undefined`). 
If\r\n\t * \t\t no viable alternative error, `e` has token at which we\r\n\t * \t\t started production for the decision.\r\n\t * @param line\r\n\t * \t\t The line number in the input where the error occurred.\r\n\t * @param charPositionInLine\r\n\t * \t\t The character position within that line where the error occurred.\r\n\t * @param msg\r\n\t * \t\t The message to emit.\r\n\t * @param e\r\n\t * The exception generated by the parser that led to\r\n\t * the reporting of an error. It is `undefined` in the case where\r\n\t * the parser was able to recover in line without exiting the\r\n\t * surrounding rule.\r\n\t */\r\n\tsyntaxError?: (\r\n\t\t/*@NotNull*/\r\n\t\trecognizer: Recognizer,\r\n\t\toffendingSymbol: T | undefined,\r\n\t\tline: number,\r\n\t\tcharPositionInLine: number,\r\n\t\t/*@NotNull*/\r\n\t\tmsg: string,\r\n\t\te: RecognitionException | undefined) => void;\r\n}\r\n", "/*!\r\n * Copyright 2016 The ANTLR Project. All rights reserved.\r\n * Licensed under the BSD-3-Clause license. See LICENSE file in the project root for license information.\r\n */\r\n\r\n// ConvertTo-TS run at 2016-10-04T11:26:48.9102174-07:00\r\n\r\nimport { Parser } from \"./Parser\";\r\nimport { Token } from \"./Token\";\r\nimport { RecognitionException } from \"./RecognitionException\";\r\n\r\n/**\r\n * The interface for defining strategies to deal with syntax errors encountered\r\n * during a parse by ANTLR-generated parsers. 
We distinguish between three\r\n * different kinds of errors:\r\n *\r\n * * The parser could not figure out which path to take in the ATN (none of\r\n * the available alternatives could possibly match)\r\n * * The current input does not match what we were looking for\r\n * * A predicate evaluated to false\r\n *\r\n * Implementations of this interface report syntax errors by calling\r\n * {@link Parser#notifyErrorListeners}.\r\n *\r\n * TODO: what to do about lexers\r\n */\r\nexport interface ANTLRErrorStrategy {\r\n\t/**\r\n\t * Reset the error handler state for the specified `recognizer`.\r\n\t * @param recognizer the parser instance\r\n\t */\r\n\treset(/*@NotNull*/ recognizer: Parser): void;\r\n\r\n\t/**\r\n\t * This method is called when an unexpected symbol is encountered during an\r\n\t * inline match operation, such as {@link Parser#match}. If the error\r\n\t * strategy successfully recovers from the match failure, this method\r\n\t * returns the {@link Token} instance which should be treated as the\r\n\t * successful result of the match.\r\n\t *\r\n\t * This method handles the consumption of any tokens - the caller should\r\n\t * *not* call {@link Parser#consume} after a successful recovery.\r\n\t *\r\n\t * Note that the calling code will not report an error if this method\r\n\t * returns successfully. The error strategy implementation is responsible\r\n\t * for calling {@link Parser#notifyErrorListeners} as appropriate.\r\n\t *\r\n\t * @param recognizer the parser instance\r\n\t * @ if the error strategy was not able to\r\n\t * recover from the unexpected input symbol\r\n\t */\r\n\trecoverInline(/*@NotNull*/ recognizer: Parser): Token;\r\n\r\n\t/**\r\n\t * This method is called to recover from exception `e`. 
This method is\r\n\t * called after {@link #reportError} by the default exception handler\r\n\t * generated for a rule method.\r\n\t *\r\n\t * @see #reportError\r\n\t *\r\n\t * @param recognizer the parser instance\r\n\t * @param e the recognition exception to recover from\r\n\t * @ if the error strategy could not recover from\r\n\t * the recognition exception\r\n\t */\r\n\trecover(/*@NotNull*/ recognizer: Parser, /*@NotNull*/ e: RecognitionException): void;\r\n\r\n\t/**\r\n\t * This method provides the error handler with an opportunity to handle\r\n\t * syntactic or semantic errors in the input stream before they result in a\r\n\t * {@link RecognitionException}.\r\n\t *\r\n\t * The generated code currently contains calls to {@link #sync} after\r\n\t * entering the decision state of a closure block (`(...)*` or\r\n\t * `(...)+`).\r\n\t *\r\n\t * For an implementation based on Jim Idle's \"magic sync\" mechanism, see\r\n\t * {@link DefaultErrorStrategy#sync}.\r\n\t *\r\n\t * @see DefaultErrorStrategy#sync\r\n\t *\r\n\t * @param recognizer the parser instance\r\n\t * @ if an error is detected by the error\r\n\t * strategy but cannot be automatically recovered at the current state in\r\n\t * the parsing process\r\n\t */\r\n\tsync(/*@NotNull*/ recognizer: Parser): void;\r\n\r\n\t/**\r\n\t * Tests whether or not `recognizer` is in the process of recovering\r\n\t * from an error. 
In error recovery mode, {@link Parser#consume} adds\r\n\t * symbols to the parse tree by calling\r\n\t * {@link Parser#createErrorNode(ParserRuleContext, Token)} then\r\n\t * {@link ParserRuleContext#addErrorNode(ErrorNode)} instead of\r\n\t * {@link Parser#createTerminalNode(ParserRuleContext, Token)}.\r\n\t *\r\n\t * @param recognizer the parser instance\r\n\t * @returns `true` if the parser is currently recovering from a parse\r\n\t * error, otherwise `false`\r\n\t */\r\n\tinErrorRecoveryMode(/*@NotNull*/ recognizer: Parser): boolean;\r\n\r\n\t/**\r\n\t * This method is called by when the parser successfully matches an input\r\n\t * symbol.\r\n\t *\r\n\t * @param recognizer the parser instance\r\n\t */\r\n\treportMatch(/*@NotNull*/ recognizer: Parser): void;\r\n\r\n\t/**\r\n\t * Report any kind of {@link RecognitionException}. This method is called by\r\n\t * the default exception handler generated for a rule method.\r\n\t *\r\n\t * @param recognizer the parser instance\r\n\t * @param e the recognition exception to report\r\n\t */\r\n\treportError(\r\n\t\t/*@NotNull*/ recognizer: Parser,\r\n\t\t/*@NotNull*/ e: RecognitionException): void;\r\n}\r\n", "/*!\r\n * Copyright 2016 The ANTLR Project. All rights reserved.\r\n * Licensed under the BSD-3-Clause license. 
See LICENSE file in the project root for license information.\r\n */\r\n\r\nexport function NotNull(\r\n\ttarget: any,\r\n\tpropertyKey: PropertyKey,\r\n\tpropertyDescriptor?: PropertyDescriptor | number) {\r\n\t// intentionally empty\r\n}\r\n\r\nexport function Nullable(\r\n\ttarget: any,\r\n\tpropertyKey: PropertyKey,\r\n\tpropertyDescriptor?: PropertyDescriptor | number) {\r\n\t// intentionally empty\r\n}\r\n\r\nexport function Override(\r\n\ttarget: any,\r\n\tpropertyKey: PropertyKey,\r\n\tpropertyDescriptor?: PropertyDescriptor) {\r\n\t// do something with 'target' ...\r\n}\r\n\r\nexport function SuppressWarnings(options: string) {\r\n\treturn (target: any, propertyKey: PropertyKey, descriptor?: PropertyDescriptor) => {\r\n\t\t// intentionally empty\r\n\t};\r\n}\r\n", "/*!\r\n * Copyright 2016 The ANTLR Project. All rights reserved.\r\n * Licensed under the BSD-3-Clause license. See LICENSE file in the project root for license information.\r\n */\r\n\r\n// ConvertTo-TS run at 2016-10-04T11:26:51.6934376-07:00\r\n\r\nexport namespace IntStream {\r\n\t/**\r\n\t * The value returned by {@link #LA LA()} when the end of the stream is\r\n\t * reached.\r\n\t */\r\n\texport const EOF: number = -1;\r\n\r\n\t/**\r\n\t * The value returned by {@link #getSourceName} when the actual name of the\r\n\t * underlying source is not known.\r\n\t */\r\n\texport const UNKNOWN_SOURCE_NAME: string = \"\";\r\n}\r\n\r\n/**\r\n * A simple stream of symbols whose values are represented as integers. This\r\n * interface provides *marked ranges* with support for a minimum level\r\n * of buffering necessary to implement arbitrary lookahead during prediction.\r\n * For more information on marked ranges, see {@link #mark}.\r\n *\r\n * **Initializing Methods:** Some methods in this interface have\r\n * unspecified behavior if no call to an initializing method has occurred after\r\n * the stream was constructed. 
The following is a list of initializing methods:\r\n *\r\n * * {@link #LA}\r\n * * {@link #consume}\r\n * * {@link #size}\r\n */\r\nexport interface IntStream {\r\n\t/**\r\n\t * Consumes the current symbol in the stream. This method has the following\r\n\t * effects:\r\n\t *\r\n\t * * **Forward movement:** The value of `index`\r\n\t * before calling this method is less than the value of `index`\r\n\t * after calling this method.\r\n\t * * **Ordered lookahead:** The value of `LA(1)` before\r\n\t * calling this method becomes the value of `LA(-1)` after calling\r\n\t * this method.\r\n\t *\r\n\t * Note that calling this method does not guarantee that `index` is\r\n\t * incremented by exactly 1, as that would preclude the ability to implement\r\n\t * filtering streams (e.g. {@link CommonTokenStream} which distinguishes\r\n\t * between \"on-channel\" and \"off-channel\" tokens).\r\n\t *\r\n\t * @throws IllegalStateException if an attempt is made to consume the\r\n\t * end of the stream (i.e. if `LA(1)==`{@link #EOF EOF} before calling\r\n\t * `consume`).\r\n\t */\r\n\tconsume(): void;\r\n\r\n\t/**\r\n\t * Gets the value of the symbol at offset `i` from the current\r\n\t * position. When `i==1`, this method returns the value of the current\r\n\t * symbol in the stream (which is the next symbol to be consumed). When\r\n\t * `i==-1`, this method returns the value of the previously read\r\n\t * symbol in the stream. It is not valid to call this method with\r\n\t * `i==0`, but the specific behavior is unspecified because this\r\n\t * method is frequently called from performance-critical code.\r\n\t *\r\n\t * This method is guaranteed to succeed if any of the following are true:\r\n\t *\r\n\t * * `i>0`\r\n\t * * `i==-1` and `index` returns a value greater\r\n\t * than the value of `index` after the stream was constructed\r\n\t * and `LA(1)` was called in that order. 
Specifying the current\r\n\t * `index` relative to the index after the stream was created\r\n\t * allows for filtering implementations that do not return every symbol\r\n\t * from the underlying source. Specifying the call to `LA(1)`\r\n\t * allows for lazily initialized streams.\r\n\t * * `LA(i)` refers to a symbol consumed within a marked region\r\n\t * that has not yet been released.\r\n\t *\r\n\t * If `i` represents a position at or beyond the end of the stream,\r\n\t * this method returns {@link #EOF}.\r\n\t *\r\n\t * The return value is unspecified if `i<0` and fewer than `-i`\r\n\t * calls to {@link #consume consume()} have occurred from the beginning of\r\n\t * the stream before calling this method.\r\n\t *\r\n\t * @throws UnsupportedOperationException if the stream does not support\r\n\t * retrieving the value of the specified symbol\r\n\t */\r\n\tLA(i: number): number;\r\n\r\n\t/**\r\n\t * A mark provides a guarantee that {@link #seek seek()} operations will be\r\n\t * valid over a \"marked range\" extending from the index where `mark()`\r\n\t * was called to the current `index`. This allows the use of\r\n\t * streaming input sources by specifying the minimum buffering requirements\r\n\t * to support arbitrary lookahead during prediction.\r\n\t *\r\n\t * The returned mark is an opaque handle (type `int`) which is passed\r\n\t * to {@link #release release()} when the guarantees provided by the marked\r\n\t * range are no longer necessary. When calls to\r\n\t * `mark()`/`release()` are nested, the marks must be released\r\n\t * in reverse order of which they were obtained. Since marked regions are\r\n\t * used during performance-critical sections of prediction, the specific\r\n\t * behavior of invalid usage is unspecified (i.e. 
a mark is not released, or\r\n\t * a mark is released twice, or marks are not released in reverse order from\r\n\t * which they were created).\r\n\t *\r\n\t * The behavior of this method is unspecified if no call to an\r\n\t * {@link IntStream initializing method} has occurred after this stream was\r\n\t * constructed.\r\n\t *\r\n\t * This method does not change the current position in the input stream.\r\n\t *\r\n\t * The following example shows the use of {@link #mark mark()},\r\n\t * {@link #release release(mark)}, `index`, and\r\n\t * {@link #seek seek(index)} as part of an operation to safely work within a\r\n\t * marked region, then restore the stream position to its original value and\r\n\t * release the mark.\r\n\t *\r\n\t * ```\r\n\t * IntStream stream = ...;\r\n\t * int index = -1;\r\n\t * int mark = stream.mark();\r\n\t * try {\r\n\t * index = stream.index;\r\n\t * // perform work here...\r\n\t * } finally {\r\n\t * if (index != -1) {\r\n\t * stream.seek(index);\r\n\t * }\r\n\t * stream.release(mark);\r\n\t * }\r\n\t * ```\r\n\t *\r\n\t * @returns An opaque marker which should be passed to\r\n\t * {@link #release release()} when the marked range is no longer required.\r\n\t */\r\n\tmark(): number;\r\n\r\n\t/**\r\n\t * This method releases a marked range created by a call to\r\n\t * {@link #mark mark()}. Calls to `release()` must appear in the\r\n\t * reverse order of the corresponding calls to `mark()`. 
If a mark is\r\n\t * released twice, or if marks are not released in reverse order of the\r\n\t * corresponding calls to `mark()`, the behavior is unspecified.\r\n\t *\r\n\t * For more information and an example, see {@link #mark}.\r\n\t *\r\n\t * @param marker A marker returned by a call to `mark()`.\r\n\t * @see #mark\r\n\t */\r\n\trelease(marker: number): void;\r\n\r\n\t/**\r\n\t * Return the index into the stream of the input symbol referred to by\r\n\t * `LA(1)`.\r\n\t *\r\n\t * The behavior of this method is unspecified if no call to an\r\n\t * {@link IntStream initializing method} has occurred after this stream was\r\n\t * constructed.\r\n\t */\r\n\treadonly index: number;\r\n\r\n\t/**\r\n\t * Set the input cursor to the position indicated by `index`. If the\r\n\t * specified index lies past the end of the stream, the operation behaves as\r\n\t * though `index` was the index of the EOF symbol. After this method\r\n\t * returns without throwing an exception, then at least one of the following\r\n\t * will be true.\r\n\t *\r\n\t * * `index` will return the index of the first symbol\r\n\t * appearing at or after the specified `index`. Specifically,\r\n\t * implementations which filter their sources should automatically\r\n\t * adjust `index` forward the minimum amount required for the\r\n\t * operation to target a non-ignored symbol.\r\n\t * * `LA(1)` returns {@link #EOF}\r\n\t *\r\n\t * This operation is guaranteed to not throw an exception if `index`\r\n\t * lies within a marked region. For more information on marked regions, see\r\n\t * {@link #mark}. 
The behavior of this method is unspecified if no call to\r\n\t * an {@link IntStream initializing method} has occurred after this stream\r\n\t * was constructed.\r\n\t *\r\n\t * @param index The absolute index to seek to.\r\n\t *\r\n\t * @throws IllegalArgumentException if `index` is less than 0\r\n\t * @throws UnsupportedOperationException if the stream does not support\r\n\t * seeking to the specified index\r\n\t */\r\n\tseek(index: number): void;\r\n\r\n\t/**\r\n\t * Returns the total number of symbols in the stream, including a single EOF\r\n\t * symbol.\r\n\t *\r\n\t * @throws UnsupportedOperationException if the size of the stream is\r\n\t * unknown.\r\n\t */\r\n\treadonly size: number;\r\n\r\n\t/**\r\n\t * Gets the name of the underlying symbol source. This method returns a\r\n\t * non-undefined, non-empty string. If such a name is not known, this method\r\n\t * returns {@link #UNKNOWN_SOURCE_NAME}.\r\n\t */\r\n\t//@NotNull\r\n\treadonly sourceName: string;\r\n}\r\n", "/*!\r\n * Copyright 2016 The ANTLR Project. All rights reserved.\r\n * Licensed under the BSD-3-Clause license. See LICENSE file in the project root for license information.\r\n */\r\n// ConvertTo-TS run at 2016-10-04T11:26:49.0828748-07:00\r\n\r\nimport * as assert from \"assert\";\r\nimport { CharStream } from \"./CharStream\";\r\nimport { Arrays } from \"./misc/Arrays\";\r\nimport { Override } from \"./Decorators\";\r\nimport { IntStream } from \"./IntStream\";\r\nimport { Interval } from \"./misc/Interval\";\r\n\r\nconst READ_BUFFER_SIZE: number = 1024;\r\nconst INITIAL_BUFFER_SIZE: number = 1024;\r\n\r\n/**\r\n * Vacuum all input from a {@link Reader}/{@link InputStream} and then treat it\r\n * like a `char[]` buffer. 
Can also pass in a {@link String} or\r\n * `char[]` to use.\r\n *\r\n * If you need encoding, pass in stream/reader with correct encoding.\r\n *\r\n * @deprecated as of 4.7, please use `CharStreams` interface.\r\n */\r\nexport class ANTLRInputStream implements CharStream {\r\n\t/** The data being scanned */\r\n\tprotected data: string;\r\n\r\n\t/** How many characters are actually in the buffer */\r\n\tprotected n: number;\r\n\r\n\t/** 0..n-1 index into string of next char */\r\n\tprotected p: number = 0;\r\n\r\n\t/** What is name or source of this char stream? */\r\n\tpublic name?: string;\r\n\r\n\t/** Copy data in string to a local char array */\r\n\tconstructor(input: string) {\r\n\t\tthis.data = input;\r\n\t\tthis.n = input.length;\r\n\t}\r\n\r\n\t/** Reset the stream so that it's in the same state it was\r\n\t * when the object was created *except* the data array is not\r\n\t * touched.\r\n\t */\r\n\tpublic reset(): void {\r\n\t\tthis.p = 0;\r\n\t}\r\n\r\n\t@Override\r\n\tpublic consume(): void {\r\n\t\tif (this.p >= this.n) {\r\n\t\t\tassert(this.LA(1) === IntStream.EOF);\r\n\t\t\tthrow new Error(\"cannot consume EOF\");\r\n\t\t}\r\n\r\n\t\t//System.out.println(\"prev p=\"+p+\", c=\"+(char)data[p]);\r\n\t\tif (this.p < this.n) {\r\n\t\t\tthis.p++;\r\n\t\t\t//System.out.println(\"p moves to \"+p+\" (c='\"+(char)data[p]+\"')\");\r\n\t\t}\r\n\t}\r\n\r\n\t@Override\r\n\tpublic LA(i: number): number {\r\n\t\tif (i === 0) {\r\n\t\t\treturn 0; // undefined\r\n\t\t}\r\n\t\tif (i < 0) {\r\n\t\t\ti++; // e.g., translate LA(-1) to use offset i=0; then data[p+0-1]\r\n\t\t\tif ((this.p + i - 1) < 0) {\r\n\t\t\t\treturn IntStream.EOF; // invalid; no char before first char\r\n\t\t\t}\r\n\t\t}\r\n\r\n\t\tif ((this.p + i - 1) >= this.n) {\r\n\t\t\t//System.out.println(\"char LA(\"+i+\")=EOF; p=\"+p);\r\n\t\t\treturn IntStream.EOF;\r\n\t\t}\r\n\t\t//System.out.println(\"char LA(\"+i+\")=\"+(char)data[p+i-1]+\"; p=\"+p);\r\n\t\t//System.out.println(\"LA(\"+i+\"); p=\"+p+\" 
n=\"+n+\" data.length=\"+data.length);\r\n\t\treturn this.data.charCodeAt(this.p + i - 1);\r\n\t}\r\n\r\n\tpublic LT(i: number): number {\r\n\t\treturn this.LA(i);\r\n\t}\r\n\r\n\t/** Return the current input symbol index 0..n where n indicates the\r\n\t * last symbol has been read. The index is the index of char to\r\n\t * be returned from LA(1).\r\n\t */\r\n\t@Override\r\n\tget index(): number {\r\n\t\treturn this.p;\r\n\t}\r\n\r\n\t@Override\r\n\tget size(): number {\r\n\t\treturn this.n;\r\n\t}\r\n\r\n\t/** mark/release do nothing; we have entire buffer */\r\n\t@Override\r\n\tpublic mark(): number {\r\n\t\treturn -1;\r\n\t}\r\n\r\n\t@Override\r\n\tpublic release(marker: number): void {\r\n\t\t// No default implementation since this stream buffers the entire input\r\n\t}\r\n\r\n\t/** consume() ahead until p==index; can't just set p=index as we must\r\n\t * update line and charPositionInLine. If we seek backwards, just set p\r\n\t */\r\n\t@Override\r\n\tpublic seek(index: number): void {\r\n\t\tif (index <= this.p) {\r\n\t\t\tthis.p = index; // just jump; don't update stream state (line, ...)\r\n\t\t\treturn;\r\n\t\t}\r\n\t\t// seek forward, consume until p hits index or n (whichever comes first)\r\n\t\tindex = Math.min(index, this.n);\r\n\t\twhile (this.p < index) {\r\n\t\t\tthis.consume();\r\n\t\t}\r\n\t}\r\n\r\n\t@Override\r\n\tpublic getText(interval: Interval): string {\r\n\t\tlet start: number = interval.a;\r\n\t\tlet stop: number = interval.b;\r\n\t\tif (stop >= this.n) {\r\n\t\t\tstop = this.n - 1;\r\n\t\t}\r\n\t\tlet count: number = stop - start + 1;\r\n\t\tif (start >= this.n) {\r\n\t\t\treturn \"\";\r\n\t\t}\r\n\t\t// System.err.println(\"data: \"+Arrays.toString(data)+\", n=\"+n+\r\n\t\t// \t\t\t\t \", start=\"+start+\r\n\t\t// \t\t\t\t \", stop=\"+stop);\r\n\t\treturn this.data.substr(start, count);\r\n\t}\r\n\r\n\t@Override\r\n\tget sourceName(): string {\r\n\t\tif (!this.name) {\r\n\t\t\treturn IntStream.UNKNOWN_SOURCE_NAME;\r\n\t\t}\r\n\t\treturn 
this.name;\r\n\t}\r\n\r\n\t@Override\r\n\tpublic toString() { return this.data; }\r\n}\r\n", "/*!\r\n * Copyright 2016 The ANTLR Project. All rights reserved.\r\n * Licensed under the BSD-3-Clause license. See LICENSE file in the project root for license information.\r\n */\r\n\r\n// ConvertTo-TS run at 2016-10-04T11:26:27.4734328-07:00\r\n\r\nimport { ATN } from \"./ATN\";\r\nimport { ATNStateType } from \"./ATNStateType\";\r\nimport { IntervalSet } from \"../misc/IntervalSet\";\r\nimport { Override } from \"../Decorators\";\r\nimport { Transition } from \"./Transition\";\r\n\r\nconst INITIAL_NUM_TRANSITIONS: number = 4;\r\n\r\n/**\r\n * The following images show the relation of states and\r\n * {@link ATNState#transitions} for various grammar constructs.\r\n *\r\n * * Solid edges marked with an ε indicate a required\r\n * {@link EpsilonTransition}.\r\n *\r\n * * Dashed edges indicate locations where any transition derived from\r\n * {@link Transition} might appear.\r\n *\r\n * * Dashed nodes are place holders for either a sequence of linked\r\n * {@link BasicState} states or the inclusion of a block representing a nested\r\n * construct in one of the forms below.\r\n *\r\n * * Nodes showing multiple outgoing alternatives with a `...` support\r\n * any number of alternatives (one or more). Nodes without the `...` only\r\n * support the exact number of alternatives shown in the diagram.\r\n *\r\n *

Basic Blocks

\r\n *\r\n *

Rule

\r\n *\r\n * \r\n *\r\n *

Block of 1 or more alternatives

\r\n *\r\n * \r\n *\r\n *

Greedy Loops

\r\n *\r\n *

Greedy Closure: `(...)*`

\r\n *\r\n * \r\n *\r\n *

Greedy Positive Closure: `(...)+`

\r\n *\r\n * \r\n *\r\n *

Greedy Optional: `(...)?`

\r\n *\r\n * \r\n *\r\n *

Non-Greedy Loops

\r\n *\r\n *

Non-Greedy Closure: `(...)*?`

\r\n *\r\n * \r\n *\r\n *

Non-Greedy Positive Closure: `(...)+?`

\r\n *\r\n * \r\n *\r\n *

Non-Greedy Optional: `(...)??`

\r\n *\r\n * \r\n */\r\nexport abstract class ATNState {\r\n\r\n\t/** Which ATN are we in? */\r\n\tpublic atn?: ATN;\r\n\r\n\tpublic stateNumber: number = ATNState.INVALID_STATE_NUMBER;\r\n\r\n\tpublic ruleIndex: number = 0; // at runtime, we don't have Rule objects\r\n\r\n\tpublic epsilonOnlyTransitions: boolean = false;\r\n\r\n\t/** Track the transitions emanating from this ATN state. */\r\n\tprotected transitions: Transition[] = [];\r\n\r\n\tprotected optimizedTransitions: Transition[] = this.transitions;\r\n\r\n\t/** Used to cache lookahead during parsing, not used during construction */\r\n\tpublic nextTokenWithinRule?: IntervalSet;\r\n\r\n\t/**\r\n\t * Gets the state number.\r\n\t *\r\n\t * @returns the state number\r\n\t */\r\n\tpublic getStateNumber(): number {\r\n\t\treturn this.stateNumber;\r\n\t}\r\n\r\n\t/**\r\n\t * For all states except {@link RuleStopState}, this returns the state\r\n\t * number. Returns -1 for stop states.\r\n\t *\r\n\t * @returns -1 for {@link RuleStopState}, otherwise the state number\r\n\t */\r\n\tget nonStopStateNumber(): number {\r\n\t\treturn this.getStateNumber();\r\n\t}\r\n\r\n\t@Override\r\n\tpublic hashCode(): number {\r\n\t\treturn this.stateNumber;\r\n\t}\r\n\r\n\t@Override\r\n\tpublic equals(o: any): boolean {\r\n\t\t// are these states same object?\r\n\t\tif (o instanceof ATNState) {\r\n\t\t\treturn this.stateNumber === o.stateNumber;\r\n\t\t}\r\n\r\n\t\treturn false;\r\n\t}\r\n\r\n\tget isNonGreedyExitState(): boolean {\r\n\t\treturn false;\r\n\t}\r\n\r\n\t@Override\r\n\tpublic toString(): string {\r\n\t\treturn String(this.stateNumber);\r\n\t}\r\n\r\n\tpublic getTransitions(): Transition[] {\r\n\t\treturn this.transitions.slice(0);\r\n\t}\r\n\r\n\tget numberOfTransitions(): number {\r\n\t\treturn this.transitions.length;\r\n\t}\r\n\r\n\tpublic addTransition(e: Transition, index?: number): void {\r\n\t\tif (this.transitions.length === 0) {\r\n\t\t\tthis.epsilonOnlyTransitions = e.isEpsilon;\r\n\t\t}\r\n\t\telse if 
(this.epsilonOnlyTransitions !== e.isEpsilon) {\r\n\t\t\tthis.epsilonOnlyTransitions = false;\r\n\t\t\tthrow new Error(\"ATN state \" + this.stateNumber + \" has both epsilon and non-epsilon transitions.\");\r\n\t\t}\r\n\r\n\t\tthis.transitions.splice(index !== undefined ? index : this.transitions.length, 0, e);\r\n\t}\r\n\r\n\tpublic transition(i: number): Transition {\r\n\t\treturn this.transitions[i];\r\n\t}\r\n\r\n\tpublic setTransition(i: number, e: Transition): void {\r\n\t\tthis.transitions[i] = e;\r\n\t}\r\n\r\n\tpublic removeTransition(index: number): Transition {\r\n\t\treturn this.transitions.splice(index, 1)[0];\r\n\t}\r\n\r\n\tpublic abstract readonly stateType: ATNStateType;\r\n\r\n\tget onlyHasEpsilonTransitions(): boolean {\r\n\t\treturn this.epsilonOnlyTransitions;\r\n\t}\r\n\r\n\tpublic setRuleIndex(ruleIndex: number): void {\r\n\t\tthis.ruleIndex = ruleIndex;\r\n\t}\r\n\r\n\tget isOptimized(): boolean {\r\n\t\treturn this.optimizedTransitions !== this.transitions;\r\n\t}\r\n\r\n\tget numberOfOptimizedTransitions(): number {\r\n\t\treturn this.optimizedTransitions.length;\r\n\t}\r\n\r\n\tpublic getOptimizedTransition(i: number): Transition {\r\n\t\treturn this.optimizedTransitions[i];\r\n\t}\r\n\r\n\tpublic addOptimizedTransition(e: Transition): void {\r\n\t\tif (!this.isOptimized) {\r\n\t\t\tthis.optimizedTransitions = new Array();\r\n\t\t}\r\n\r\n\t\tthis.optimizedTransitions.push(e);\r\n\t}\r\n\r\n\tpublic setOptimizedTransition(i: number, e: Transition): void {\r\n\t\tif (!this.isOptimized) {\r\n\t\t\tthrow new Error(\"This ATNState is not optimized.\");\r\n\t\t}\r\n\r\n\t\tthis.optimizedTransitions[i] = e;\r\n\t}\r\n\r\n\tpublic removeOptimizedTransition(i: number): void {\r\n\t\tif (!this.isOptimized) {\r\n\t\t\tthrow new Error(\"This ATNState is not optimized.\");\r\n\t\t}\r\n\r\n\t\tthis.optimizedTransitions.splice(i, 1);\r\n\t}\r\n}\r\n\r\nexport namespace ATNState {\r\n\texport const INVALID_STATE_NUMBER: number = -1;\r\n}\r\n", "/*!\r\n 
* Copyright 2016 The ANTLR Project. All rights reserved.\r\n * Licensed under the BSD-3-Clause license. See LICENSE file in the project root for license information.\r\n */\r\n\r\n// ConvertTo-TS run at 2016-10-04T11:26:27.4734328-07:00\r\n\r\nexport enum ATNStateType {\r\n\tINVALID_TYPE = 0,\r\n\tBASIC = 1,\r\n\tRULE_START = 2,\r\n\tBLOCK_START = 3,\r\n\tPLUS_BLOCK_START = 4,\r\n\tSTAR_BLOCK_START = 5,\r\n\tTOKEN_START = 6,\r\n\tRULE_STOP = 7,\r\n\tBLOCK_END = 8,\r\n\tSTAR_LOOP_BACK = 9,\r\n\tSTAR_LOOP_ENTRY = 10,\r\n\tPLUS_LOOP_BACK = 11,\r\n\tLOOP_END = 12,\r\n}\r\n", "/*!\r\n * Copyright 2016 The ANTLR Project. All rights reserved.\r\n * Licensed under the BSD-3-Clause license. See LICENSE file in the project root for license information.\r\n */\r\n\r\n// ConvertTo-TS run at 2016-10-04T11:26:57.0697674-07:00\r\nimport { CharStream } from \"./CharStream\";\r\nimport { IntervalSet } from \"./misc/IntervalSet\";\r\nimport { IntStream } from \"./IntStream\";\r\nimport { Lexer } from \"./Lexer\";\r\nimport { Parser } from \"./Parser\";\r\nimport { ParserRuleContext } from \"./ParserRuleContext\";\r\nimport { Recognizer } from \"./Recognizer\";\r\nimport { RuleContext } from \"./RuleContext\";\r\nimport { Token } from \"./Token\";\r\n\r\n\r\n/** The root of the ANTLR exception hierarchy. In general, ANTLR tracks just\r\n * 3 kinds of errors: prediction errors, failed predicate errors, and\r\n * mismatched input errors. In each case, the parser knows where it is\r\n * in the input, where it is in the ATN, the rule invocation stack,\r\n * and what kind of problem occurred.\r\n */\r\nexport class RecognitionException extends Error {\r\n\t// private static serialVersionUID: number = -3861826954750022374L;\r\n\r\n\t/** The {@link Recognizer} where this exception originated. */\r\n\tprivate _recognizer?: Recognizer;\r\n\r\n\tprivate ctx?: RuleContext;\r\n\r\n\tprivate input?: IntStream;\r\n\r\n\t/**\r\n\t * The current {@link Token} when an error occurred. 
Since not all streams\r\n\t * support accessing symbols by index, we have to track the {@link Token}\r\n\t * instance itself.\r\n\t */\r\n\tprivate offendingToken?: Token;\r\n\r\n\tprivate _offendingState: number = -1;\r\n\r\n\tconstructor(\r\n\t\tlexer: Lexer | undefined,\r\n\t\tinput: CharStream);\r\n\r\n\tconstructor(\r\n\t\trecognizer: Recognizer | undefined,\r\n\t\tinput: IntStream | undefined,\r\n\t\tctx: ParserRuleContext | undefined);\r\n\r\n\tconstructor(\r\n\t\trecognizer: Recognizer | undefined,\r\n\t\tinput: IntStream | undefined,\r\n\t\tctx: ParserRuleContext | undefined,\r\n\t\tmessage: string);\r\n\r\n\tconstructor(\r\n\t\trecognizer: Lexer | Recognizer | undefined,\r\n\t\tinput: CharStream | IntStream | undefined,\r\n\t\tctx?: ParserRuleContext,\r\n\t\tmessage?: string) {\r\n\t\tsuper(message);\r\n\r\n\t\tthis._recognizer = recognizer;\r\n\t\tthis.input = input;\r\n\t\tthis.ctx = ctx;\r\n\t\tif (recognizer) {\r\n\t\t\tthis._offendingState = recognizer.state;\r\n\t\t}\r\n\t}\r\n\r\n\t/**\r\n\t * Get the ATN state number the parser was in at the time the error\r\n\t * occurred. For {@link NoViableAltException} and\r\n\t * {@link LexerNoViableAltException} exceptions, this is the\r\n\t * {@link DecisionState} number. 
For others, it is the state whose outgoing\r\n\t * edge we couldn't match.\r\n\t *\r\n\t * If the state number is not known, this method returns -1.\r\n\t */\r\n\tget offendingState(): number {\r\n\t\treturn this._offendingState;\r\n\t}\r\n\r\n\tprotected setOffendingState(offendingState: number): void {\r\n\t\tthis._offendingState = offendingState;\r\n\t}\r\n\r\n\t/**\r\n\t * Gets the set of input symbols which could potentially follow the\r\n\t * previously matched symbol at the time this exception was thrown.\r\n\t *\r\n\t * If the set of expected tokens is not known and could not be computed,\r\n\t * this method returns `undefined`.\r\n\t *\r\n\t * @returns The set of token types that could potentially follow the current\r\n\t * state in the ATN, or `undefined` if the information is not available.\r\n\t */\r\n\tget expectedTokens(): IntervalSet | undefined {\r\n\t\tif (this._recognizer) {\r\n\t\t\treturn this._recognizer.atn.getExpectedTokens(this._offendingState, this.ctx);\r\n\t\t}\r\n\t\treturn undefined;\r\n\t}\r\n\r\n\t/**\r\n\t * Gets the {@link RuleContext} at the time this exception was thrown.\r\n\t *\r\n\t * If the context is not available, this method returns `undefined`.\r\n\t *\r\n\t * @returns The {@link RuleContext} at the time this exception was thrown.\r\n\t * If the context is not available, this method returns `undefined`.\r\n\t */\r\n\tget context(): RuleContext | undefined {\r\n\t\treturn this.ctx;\r\n\t}\r\n\r\n\t/**\r\n\t * Gets the input stream which is the symbol source for the recognizer where\r\n\t * this exception was thrown.\r\n\t *\r\n\t * If the input stream is not available, this method returns `undefined`.\r\n\t *\r\n\t * @returns The input stream which is the symbol source for the recognizer\r\n\t * where this exception was thrown, or `undefined` if the stream is not\r\n\t * available.\r\n\t */\r\n\r\n\tget inputStream(): IntStream | undefined {\r\n\t\treturn this.input;\r\n\t}\r\n\r\n\tpublic getOffendingToken(recognizer?: 
Recognizer): Token | undefined {\r\n\t\tif (recognizer && recognizer !== this._recognizer) {\r\n\t\t\treturn undefined;\r\n\t\t}\r\n\t\treturn this.offendingToken;\r\n\t}\r\n\r\n\tprotected setOffendingToken(\r\n\t\trecognizer: Recognizer,\r\n\t\toffendingToken?: TSymbol): void {\r\n\t\tif (recognizer === this._recognizer) {\r\n\t\t\tthis.offendingToken = offendingToken;\r\n\t\t}\r\n\t}\r\n\r\n\t/**\r\n\t * Gets the {@link Recognizer} where this exception occurred.\r\n\t *\r\n\t * If the recognizer is not available, this method returns `undefined`.\r\n\t *\r\n\t * @returns The recognizer where this exception occurred, or `undefined` if\r\n\t * the recognizer is not available.\r\n\t */\r\n\tget recognizer(): Recognizer | undefined {\r\n\t\treturn this._recognizer;\r\n\t}\r\n}\r\n", "/*!\r\n * Copyright 2016 The ANTLR Project. All rights reserved.\r\n * Licensed under the BSD-3-Clause license. See LICENSE file in the project root for license information.\r\n */\r\n\r\n// ConvertTo-TS run at 2016-10-04T11:26:37.8530496-07:00\r\n\r\nimport { ATNState } from \"./ATNState\";\r\nimport { IntervalSet } from \"../misc/IntervalSet\";\r\nimport { NotNull } from \"../Decorators\";\r\nimport { TransitionType } from \"./TransitionType\";\r\n\r\n/** An ATN transition between any two ATN states. Subclasses define\r\n * atom, set, epsilon, action, predicate, rule transitions.\r\n *\r\n * This is a one way link. It emanates from a state (usually via a list of\r\n * transitions) and has a target state.\r\n *\r\n * Since we never have to change the ATN transitions once we construct it,\r\n * we can fix these transitions as specific classes. The DFA transitions\r\n * on the other hand need to update the labels as it adds transitions to\r\n * the states. 
We'll use the term Edge for the DFA to distinguish them from\r\n * ATN transitions.\r\n */\r\nexport abstract class Transition {\r\n\tpublic static readonly serializationNames: string[] = [\r\n\t\t\"INVALID\",\r\n\t\t\"EPSILON\",\r\n\t\t\"RANGE\",\r\n\t\t\"RULE\",\r\n\t\t\"PREDICATE\",\r\n\t\t\"ATOM\",\r\n\t\t\"ACTION\",\r\n\t\t\"SET\",\r\n\t\t\"NOT_SET\",\r\n\t\t\"WILDCARD\",\r\n\t\t\"PRECEDENCE\",\r\n\t];\r\n\r\n\t// @SuppressWarnings(\"serial\")\r\n\t// static serializationTypes: Map, number> =\r\n\t// \tCollections.unmodifiableMap(new HashMap, Integer>() {{\r\n\t// \t\tput(EpsilonTransition.class, EPSILON);\r\n\t// \t\tput(RangeTransition.class, RANGE);\r\n\t// \t\tput(RuleTransition.class, RULE);\r\n\t// \t\tput(PredicateTransition.class, PREDICATE);\r\n\t// \t\tput(AtomTransition.class, ATOM);\r\n\t// \t\tput(ActionTransition.class, ACTION);\r\n\t// \t\tput(SetTransition.class, SET);\r\n\t// \t\tput(NotSetTransition.class, NOT_SET);\r\n\t// \t\tput(WildcardTransition.class, WILDCARD);\r\n\t// \t\tput(PrecedencePredicateTransition.class, PRECEDENCE);\r\n\t// \t}});\r\n\r\n\t/** The target of this transition. 
*/\r\n\t@NotNull\r\n\tpublic target: ATNState;\r\n\r\n\tconstructor(@NotNull target: ATNState) {\r\n\t\tif (target == null) {\r\n\t\t\tthrow new Error(\"target cannot be null.\");\r\n\t\t}\r\n\r\n\t\tthis.target = target;\r\n\t}\r\n\r\n\tpublic abstract readonly serializationType: TransitionType;\r\n\r\n\t/**\r\n\t * Determines if the transition is an \"epsilon\" transition.\r\n\t *\r\n\t * The default implementation returns `false`.\r\n\t *\r\n\t * @returns `true` if traversing this transition in the ATN does not\r\n\t * consume an input symbol; otherwise, `false` if traversing this\r\n\t * transition consumes (matches) an input symbol.\r\n\t */\r\n\tget isEpsilon(): boolean {\r\n\t\treturn false;\r\n\t}\r\n\r\n\tget label(): IntervalSet | undefined {\r\n\t\treturn undefined;\r\n\t}\r\n\r\n\tpublic abstract matches(symbol: number, minVocabSymbol: number, maxVocabSymbol: number): boolean;\r\n}\r\n", "/*!\r\n * Copyright 2016 The ANTLR Project. All rights reserved.\r\n * Licensed under the BSD-3-Clause license. See LICENSE file in the project root for license information.\r\n */\r\n\r\n// ConvertTo-TS run at 2016-10-04T11:26:24.6596177-07:00\r\n\r\nimport { ATNState } from \"./ATNState\";\r\nimport { Transition } from \"./Transition\";\r\n\r\n/**\r\n *\r\n * @author Sam Harwell\r\n */\r\nexport abstract class AbstractPredicateTransition extends Transition {\r\n\r\n\tconstructor(target: ATNState) {\r\n\t\tsuper(target);\r\n\t}\r\n\r\n}\r\n", "/*!\r\n * Copyright 2016 The ANTLR Project. All rights reserved.\r\n * Licensed under the BSD-3-Clause license. 
See LICENSE file in the project root for license information.\r\n */\r\n\r\n// ConvertTo-TS run at 2016-10-03T02:09:42.1239660-07:00\r\nimport { Equatable } from \"./Stubs\";\r\n\r\n/**\r\n *\r\n * @author Sam Harwell\r\n */\r\nexport namespace MurmurHash {\r\n\r\n\tconst DEFAULT_SEED: number = 0;\r\n\r\n\t/**\r\n\t * Initialize the hash using the specified `seed`.\r\n\t *\r\n\t * @param seed the seed (optional)\r\n\t * @returns the intermediate hash value\r\n\t */\r\n\texport function initialize(seed: number = DEFAULT_SEED): number {\r\n\t\treturn seed;\r\n\t}\r\n\r\n\t/**\r\n\t * Update the intermediate hash value for the next input `value`.\r\n\t *\r\n\t * @param hash the intermediate hash value\r\n\t * @param value the value to add to the current hash\r\n\t * @returns the updated intermediate hash value\r\n\t */\r\n\texport function update(hash: number, value: number | string | Equatable | null | undefined): number {\r\n\t\tconst c1: number = 0xCC9E2D51;\r\n\t\tconst c2: number = 0x1B873593;\r\n\t\tconst r1: number = 15;\r\n\t\tconst r2: number = 13;\r\n\t\tconst m: number = 5;\r\n\t\tconst n: number = 0xE6546B64;\r\n\r\n\t\tif (value == null) {\r\n\t\t\tvalue = 0;\r\n\t\t} else if (typeof value === \"string\") {\r\n\t\t\tvalue = hashString(value);\r\n\t\t} else if (typeof value === \"object\") {\r\n\t\t\tvalue = value.hashCode();\r\n\t\t}\r\n\r\n\t\tlet k: number = value;\r\n\t\tk = Math.imul(k, c1);\r\n\t\tk = (k << r1) | (k >>> (32 - r1));\r\n\t\tk = Math.imul(k, c2);\r\n\r\n\t\thash = hash ^ k;\r\n\t\thash = (hash << r2) | (hash >>> (32 - r2));\r\n\t\thash = Math.imul(hash, m) + n;\r\n\r\n\t\treturn hash & 0xFFFFFFFF;\r\n\t}\r\n\r\n\r\n\t/**\r\n\t * Apply the final computation steps to the intermediate value `hash`\r\n\t * to form the final result of the MurmurHash 3 hash function.\r\n\t *\r\n\t * @param hash the intermediate hash value\r\n\t * @param numberOfWords the number of integer values added to the hash\r\n\t * @returns the final hash result\r\n\t 
*/\r\n\texport function finish(hash: number, numberOfWords: number): number {\r\n\t\thash = hash ^ (numberOfWords * 4);\r\n\t\thash = hash ^ (hash >>> 16);\r\n\t\thash = Math.imul(hash, 0x85EBCA6B);\r\n\t\thash = hash ^ (hash >>> 13);\r\n\t\thash = Math.imul(hash, 0xC2B2AE35);\r\n\t\thash = hash ^ (hash >>> 16);\r\n\t\treturn hash;\r\n\t}\r\n\r\n\t/**\r\n\t * Utility function to compute the hash code of an array using the\r\n\t * MurmurHash algorithm.\r\n\t *\r\n\t * @param the array element type\r\n\t * @param data the array data\r\n\t * @param seed the seed for the MurmurHash algorithm\r\n\t * @returns the hash code of the data\r\n\t */\r\n\texport function hashCode(data: Iterable, seed: number = DEFAULT_SEED): number {\r\n\t\tlet hash: number = initialize(seed);\r\n\t\tlet length = 0;\r\n\t\tfor (let value of data) {\r\n\t\t\thash = update(hash, value);\r\n\t\t\tlength++;\r\n\t\t}\r\n\r\n\t\thash = finish(hash, length);\r\n\t\treturn hash;\r\n\t}\r\n\r\n\t/**\r\n\t * Function to hash a string. Based on the implementation found here:\r\n\t * http://stackoverflow.com/a/7616484\r\n\t */\r\n\tfunction hashString(str: string): number {\r\n\t\tlet len = str.length;\r\n\t\tif (len === 0) {\r\n\t\t\treturn 0;\r\n\t\t}\r\n\r\n\t\tlet hash = 0;\r\n\t\tfor (let i = 0; i < len; i++) {\r\n\t\t\tlet c = str.charCodeAt(i);\r\n\t\t\thash = (((hash << 5) >>> 0) - hash) + c;\r\n\t\t\thash |= 0;\r\n\t\t}\r\n\r\n\t\treturn hash;\r\n\t}\r\n}\r\n", "/*!\r\n * Copyright 2016 The ANTLR Project. All rights reserved.\r\n * Licensed under the BSD-3-Clause license. 
See LICENSE file in the project root for license information.\r\n */\r\n\r\n// ConvertTo-TS run at 2016-10-03T02:09:42.2127260-07:00\r\nimport { EqualityComparator } from \"./EqualityComparator\";\r\nimport { Override } from \"../Decorators\";\r\nimport { Equatable } from \"./Stubs\";\r\n\r\n/**\r\n * This default implementation of {@link EqualityComparator} uses object equality\r\n * for comparisons by calling {@link Object#hashCode} and {@link Object#equals}.\r\n *\r\n * @author Sam Harwell\r\n */\r\nexport class ObjectEqualityComparator implements EqualityComparator {\r\n\tpublic static readonly INSTANCE: ObjectEqualityComparator = new ObjectEqualityComparator();\r\n\r\n\t/**\r\n\t * {@inheritDoc}\r\n\t *\r\n\t * This implementation returns\r\n\t * `obj.`{@link Object#hashCode hashCode()}.\r\n\t */\r\n\t@Override\r\n\tpublic hashCode(obj: Equatable | null | undefined): number {\r\n\t\tif (obj == null) {\r\n\t\t\treturn 0;\r\n\t\t}\r\n\r\n\t\treturn obj.hashCode();\r\n\t}\r\n\r\n\t/**\r\n\t * {@inheritDoc}\r\n\t *\r\n\t * This implementation relies on object equality. If both objects are\r\n\t * `undefined` or `null`, this method returns `true`. Otherwise if only\r\n\t * `a` is `undefined` or `null`, this method returns `false`. Otherwise,\r\n\t * this method returns the result of\r\n\t * `a.`{@link Object#equals equals}`(b)`.\r\n\t */\r\n\t@Override\r\n\tpublic equals(a: Equatable | null | undefined, b: Equatable | null | undefined): boolean {\r\n\t\tif (a == null) {\r\n\t\t\treturn b == null;\r\n\t\t}\r\n\r\n\t\treturn a.equals(b);\r\n\t}\r\n\r\n}\r\n", "/*!\r\n * Copyright 2016 The ANTLR Project. All rights reserved.\r\n * Licensed under the BSD-3-Clause license. 
See LICENSE file in the project root for license information.\r\n */\r\n\r\nimport { EqualityComparator } from \"./EqualityComparator\";\r\nimport { Override } from \"../Decorators\";\r\nimport { Equatable } from \"./Stubs\";\r\nimport { MurmurHash } from \"./MurmurHash\";\r\nimport { ObjectEqualityComparator } from \"./ObjectEqualityComparator\";\r\n\r\n/**\r\n * This default implementation of {@link EqualityComparator} uses object equality\r\n * for comparisons by calling {@link Object#hashCode} and {@link Object#equals}.\r\n *\r\n * @author Sam Harwell\r\n */\r\nexport class DefaultEqualityComparator implements EqualityComparator {\r\n\tpublic static readonly INSTANCE: DefaultEqualityComparator = new DefaultEqualityComparator();\r\n\r\n\t/**\r\n\t * {@inheritDoc}\r\n\t *\r\n\t * This implementation returns\r\n\t * `obj.`{@link Object#hashCode hashCode()}.\r\n\t */\r\n\t@Override\r\n\tpublic hashCode(obj: any): number {\r\n\t\tif (obj == null) {\r\n\t\t\treturn 0;\r\n\t\t} else if (typeof obj === \"string\" || typeof obj === \"number\") {\r\n\t\t\treturn MurmurHash.hashCode([obj]);\r\n\t\t} else {\r\n\t\t\treturn ObjectEqualityComparator.INSTANCE.hashCode(obj as Equatable);\r\n\t\t}\r\n\t}\r\n\r\n\t/**\r\n\t * {@inheritDoc}\r\n\t *\r\n\t * This implementation relies on object equality. If both objects are\r\n\t * `undefined` or `null`, this method returns `true`. Otherwise if only\r\n\t * `a` is `undefined` or `null`, this method returns `false`. Otherwise,\r\n\t * this method returns the result of\r\n\t * `a.`{@link Object#equals equals}`(b)`.\r\n\t */\r\n\t@Override\r\n\tpublic equals(a: any, b: any): boolean {\r\n\t\tif (a == null) {\r\n\t\t\treturn b == null;\r\n\t\t} else if (typeof a === \"string\" || typeof a === \"number\") {\r\n\t\t\treturn a === b;\r\n\t\t} else {\r\n\t\t\treturn ObjectEqualityComparator.INSTANCE.equals(a as Equatable, b as Equatable);\r\n\t\t}\r\n\t}\r\n}\r\n", "/*!\r\n * Copyright 2016 The ANTLR Project. 
All rights reserved.\r\n * Licensed under the BSD-3-Clause license. See LICENSE file in the project root for license information.\r\n */\r\n\r\n// ConvertTo-TS run at 2016-10-03T02:09:41.7434086-07:00\r\n\r\nimport * as assert from \"assert\";\r\nimport { DefaultEqualityComparator } from \"./DefaultEqualityComparator\";\r\nimport { EqualityComparator } from \"./EqualityComparator\";\r\nimport { NotNull, Nullable, Override, SuppressWarnings } from \"../Decorators\";\r\nimport { JavaCollection, JavaSet } from \"./Stubs\";\r\nimport { ObjectEqualityComparator } from \"./ObjectEqualityComparator\";\r\nimport { MurmurHash } from \"./MurmurHash\";\r\n\r\n/** {@link Set} implementation with closed hashing (open addressing). */\r\n\r\n// NOTE: JavaScript's Set interface has on significant different diffrence from Java's:\r\n// \t\t e.g. the return type of add() differs!\r\n// For this reason I've commented tweaked the implements clause\r\n\r\nconst INITAL_CAPACITY: number = 16; // must be power of 2\r\nconst LOAD_FACTOR: number = 0.75;\r\n\r\nexport class Array2DHashSet implements JavaSet {\r\n\t@NotNull\r\n\tprotected comparator: EqualityComparator;\r\n\r\n\tprotected buckets: Array;\r\n\r\n\t/** How many elements in set */\r\n\tprotected n: number = 0;\r\n\r\n\tprotected threshold: number = Math.floor(INITAL_CAPACITY * LOAD_FACTOR); // when to expand\r\n\r\n\tconstructor(comparator?: EqualityComparator, initialCapacity?: number);\r\n\tconstructor(set: Array2DHashSet);\r\n\tconstructor(\r\n\t\tcomparatorOrSet?: EqualityComparator | Array2DHashSet,\r\n\t\tinitialCapacity: number = INITAL_CAPACITY) {\r\n\r\n\t\tif (comparatorOrSet instanceof Array2DHashSet) {\r\n\t\t\tthis.comparator = comparatorOrSet.comparator;\r\n\t\t\tthis.buckets = comparatorOrSet.buckets.slice(0);\r\n\t\t\tfor (let i = 0; i < this.buckets.length; i++) {\r\n\t\t\t\tlet bucket = this.buckets[i];\r\n\t\t\t\tif (bucket) {\r\n\t\t\t\t\tthis.buckets[i] = 
bucket.slice(0);\r\n\t\t\t\t}\r\n\t\t\t}\r\n\r\n\t\t\tthis.n = comparatorOrSet.n;\r\n\t\t\tthis.threshold = comparatorOrSet.threshold;\r\n\t\t} else {\r\n\t\t\tthis.comparator = comparatorOrSet || DefaultEqualityComparator.INSTANCE;\r\n\t\t\tthis.buckets = this.createBuckets(initialCapacity);\r\n\t\t}\r\n\t}\r\n\r\n\t/**\r\n\t * Add `o` to set if not there; return existing value if already\r\n\t * there. This method performs the same operation as {@link #add} aside from\r\n\t * the return value.\r\n\t */\r\n\tpublic getOrAdd(o: T): T {\r\n\t\tif (this.n > this.threshold) {\r\n\t\t\tthis.expand();\r\n\t\t}\r\n\t\treturn this.getOrAddImpl(o);\r\n\t}\r\n\r\n\tprotected getOrAddImpl(o: T): T {\r\n\t\tlet b: number = this.getBucket(o);\r\n\t\tlet bucket = this.buckets[b];\r\n\r\n\t\t// NEW BUCKET\r\n\t\tif (!bucket) {\r\n\t\t\tbucket = [o];\r\n\t\t\tthis.buckets[b] = bucket;\r\n\t\t\tthis.n++;\r\n\t\t\treturn o;\r\n\t\t}\r\n\r\n\t\t// LOOK FOR IT IN BUCKET\r\n\t\tfor (let existing of bucket) {\r\n\t\t\tif (this.comparator.equals(existing, o)) {\r\n\t\t\t\treturn existing; // found existing, quit\r\n\t\t\t}\r\n\t\t}\r\n\r\n\t\t// FULL BUCKET, expand and add to end\r\n\t\tbucket.push(o);\r\n\t\tthis.n++;\r\n\t\treturn o;\r\n\t}\r\n\r\n\tpublic get(o: T): T | undefined {\r\n\t\tif (o == null) {\r\n\t\t\treturn o;\r\n\t\t}\r\n\t\tlet b: number = this.getBucket(o);\r\n\t\tlet bucket = this.buckets[b];\r\n\t\tif (!bucket) {\r\n\t\t\t// no bucket\r\n\t\t\treturn undefined;\r\n\t\t}\r\n\r\n\t\tfor (let e of bucket) {\r\n\t\t\tif (this.comparator.equals(e, o)) {\r\n\t\t\t\treturn e;\r\n\t\t\t}\r\n\t\t}\r\n\r\n\t\treturn undefined;\r\n\t}\r\n\r\n\tprotected getBucket(o: T): number {\r\n\t\tlet hash: number = this.comparator.hashCode(o);\r\n\t\tlet b: number = hash & (this.buckets.length - 1); // assumes len is power of 2\r\n\t\treturn b;\r\n\t}\r\n\r\n\t@Override\r\n\tpublic hashCode(): number {\r\n\t\tlet hash: number = MurmurHash.initialize();\r\n\t\tfor (let bucket of 
this.buckets) {\r\n\t\t\tif (bucket == null) {\r\n\t\t\t\tcontinue;\r\n\t\t\t}\r\n\t\t\tfor (let o of bucket) {\r\n\t\t\t\tif (o == null) {\r\n\t\t\t\t\tbreak;\r\n\t\t\t\t}\r\n\t\t\t\thash = MurmurHash.update(hash, this.comparator.hashCode(o));\r\n\t\t\t}\r\n\t\t}\r\n\r\n\t\thash = MurmurHash.finish(hash, this.size);\r\n\t\treturn hash;\r\n\t}\r\n\r\n\t@Override\r\n\tpublic equals(o: any): boolean {\r\n\t\tif (o === this) {\r\n\t\t\treturn true;\r\n\t\t}\r\n\t\tif (!(o instanceof Array2DHashSet)) {\r\n\t\t\treturn false;\r\n\t\t}\r\n\t\tif (o.size !== this.size) {\r\n\t\t\treturn false;\r\n\t\t}\r\n\t\tlet same: boolean = this.containsAll(o);\r\n\t\treturn same;\r\n\t}\r\n\r\n\tprotected expand(): void {\r\n\t\tlet old = this.buckets;\r\n\t\tlet newCapacity: number = this.buckets.length * 2;\r\n\t\tlet newTable: Array = this.createBuckets(newCapacity);\r\n\t\tthis.buckets = newTable;\r\n\t\tthis.threshold = Math.floor(newCapacity * LOAD_FACTOR);\r\n//\t\tSystem.out.println(\"new size=\"+newCapacity+\", thres=\"+threshold);\r\n\t\t// rehash all existing entries\r\n\t\tlet oldSize: number = this.size;\r\n\t\tfor (let bucket of old) {\r\n\t\t\tif (!bucket) {\r\n\t\t\t\tcontinue;\r\n\t\t\t}\r\n\r\n\t\t\tfor (let o of bucket) {\r\n\t\t\t\tlet b: number = this.getBucket(o);\r\n\t\t\t\tlet newBucket: T[] | undefined = this.buckets[b];\r\n\t\t\t\tif (!newBucket) {\r\n\t\t\t\t\tnewBucket = [];\r\n\t\t\t\t\tthis.buckets[b] = newBucket;\r\n\t\t\t\t}\r\n\r\n\t\t\t\tnewBucket.push(o);\r\n\t\t\t}\r\n\t\t}\r\n\r\n\t\tassert(this.n === oldSize);\r\n\t}\r\n\r\n\t@Override\r\n\tpublic add(t: T): boolean {\r\n\t\tlet existing: T = this.getOrAdd(t);\r\n\t\treturn existing === t;\r\n\t}\r\n\r\n\t@Override\r\n\tget size(): number {\r\n\t\treturn this.n;\r\n\t}\r\n\r\n\t@Override\r\n\tget isEmpty(): boolean {\r\n\t\treturn this.n === 0;\r\n\t}\r\n\r\n\t@Override\r\n\tpublic contains(o: any): boolean {\r\n\t\treturn this.containsFast(this.asElementType(o));\r\n\t}\r\n\r\n\tpublic 
containsFast(@Nullable obj: T): boolean {\r\n\t\tif (obj == null) {\r\n\t\t\treturn false;\r\n\t\t}\r\n\r\n\t\treturn this.get(obj) != null;\r\n\t}\r\n\r\n\t@Override\r\n\tpublic *[Symbol.iterator](): IterableIterator {\r\n\t\tyield* this.toArray();\r\n\t}\r\n\r\n\t@Override\r\n\tpublic toArray(): T[] {\r\n\t\tconst a = new Array(this.size);\r\n\r\n\t\t// Copy elements from the nested arrays into the destination array\r\n\t\tlet i: number = 0; // Position within destination array\r\n\t\tfor (let bucket of this.buckets) {\r\n\t\t\tif (bucket == null) {\r\n\t\t\t\tcontinue;\r\n\t\t\t}\r\n\r\n\t\t\tfor (let o of bucket) {\r\n\t\t\t\tif (o == null) {\r\n\t\t\t\t\tbreak;\r\n\t\t\t\t}\r\n\t\t\t\ta[i++] = o;\r\n\t\t\t}\r\n\t\t}\r\n\t\treturn a;\r\n\t}\r\n\r\n\t@Override\r\n\tpublic containsAll(collection: JavaCollection): boolean {\r\n\t\tif (collection instanceof Array2DHashSet) {\r\n\t\t\tlet s = collection as any as Array2DHashSet;\r\n\t\t\tfor (let bucket of s.buckets) {\r\n\t\t\t\tif (bucket == null) {\r\n\t\t\t\t\tcontinue;\r\n\t\t\t\t}\r\n\t\t\t\tfor (let o of bucket) {\r\n\t\t\t\t\tif (o == null) {\r\n\t\t\t\t\t\tbreak;\r\n\t\t\t\t\t}\r\n\t\t\t\t\tif (!this.containsFast(this.asElementType(o))) {\r\n\t\t\t\t\t\treturn false;\r\n\t\t\t\t\t}\r\n\t\t\t\t}\r\n\t\t\t}\r\n\t\t}\r\n\t\telse {\r\n\t\t\tfor (let o of collection) {\r\n\t\t\t\tif (!this.containsFast(this.asElementType(o))) {\r\n\t\t\t\t\treturn false;\r\n\t\t\t\t}\r\n\t\t\t}\r\n\t\t}\r\n\t\treturn true;\r\n\t}\r\n\r\n\t@Override\r\n\tpublic addAll(c: Iterable): boolean {\r\n\t\tlet changed: boolean = false;\r\n\r\n\t\tfor (let o of c) {\r\n\t\t\tlet existing: T = this.getOrAdd(o);\r\n\t\t\tif (existing !== o) {\r\n\t\t\t\tchanged = true;\r\n\t\t\t}\r\n\t\t}\r\n\t\treturn changed;\r\n\t}\r\n\r\n\t@Override\r\n\tpublic clear(): void {\r\n\t\tthis.buckets = this.createBuckets(INITAL_CAPACITY);\r\n\t\tthis.n = 0;\r\n\t\tthis.threshold = Math.floor(INITAL_CAPACITY * 
LOAD_FACTOR);\r\n\t}\r\n\r\n\t@Override\r\n\tpublic toString(): string {\r\n\t\tif (this.size === 0) {\r\n\t\t\treturn \"{}\";\r\n\t\t}\r\n\r\n\t\tlet buf = \"{\";\r\n\t\tlet first: boolean = true;\r\n\t\tfor (let bucket of this.buckets) {\r\n\t\t\tif (bucket == null) {\r\n\t\t\t\tcontinue;\r\n\t\t\t}\r\n\t\t\tfor (let o of bucket) {\r\n\t\t\t\tif (o == null) {\r\n\t\t\t\t\tbreak;\r\n\t\t\t\t}\r\n\t\t\t\tif (first) {\r\n\t\t\t\t\tfirst = false;\r\n\t\t\t\t} else {\r\n\t\t\t\t\tbuf += \", \";\r\n\t\t\t\t}\r\n\t\t\t\tbuf += o.toString();\r\n\t\t\t}\r\n\t\t}\r\n\t\tbuf += \"}\";\r\n\t\treturn buf;\r\n\t}\r\n\r\n\tpublic toTableString(): string {\r\n\t\tlet buf = \"\";\r\n\t\tfor (let bucket of this.buckets) {\r\n\t\t\tif (bucket == null) {\r\n\t\t\t\tbuf += \"null\\n\";\r\n\t\t\t\tcontinue;\r\n\t\t\t}\r\n\t\t\tbuf += \"[\";\r\n\t\t\tlet first: boolean = true;\r\n\t\t\tfor (let o of bucket) {\r\n\t\t\t\tif (first) {\r\n\t\t\t\t\tfirst = false;\r\n\t\t\t\t} else {\r\n\t\t\t\t\tbuf += \" \";\r\n\t\t\t\t}\r\n\t\t\t\tif (o == null) {\r\n\t\t\t\t\tbuf += \"_\";\r\n\t\t\t\t} else {\r\n\t\t\t\t\tbuf += o.toString();\r\n\t\t\t\t}\r\n\t\t\t}\r\n\t\t\tbuf += \"]\\n\";\r\n\t\t}\r\n\t\treturn buf;\r\n\t}\r\n\r\n\t/**\r\n\t * Return `o` as an instance of the element type `T`. If\r\n\t * `o` is non-undefined but known to not be an instance of `T`, this\r\n\t * method returns `undefined`. 
The base implementation does not perform any\r\n\t * type checks; override this method to provide strong type checks for the\r\n\t * {@link #contains} and {@link #remove} methods to ensure the arguments to\r\n\t * the {@link EqualityComparator} for the set always have the expected\r\n\t * types.\r\n\t *\r\n\t * @param o the object to try and cast to the element type of the set\r\n\t * @returns `o` if it could be an instance of `T`, otherwise\r\n\t * `undefined`.\r\n\t */\r\n\t@SuppressWarnings(\"unchecked\")\r\n\tprotected asElementType(o: any): T {\r\n\t\treturn o as T;\r\n\t}\r\n\r\n\t/**\r\n\t * Return an array of `T[]` with length `capacity`.\r\n\t *\r\n\t * @param capacity the length of the array to return\r\n\t * @returns the newly constructed array\r\n\t */\r\n\t@SuppressWarnings(\"unchecked\")\r\n\tprotected createBuckets(capacity: number): Array {\r\n\t\treturn new Array(capacity);\r\n\t}\r\n}\r\n", "/*!\r\n * Copyright 2016 The ANTLR Project. All rights reserved.\r\n * Licensed under the BSD-3-Clause license. 
See LICENSE file in the project root for license information.\r\n */\r\n\r\n// ConvertTo-TS run at 2016-10-03T02:09:42.2127260-07:00\r\nimport { EqualityComparator } from \"./EqualityComparator\";\r\nimport { Override } from \"../Decorators\";\r\nimport { Equatable } from \"./Stubs\";\r\nimport { MurmurHash } from \"./MurmurHash\";\r\nimport { ObjectEqualityComparator } from \"./ObjectEqualityComparator\";\r\n\r\n/**\r\n * This default implementation of {@link EqualityComparator} uses object equality\r\n * for comparisons by calling {@link Object#hashCode} and {@link Object#equals}.\r\n *\r\n * @author Sam Harwell\r\n */\r\nexport class ArrayEqualityComparator implements EqualityComparator {\r\n\tpublic static readonly INSTANCE: ArrayEqualityComparator = new ArrayEqualityComparator();\r\n\r\n\t/**\r\n\t * {@inheritDoc}\r\n\t *\r\n\t * This implementation returns\r\n\t * `obj.`{@link Object#hashCode hashCode()}.\r\n\t */\r\n\t@Override\r\n\tpublic hashCode(obj: Equatable[]): number {\r\n\t\tif (obj == null) {\r\n\t\t\treturn 0;\r\n\t\t}\r\n\r\n\t\treturn MurmurHash.hashCode(obj, 0);\r\n\t}\r\n\r\n\t/**\r\n\t * {@inheritDoc}\r\n\t *\r\n\t * This implementation relies on object equality. If both objects are\r\n\t * `undefined`, this method returns `true`. Otherwise if only\r\n\t * `a` is `undefined`, this method returns `false`. Otherwise,\r\n\t * this method returns the result of\r\n\t * `a.`{@link Object#equals equals}`(b)`.\r\n\t */\r\n\t@Override\r\n\tpublic equals(a: Equatable[], b: Equatable[]): boolean {\r\n\t\tif (a == null) {\r\n\t\t\treturn b == null;\r\n\t\t} else if (b == null) {\r\n\t\t\treturn false;\r\n\t\t}\r\n\r\n\t\tif (a.length !== b.length) {\r\n\t\t\treturn false;\r\n\t\t}\r\n\r\n\t\tfor (let i = 0; i < a.length; i++) {\r\n\t\t\tif (!ObjectEqualityComparator.INSTANCE.equals(a[i], b[i])) {\r\n\t\t\t\treturn false;\r\n\t\t\t}\r\n\t\t}\r\n\r\n\t\treturn true;\r\n\t}\r\n\r\n}\r\n", "/*!\r\n * Copyright 2016 The ANTLR Project. 
All rights reserved.\r\n * Licensed under the BSD-3-Clause license. See LICENSE file in the project root for license information.\r\n */\r\n\r\n// ConvertTo-TS run at 2016-10-04T11:26:45.0833752-07:00\r\n\r\n// Taking a case-by-case approach to pporting this functionaltiy\r\n// as much of it may be supported natively by JavaScript. Or otherwise need\r\n// substantial rethink\r\n\r\nimport { NotNull } from \"../Decorators\";\r\nimport { Equatable } from \"./Stubs\";\r\nimport { IntegerList } from \"./IntegerList\";\r\n\r\nexport function escapeWhitespace(s: string, escapeSpaces: boolean): string {\r\n\treturn escapeSpaces ? s.replace(/ /, \"\\u00B7\") : s\r\n\t\t.replace(/\\t/, \"\\\\t\")\r\n\t\t.replace(/\\n/, \"\\\\n\")\r\n\t\t.replace(/\\r/, \"\\\\r\");\r\n}\r\n\r\n// Seriously: why isn't this built in to java? ugh!\r\nexport function join(collection: Iterable, separator: string): string {\r\n\tlet buf = \"\";\r\n\tlet first = true;\r\n\tfor (let current of collection) {\r\n\t\tif (first) {\r\n\t\t\tfirst = false;\r\n\t\t} else {\r\n\t\t\tbuf += separator;\r\n\t\t}\r\n\r\n\t\tbuf += current;\r\n\t}\r\n\r\n\treturn buf;\r\n}\r\n\r\nexport function equals(x: Equatable | undefined, y: Equatable | undefined): boolean {\r\n\tif (x === y) {\r\n\t\treturn true;\r\n\t}\r\n\r\n\tif (x === undefined || y === undefined) {\r\n\t\treturn false;\r\n\t}\r\n\r\n\treturn x.equals(y);\r\n}\r\n\r\n// export function numNonnull(data: any[]): number {\r\n// \tlet n: number = 0;\r\n// \tif ( data == null ) return n;\r\n// \tfor (let o of data) {\r\n// \t\tif ( o!=null ) n++;\r\n// \t}\r\n// \treturn n;\r\n// }\r\n\r\n// export function removeAllElements(data: Collection, value: T): void {\r\n// \tif ( data==null ) return;\r\n// \twhile ( data.contains(value) ) data.remove(value);\r\n// }\r\n\r\n// export function writeFile(@NotNull file: File, @NotNull content: Uint8Array): void {\r\n// \tlet fos: FileOutputStream = new FileOutputStream(file);\r\n// \ttry {\r\n// 
\t\tfos.write(content);\r\n// \t} finally {\r\n// \t\tfos.close();\r\n// \t}\r\n// }\r\n\r\n// export function writeFile(@NotNull fileName: string, @NotNull content: string): void {\r\n// \twriteFile(fileName, content, null);\r\n// }\r\n\r\n// export function writeFile(@NotNull fileName: string, @NotNull content: string, @Nullable encoding: string): void {\r\n// \tlet f: File = new File(fileName);\r\n// \tlet fos: FileOutputStream = new FileOutputStream(f);\r\n// \tlet osw: OutputStreamWriter;\r\n// \tif (encoding != null) {\r\n// \t\tosw = new OutputStreamWriter(fos, encoding);\r\n// \t}\r\n// \telse {\r\n// \t\tosw = new OutputStreamWriter(fos);\r\n// \t}\r\n\r\n// \ttry {\r\n// \t\tosw.write(content);\r\n// \t}\r\n// \tfinally {\r\n// \t\tosw.close();\r\n// \t}\r\n// }\r\n\r\n// @NotNull\r\n// export function readFile(@NotNull fileName: string): char[] {\r\n// \treturn readFile(fileName, null);\r\n// }\r\n\r\n// @NotNull\r\n// export function readFile(@NotNull fileName: string, @Nullable encoding: string): char[] {\r\n// \tlet f: File = new File(fileName);\r\n// \tlet size: number = (int)f.length();\r\n// \tlet isr: InputStreamReader;\r\n// \tlet fis: FileInputStream = new FileInputStream(fileName);\r\n// \tif ( encoding!=null ) {\r\n// \t\tisr = new InputStreamReader(fis, encoding);\r\n// \t}\r\n// \telse {\r\n// \t\tisr = new InputStreamReader(fis);\r\n// \t}\r\n// \tlet data: char[] = null;\r\n// \ttry {\r\n// \t\tdata = new char[size];\r\n// \t\tlet n: number = isr.read(data);\r\n// \t\tif (n < data.length) {\r\n// \t\t\tdata = Arrays.copyOf(data, n);\r\n// \t\t}\r\n// \t}\r\n// \tfinally {\r\n// \t\tisr.close();\r\n// \t}\r\n// \treturn data;\r\n// }\r\n\r\n// export function removeAll(@NotNull predicate: List list,@NotNull Predicate): void {\r\n// \tlet j: number = 0;\r\n// \tfor (let i = 0; i < list.size; i++) {\r\n// \t\tlet item: T = list.get(i);\r\n// \t\tif (!predicate.eval(item)) {\r\n// \t\t\tif (j != i) {\r\n// \t\t\t\tlist.set(j, item);\r\n// 
\t\t\t}\r\n\r\n// \t\t\tj++;\r\n// \t\t}\r\n// \t}\r\n\r\n// \tif (j < list.size) {\r\n// \t\tlist.subList(j, list.size).clear();\r\n// \t}\r\n// }\r\n\r\n// export function removeAll(@NotNull predicate: Iterable iterable,@NotNull Predicate): void {\r\n// \tif (iterable instanceof List) {\r\n// \t\tremoveAll((List)iterable, predicate);\r\n// \t\treturn;\r\n// \t}\r\n\r\n// \tfor (Iterator iterator = iterable.iterator(); iterator.hasNext(); ) {\r\n// \t\tlet item: T = iterator.next();\r\n// \t\tif (predicate.eval(item)) {\r\n// \t\t\titerator.remove();\r\n// \t\t}\r\n// \t}\r\n// }\r\n\r\n/** Convert array of strings to string→index map. Useful for\r\n * converting rulenames to name→ruleindex map.\r\n */\r\nexport function toMap(keys: string[]): Map {\r\n\tlet m: Map = new Map();\r\n\tfor (let i = 0; i < keys.length; i++) {\r\n\t\tm.set(keys[i], i);\r\n\t}\r\n\r\n\treturn m;\r\n}\r\n\r\nexport function toCharArray(str: string): Uint16Array;\r\nexport function toCharArray(data: IntegerList): Uint16Array;\r\nexport function toCharArray(str: string | IntegerList): Uint16Array {\r\n\tif (typeof str === \"string\") {\r\n\t\tlet result = new Uint16Array(str.length);\r\n\t\tfor (let i = 0; i < str.length; i++) {\r\n\t\t\tresult[i] = str.charCodeAt(i);\r\n\t\t}\r\n\r\n\t\treturn result;\r\n\t} else {\r\n\t\treturn str.toCharArray();\r\n\t}\r\n}\r\n\r\n// /**\r\n// \t* @since 4.5\r\n// \t*/\r\n// @NotNull\r\n// export function toSet(@NotNull bits: BitSet): IntervalSet {\r\n// \tlet s: IntervalSet = new IntervalSet();\r\n// \tlet i: number = bits.nextSetBit(0);\r\n// \twhile ( i >= 0 ) {\r\n// \t\ts.add(i);\r\n// \t\ti = bits.nextSetBit(i+1);\r\n// \t}\r\n// \treturn s;\r\n// }\r\n", "/*!\r\n * Copyright 2016 The ANTLR Project. All rights reserved.\r\n * Licensed under the BSD-3-Clause license. 
See LICENSE file in the project root for license information.\r\n */\r\n\r\n// ConvertTo-TS run at 2016-10-04T11:26:36.9521478-07:00\r\n\r\nimport { Array2DHashSet } from \"../misc/Array2DHashSet\";\r\nimport { ArrayEqualityComparator } from \"../misc/ArrayEqualityComparator\";\r\nimport { Comparable } from \"../misc/Stubs\";\r\nimport { Equatable } from \"../misc/Stubs\";\r\nimport { MurmurHash } from \"../misc/MurmurHash\";\r\nimport { NotNull, Override } from \"../Decorators\";\r\nimport { ObjectEqualityComparator } from \"../misc/ObjectEqualityComparator\";\r\nimport { Recognizer } from \"../Recognizer\";\r\nimport { RuleContext } from \"../RuleContext\";\r\nimport * as Utils from \"../misc/Utils\";\r\n\r\nfunction max>(items: Iterable): T | undefined {\r\n\tlet result: T | undefined;\r\n\tfor (let current of items) {\r\n\t\tif (result === undefined) {\r\n\t\t\tresult = current;\r\n\t\t\tcontinue;\r\n\t\t}\r\n\r\n\t\tlet comparison = result.compareTo(current);\r\n\t\tif (comparison < 0) {\r\n\t\t\tresult = current;\r\n\t\t}\r\n\t}\r\n\r\n\treturn result;\r\n}\r\n\r\nfunction min>(items: Iterable): T | undefined {\r\n\tlet result: T | undefined;\r\n\tfor (let current of items) {\r\n\t\tif (result === undefined) {\r\n\t\t\tresult = current;\r\n\t\t\tcontinue;\r\n\t\t}\r\n\r\n\t\tlet comparison = result.compareTo(current);\r\n\t\tif (comparison > 0) {\r\n\t\t\tresult = current;\r\n\t\t}\r\n\t}\r\n\r\n\treturn result;\r\n}\r\n\r\n/** A tree structure used to record the semantic context in which\r\n * an ATN configuration is valid. 
It's either a single predicate,\r\n * a conjunction `p1&&p2`, or a sum of products `p1||p2`.\r\n *\r\n * I have scoped the {@link AND}, {@link OR}, and {@link Predicate} subclasses of\r\n * {@link SemanticContext} within the scope of this outer class.\r\n */\r\nexport abstract class SemanticContext implements Equatable {\r\n\tprivate static _NONE: SemanticContext;\r\n\r\n\t/**\r\n\t * The default {@link SemanticContext}, which is semantically equivalent to\r\n\t * a predicate of the form `{true}?`.\r\n\t */\r\n\tstatic get NONE(): SemanticContext {\r\n\t\tif (SemanticContext._NONE === undefined) {\r\n\t\t\tSemanticContext._NONE = new SemanticContext.Predicate();\r\n\t\t}\r\n\r\n\t\treturn SemanticContext._NONE;\r\n\t}\r\n\r\n\t/**\r\n\t * For context independent predicates, we evaluate them without a local\r\n\t * context (i.e., unedfined context). That way, we can evaluate them without\r\n\t * having to create proper rule-specific context during prediction (as\r\n\t * opposed to the parser, which creates them naturally). In a practical\r\n\t * sense, this avoids a cast exception from RuleContext to myruleContext.\r\n\t *\r\n\t * For context dependent predicates, we must pass in a local context so that\r\n\t * references such as $arg evaluate properly as _localctx.arg. 
We only\r\n\t * capture context dependent predicates in the context in which we begin\r\n\t * prediction, so we passed in the outer context here in case of context\r\n\t * dependent predicate evaluation.\r\n\t */\r\n\tpublic abstract eval(parser: Recognizer, parserCallStack: RuleContext): boolean;\r\n\r\n\t/**\r\n\t * Evaluate the precedence predicates for the context and reduce the result.\r\n\t *\r\n\t * @param parser The parser instance.\r\n\t * @param parserCallStack\r\n\t * @returns The simplified semantic context after precedence predicates are\r\n\t * evaluated, which will be one of the following values.\r\n\t *\r\n\t * * {@link #NONE}: if the predicate simplifies to `true` after\r\n\t * precedence predicates are evaluated.\r\n\t * * `undefined`: if the predicate simplifies to `false` after\r\n\t * precedence predicates are evaluated.\r\n\t * * `this`: if the semantic context is not changed as a result of\r\n\t * precedence predicate evaluation.\r\n\t * * A non-`undefined` {@link SemanticContext}: the new simplified\r\n\t * semantic context after precedence predicates are evaluated.\r\n\t */\r\n\tpublic evalPrecedence(parser: Recognizer, parserCallStack: RuleContext): SemanticContext | undefined {\r\n\t\treturn this;\r\n\t}\r\n\r\n\tpublic abstract hashCode(): number;\r\n\r\n\tpublic abstract equals(obj: any): boolean;\r\n\r\n\tpublic static and(a: SemanticContext | undefined, b: SemanticContext): SemanticContext {\r\n\t\tif (!a || a === SemanticContext.NONE) {\r\n\t\t\treturn b;\r\n\t\t}\r\n\t\tif (b === SemanticContext.NONE) {\r\n\t\t\treturn a;\r\n\t\t}\r\n\t\tlet result: SemanticContext.AND = new SemanticContext.AND(a, b);\r\n\t\tif (result.opnds.length === 1) {\r\n\t\t\treturn result.opnds[0];\r\n\t\t}\r\n\r\n\t\treturn result;\r\n\t}\r\n\r\n\t/**\r\n\t *\r\n\t * @see ParserATNSimulator#getPredsForAmbigAlts\r\n\t */\r\n\tpublic static or(a: SemanticContext | undefined, b: SemanticContext): SemanticContext {\r\n\t\tif (!a) {\r\n\t\t\treturn 
b;\r\n\t\t}\r\n\r\n\t\tif (a === SemanticContext.NONE || b === SemanticContext.NONE) {\r\n\t\t\treturn SemanticContext.NONE;\r\n\t\t}\r\n\t\tlet result: SemanticContext.OR = new SemanticContext.OR(a, b);\r\n\t\tif (result.opnds.length === 1) {\r\n\t\t\treturn result.opnds[0];\r\n\t\t}\r\n\r\n\t\treturn result;\r\n\t}\r\n}\r\n\r\nexport namespace SemanticContext {\r\n\t/**\r\n\t * This random 30-bit prime represents the value of `AND.class.hashCode()`.\r\n\t */\r\n\tconst AND_HASHCODE = 40363613;\r\n\t/**\r\n\t * This random 30-bit prime represents the value of `OR.class.hashCode()`.\r\n\t */\r\n\tconst OR_HASHCODE = 486279973;\r\n\r\n\tfunction filterPrecedencePredicates(collection: SemanticContext[]): SemanticContext.PrecedencePredicate[] {\r\n\t\tlet result: SemanticContext.PrecedencePredicate[] = [];\r\n\t\tfor (let i = 0; i < collection.length; i++) {\r\n\t\t\tlet context: SemanticContext = collection[i];\r\n\t\t\tif (context instanceof SemanticContext.PrecedencePredicate) {\r\n\t\t\t\tresult.push(context);\r\n\r\n\t\t\t\t// Remove the item from 'collection' and move i back so we look at the same index again\r\n\t\t\t\tcollection.splice(i, 1);\r\n\t\t\t\ti--;\r\n\t\t\t}\r\n\t\t}\r\n\r\n\t\treturn result;\r\n\t}\r\n\r\n\texport class Predicate extends SemanticContext {\r\n\t\tpublic ruleIndex: number;\r\n\t\tpublic predIndex: number;\r\n\t\tpublic isCtxDependent: boolean; // e.g., $i ref in pred\r\n\r\n\t\tconstructor();\r\n\t\tconstructor(ruleIndex: number, predIndex: number, isCtxDependent: boolean);\r\n\r\n\t\tconstructor(ruleIndex: number = -1, predIndex: number = -1, isCtxDependent: boolean = false) {\r\n\t\t\tsuper();\r\n\t\t\tthis.ruleIndex = ruleIndex;\r\n\t\t\tthis.predIndex = predIndex;\r\n\t\t\tthis.isCtxDependent = isCtxDependent;\r\n\t\t}\r\n\r\n\t\t@Override\r\n\t\tpublic eval(parser: Recognizer, parserCallStack: RuleContext): boolean {\r\n\t\t\tlet localctx: RuleContext | undefined = this.isCtxDependent ? 
parserCallStack : undefined;\r\n\t\t\treturn parser.sempred(localctx, this.ruleIndex, this.predIndex);\r\n\t\t}\r\n\r\n\t\t@Override\r\n\t\tpublic hashCode(): number {\r\n\t\t\tlet hashCode: number = MurmurHash.initialize();\r\n\t\t\thashCode = MurmurHash.update(hashCode, this.ruleIndex);\r\n\t\t\thashCode = MurmurHash.update(hashCode, this.predIndex);\r\n\t\t\thashCode = MurmurHash.update(hashCode, this.isCtxDependent ? 1 : 0);\r\n\t\t\thashCode = MurmurHash.finish(hashCode, 3);\r\n\t\t\treturn hashCode;\r\n\t\t}\r\n\r\n\t\t@Override\r\n\t\tpublic equals(obj: any): boolean {\r\n\t\t\tif (!(obj instanceof Predicate)) {\r\n\t\t\t\treturn false;\r\n\t\t\t}\r\n\t\t\tif (this === obj) {\r\n\t\t\t\treturn true;\r\n\t\t\t}\r\n\t\t\treturn this.ruleIndex === obj.ruleIndex &&\r\n\t\t\t\tthis.predIndex === obj.predIndex &&\r\n\t\t\t\tthis.isCtxDependent === obj.isCtxDependent;\r\n\t\t}\r\n\r\n\t\t@Override\r\n\t\tpublic toString(): string {\r\n\t\t\treturn \"{\" + this.ruleIndex + \":\" + this.predIndex + \"}?\";\r\n\t\t}\r\n\t}\r\n\r\n\texport class PrecedencePredicate extends SemanticContext implements Comparable {\r\n\t\tpublic precedence: number;\r\n\r\n\t\tconstructor(precedence: number) {\r\n\t\t\tsuper();\r\n\t\t\tthis.precedence = precedence;\r\n\t\t}\r\n\r\n\t\t@Override\r\n\t\tpublic eval(parser: Recognizer, parserCallStack: RuleContext): boolean {\r\n\t\t\treturn parser.precpred(parserCallStack, this.precedence);\r\n\t\t}\r\n\r\n\t\t@Override\r\n\t\tpublic evalPrecedence(parser: Recognizer, parserCallStack: RuleContext): SemanticContext | undefined {\r\n\t\t\tif (parser.precpred(parserCallStack, this.precedence)) {\r\n\t\t\t\treturn SemanticContext.NONE;\r\n\t\t\t}\r\n\t\t\telse {\r\n\t\t\t\treturn undefined;\r\n\t\t\t}\r\n\t\t}\r\n\r\n\t\t@Override\r\n\t\tpublic compareTo(o: PrecedencePredicate): number {\r\n\t\t\treturn this.precedence - o.precedence;\r\n\t\t}\r\n\r\n\t\t@Override\r\n\t\tpublic hashCode(): number {\r\n\t\t\tlet hashCode: number = 
1;\r\n\t\t\thashCode = 31 * hashCode + this.precedence;\r\n\t\t\treturn hashCode;\r\n\t\t}\r\n\r\n\t\t@Override\r\n\t\tpublic equals(obj: any): boolean {\r\n\t\t\tif (!(obj instanceof PrecedencePredicate)) {\r\n\t\t\t\treturn false;\r\n\t\t\t}\r\n\r\n\t\t\tif (this === obj) {\r\n\t\t\t\treturn true;\r\n\t\t\t}\r\n\r\n\t\t\treturn this.precedence === obj.precedence;\r\n\t\t}\r\n\r\n\t\t@Override\r\n\t\t// precedence >= _precedenceStack.peek()\r\n\t\tpublic toString(): string {\r\n\t\t\treturn \"{\" + this.precedence + \">=prec}?\";\r\n\t\t}\r\n\t}\r\n\r\n\t/**\r\n\t * This is the base class for semantic context \"operators\", which operate on\r\n\t * a collection of semantic context \"operands\".\r\n\t *\r\n\t * @since 4.3\r\n\t */\r\n\texport abstract class Operator extends SemanticContext {\r\n\t\t/**\r\n\t\t * Gets the operands for the semantic context operator.\r\n\t\t *\r\n\t\t * @returns a collection of {@link SemanticContext} operands for the\r\n\t\t * operator.\r\n\t\t *\r\n\t\t * @since 4.3\r\n\t\t */\r\n\t\t// @NotNull\r\n\t\tpublic abstract readonly operands: Iterable;\r\n\t}\r\n\r\n\t/**\r\n\t * A semantic context which is true whenever none of the contained contexts\r\n\t * is false.\r\n\t */\r\n\texport class AND extends Operator {\r\n\t\tpublic opnds: SemanticContext[];\r\n\r\n\t\tconstructor(@NotNull a: SemanticContext, @NotNull b: SemanticContext) {\r\n\t\t\tsuper();\r\n\r\n\t\t\tlet operands: Array2DHashSet = new Array2DHashSet(ObjectEqualityComparator.INSTANCE);\r\n\t\t\tif (a instanceof AND) {\r\n\t\t\t\toperands.addAll(a.opnds);\r\n\t\t\t} else {\r\n\t\t\t\toperands.add(a);\r\n\t\t\t}\r\n\r\n\t\t\tif (b instanceof AND) {\r\n\t\t\t\toperands.addAll(b.opnds);\r\n\t\t\t} else {\r\n\t\t\t\toperands.add(b);\r\n\t\t\t}\r\n\r\n\t\t\tthis.opnds = operands.toArray();\r\n\t\t\tlet precedencePredicates: PrecedencePredicate[] = filterPrecedencePredicates(this.opnds);\r\n\r\n\t\t\t// interested in the transition with the lowest precedence\r\n\t\t\tlet 
reduced = min(precedencePredicates);\r\n\t\t\tif (reduced) {\r\n\t\t\t\tthis.opnds.push(reduced);\r\n\t\t\t}\r\n\t\t}\r\n\r\n\t\t@Override\r\n\t\tget operands(): Iterable {\r\n\t\t\treturn this.opnds;\r\n\t\t}\r\n\r\n\t\t@Override\r\n\t\tpublic equals(obj: any): boolean {\r\n\t\t\tif (this === obj) {\r\n\t\t\t\treturn true;\r\n\t\t\t}\r\n\t\t\tif (!(obj instanceof AND)) {\r\n\t\t\t\treturn false;\r\n\t\t\t}\r\n\t\t\treturn ArrayEqualityComparator.INSTANCE.equals(this.opnds, obj.opnds);\r\n\t\t}\r\n\r\n\t\t@Override\r\n\t\tpublic hashCode(): number {\r\n\t\t\treturn MurmurHash.hashCode(this.opnds, AND_HASHCODE);\r\n\t\t}\r\n\r\n\t\t/**\r\n\t\t * {@inheritDoc}\r\n\t\t *\r\n\t\t * The evaluation of predicates by this context is short-circuiting, but\r\n\t\t * unordered.\r\n\t\t */\r\n\t\t@Override\r\n\t\tpublic eval(parser: Recognizer, parserCallStack: RuleContext): boolean {\r\n\t\t\tfor (let opnd of this.opnds) {\r\n\t\t\t\tif (!opnd.eval(parser, parserCallStack)) {\r\n\t\t\t\t\treturn false;\r\n\t\t\t\t}\r\n\t\t\t}\r\n\r\n\t\t\treturn true;\r\n\t\t}\r\n\r\n\t\t@Override\r\n\t\tpublic evalPrecedence(parser: Recognizer, parserCallStack: RuleContext): SemanticContext | undefined {\r\n\t\t\tlet differs: boolean = false;\r\n\t\t\tlet operands: SemanticContext[] = [];\r\n\t\t\tfor (let context of this.opnds) {\r\n\t\t\t\tlet evaluated: SemanticContext | undefined = context.evalPrecedence(parser, parserCallStack);\r\n\t\t\t\tdiffers = differs || (evaluated !== context);\r\n\t\t\t\tif (evaluated == null) {\r\n\t\t\t\t\t// The AND context is false if any element is false\r\n\t\t\t\t\treturn undefined;\r\n\t\t\t\t}\r\n\t\t\t\telse if (evaluated !== SemanticContext.NONE) {\r\n\t\t\t\t\t// Reduce the result by skipping true elements\r\n\t\t\t\t\toperands.push(evaluated);\r\n\t\t\t\t}\r\n\t\t\t}\r\n\r\n\t\t\tif (!differs) {\r\n\t\t\t\treturn this;\r\n\t\t\t}\r\n\r\n\t\t\tif (operands.length === 0) {\r\n\t\t\t\t// all elements were true, so the AND context is 
true\r\n\t\t\t\treturn SemanticContext.NONE;\r\n\t\t\t}\r\n\r\n\t\t\tlet result: SemanticContext = operands[0];\r\n\t\t\tfor (let i = 1; i < operands.length; i++) {\r\n\t\t\t\tresult = SemanticContext.and(result, operands[i]);\r\n\t\t\t}\r\n\r\n\t\t\treturn result;\r\n\t\t}\r\n\r\n\t\t@Override\r\n\t\tpublic toString(): string {\r\n\t\t\treturn Utils.join(this.opnds, \"&&\");\r\n\t\t}\r\n\t}\r\n\r\n\t/**\r\n\t * A semantic context which is true whenever at least one of the contained\r\n\t * contexts is true.\r\n\t */\r\n\texport class OR extends Operator {\r\n\t\tpublic opnds: SemanticContext[];\r\n\r\n\t\tconstructor(@NotNull a: SemanticContext, @NotNull b: SemanticContext) {\r\n\t\t\tsuper();\r\n\r\n\t\t\tlet operands: Array2DHashSet = new Array2DHashSet(ObjectEqualityComparator.INSTANCE);\r\n\t\t\tif (a instanceof OR) {\r\n\t\t\t\toperands.addAll(a.opnds);\r\n\t\t\t} else {\r\n\t\t\t\toperands.add(a);\r\n\t\t\t}\r\n\r\n\t\t\tif (b instanceof OR) {\r\n\t\t\t\toperands.addAll(b.opnds);\r\n\t\t\t} else {\r\n\t\t\t\toperands.add(b);\r\n\t\t\t}\r\n\r\n\t\t\tthis.opnds = operands.toArray();\r\n\t\t\tlet precedencePredicates: PrecedencePredicate[] = filterPrecedencePredicates(this.opnds);\r\n\r\n\t\t\t// interested in the transition with the highest precedence\r\n\t\t\tlet reduced = max(precedencePredicates);\r\n\t\t\tif (reduced) {\r\n\t\t\t\tthis.opnds.push(reduced);\r\n\t\t\t}\r\n\t\t}\r\n\r\n\t\t@Override\r\n\t\tget operands(): Iterable {\r\n\t\t\treturn this.opnds;\r\n\t\t}\r\n\r\n\t\t@Override\r\n\t\tpublic equals(obj: any): boolean {\r\n\t\t\tif (this === obj) {\r\n\t\t\t\treturn true;\r\n\t\t\t}\r\n\t\t\tif (!(obj instanceof OR)) {\r\n\t\t\t\treturn false;\r\n\t\t\t}\r\n\t\t\treturn ArrayEqualityComparator.INSTANCE.equals(this.opnds, obj.opnds);\r\n\t\t}\r\n\r\n\t\t@Override\r\n\t\tpublic hashCode(): number {\r\n\t\t\treturn MurmurHash.hashCode(this.opnds, OR_HASHCODE);\r\n\t\t}\r\n\r\n\t\t/**\r\n\t\t * {@inheritDoc}\r\n\t\t *\r\n\t\t * The evaluation of 
predicates by this context is short-circuiting, but\r\n\t\t * unordered.\r\n\t\t */\r\n\t\t@Override\r\n\t\tpublic eval(parser: Recognizer, parserCallStack: RuleContext): boolean {\r\n\t\t\tfor (let opnd of this.opnds) {\r\n\t\t\t\tif (opnd.eval(parser, parserCallStack)) {\r\n\t\t\t\t\treturn true;\r\n\t\t\t\t}\r\n\t\t\t}\r\n\r\n\t\t\treturn false;\r\n\t\t}\r\n\r\n\t\t@Override\r\n\t\tpublic evalPrecedence(parser: Recognizer, parserCallStack: RuleContext): SemanticContext | undefined {\r\n\t\t\tlet differs: boolean = false;\r\n\t\t\tlet operands: SemanticContext[] = [];\r\n\t\t\tfor (let context of this.opnds) {\r\n\t\t\t\tlet evaluated: SemanticContext | undefined = context.evalPrecedence(parser, parserCallStack);\r\n\t\t\t\tdiffers = differs || (evaluated !== context);\r\n\t\t\t\tif (evaluated === SemanticContext.NONE) {\r\n\t\t\t\t\t// The OR context is true if any element is true\r\n\t\t\t\t\treturn SemanticContext.NONE;\r\n\t\t\t\t} else if (evaluated) {\r\n\t\t\t\t\t// Reduce the result by skipping false elements\r\n\t\t\t\t\toperands.push(evaluated);\r\n\t\t\t\t}\r\n\t\t\t}\r\n\r\n\t\t\tif (!differs) {\r\n\t\t\t\treturn this;\r\n\t\t\t}\r\n\r\n\t\t\tif (operands.length === 0) {\r\n\t\t\t\t// all elements were false, so the OR context is false\r\n\t\t\t\treturn undefined;\r\n\t\t\t}\r\n\r\n\t\t\tlet result: SemanticContext = operands[0];\r\n\t\t\tfor (let i = 1; i < operands.length; i++) {\r\n\t\t\t\tresult = SemanticContext.or(result, operands[i]);\r\n\t\t\t}\r\n\r\n\t\t\treturn result;\r\n\t\t}\r\n\r\n\t\t@Override\r\n\t\tpublic toString(): string {\r\n\t\t\treturn Utils.join(this.opnds, \"||\");\r\n\t\t}\r\n\t}\r\n}\r\n", "/*!\r\n * Copyright 2016 The ANTLR Project. All rights reserved.\r\n * Licensed under the BSD-3-Clause license. 
See LICENSE file in the project root for license information.\r\n */\r\n\r\n// ConvertTo-TS run at 2016-10-04T11:26:35.2826960-07:00\r\n\r\nimport { AbstractPredicateTransition } from \"./AbstractPredicateTransition\";\r\nimport { ATNState } from \"./ATNState\";\r\nimport { NotNull, Override } from \"../Decorators\";\r\nimport { SemanticContext } from \"./SemanticContext\";\r\nimport { TransitionType } from \"./TransitionType\";\r\n\r\n/** TODO: this is old comment:\r\n * A tree of semantic predicates from the grammar AST if label==SEMPRED.\r\n * In the ATN, labels will always be exactly one predicate, but the DFA\r\n * may have to combine a bunch of them as it collects predicates from\r\n * multiple ATN configurations into a single DFA state.\r\n */\r\nexport class PredicateTransition extends AbstractPredicateTransition {\r\n\tpublic ruleIndex: number;\r\n\tpublic predIndex: number;\r\n\tpublic isCtxDependent: boolean; // e.g., $i ref in pred\r\n\r\n\tconstructor(@NotNull target: ATNState, ruleIndex: number, predIndex: number, isCtxDependent: boolean) {\r\n\t\tsuper(target);\r\n\t\tthis.ruleIndex = ruleIndex;\r\n\t\tthis.predIndex = predIndex;\r\n\t\tthis.isCtxDependent = isCtxDependent;\r\n\t}\r\n\r\n\t@Override\r\n\tget serializationType(): TransitionType {\r\n\t\treturn TransitionType.PREDICATE;\r\n\t}\r\n\r\n\t@Override\r\n\tget isEpsilon(): boolean { return true; }\r\n\r\n\t@Override\r\n\tpublic matches(symbol: number, minVocabSymbol: number, maxVocabSymbol: number): boolean {\r\n\t\treturn false;\r\n\t}\r\n\r\n\tget predicate(): SemanticContext.Predicate {\r\n\t\treturn new SemanticContext.Predicate(this.ruleIndex, this.predIndex, this.isCtxDependent);\r\n\t}\r\n\r\n\t@Override\r\n\t@NotNull\r\n\tpublic toString(): string {\r\n\t\treturn \"pred_\" + this.ruleIndex + \":\" + this.predIndex;\r\n\t}\r\n}\r\n", "/*!\r\n * Copyright 2016 The ANTLR Project. All rights reserved.\r\n * Licensed under the BSD-3-Clause license. 
See LICENSE file in the project root for license information.\r\n */\r\n\r\n// ConvertTo-TS run at 2016-10-04T11:26:51.4099946-07:00\r\n\r\nimport { AbstractPredicateTransition } from \"./atn/AbstractPredicateTransition\";\r\nimport { ATN } from \"./atn/ATN\";\r\nimport { ATNState } from \"./atn/ATNState\";\r\nimport { Parser } from \"./Parser\";\r\nimport { RecognitionException } from \"./RecognitionException\";\r\nimport { Recognizer } from \"./Recognizer\";\r\nimport { NotNull } from \"./Decorators\";\r\nimport { PredicateTransition } from \"./atn/PredicateTransition\";\r\n\r\n/** A semantic predicate failed during validation. Validation of predicates\r\n * occurs when normally parsing the alternative just like matching a token.\r\n * Disambiguating predicate evaluation occurs when we test a predicate during\r\n * prediction.\r\n */\r\nexport class FailedPredicateException extends RecognitionException {\r\n\t//private static serialVersionUID: number = 5379330841495778709L;\r\n\r\n\tprivate _ruleIndex: number;\r\n\tprivate _predicateIndex: number;\r\n\tprivate _predicate?: string;\r\n\r\n\tconstructor(@NotNull recognizer: Parser, predicate?: string, message?: string) {\r\n\t\tsuper(\r\n\t\t\trecognizer,\r\n\t\t\trecognizer.inputStream,\r\n\t\t\trecognizer.context,\r\n\t\t\tFailedPredicateException.formatMessage(predicate, message));\r\n\t\tlet s: ATNState = recognizer.interpreter.atn.states[recognizer.state];\r\n\r\n\t\tlet trans = s.transition(0) as AbstractPredicateTransition;\r\n\t\tif (trans instanceof PredicateTransition) {\r\n\t\t\tthis._ruleIndex = trans.ruleIndex;\r\n\t\t\tthis._predicateIndex = trans.predIndex;\r\n\t\t}\r\n\t\telse {\r\n\t\t\tthis._ruleIndex = 0;\r\n\t\t\tthis._predicateIndex = 0;\r\n\t\t}\r\n\r\n\t\tthis._predicate = predicate;\r\n\t\tsuper.setOffendingToken(recognizer, recognizer.currentToken);\r\n\t}\r\n\r\n\tget ruleIndex(): number {\r\n\t\treturn this._ruleIndex;\r\n\t}\r\n\r\n\tget predicateIndex(): number {\r\n\t\treturn 
this._predicateIndex;\r\n\t}\r\n\r\n\tget predicate(): string | undefined {\r\n\t\treturn this._predicate;\r\n\t}\r\n\r\n\t@NotNull\r\n\tprivate static formatMessage(predicate: string | undefined, message: string | undefined): string {\r\n\t\tif (message) {\r\n\t\t\treturn message;\r\n\t\t}\r\n\r\n\t\treturn `failed predicate: {${predicate}}?`;\r\n\t}\r\n}\r\n", "/*!\r\n * Copyright 2016 The ANTLR Project. All rights reserved.\r\n * Licensed under the BSD-3-Clause license. See LICENSE file in the project root for license information.\r\n */\r\n\r\n// ConvertTo-TS run at 2016-10-04T11:26:51.5187682-07:00\r\n\r\nimport { RecognitionException } from \"./RecognitionException\";\r\nimport { NotNull } from \"./Decorators\";\r\nimport { Parser } from \"./Parser\";\r\nimport { ParserRuleContext } from \"./ParserRuleContext\";\r\n\r\n/** This signifies any kind of mismatched input exceptions such as\r\n * when the current input does not match the expected token.\r\n */\r\nexport class InputMismatchException extends RecognitionException {\r\n\t//private static serialVersionUID: number = 1532568338707443067L;\r\n\r\n\tconstructor(/*@NotNull*/ recognizer: Parser);\r\n\tconstructor(/*@NotNull*/ recognizer: Parser, state: number, context: ParserRuleContext);\r\n\tconstructor(@NotNull recognizer: Parser, state?: number, context?: ParserRuleContext) {\r\n\t\tif (context === undefined) {\r\n\t\t\tcontext = recognizer.context;\r\n\t\t}\r\n\r\n\t\tsuper(recognizer, recognizer.inputStream, context);\r\n\r\n\t\tif (state !== undefined) {\r\n\t\t\tthis.setOffendingState(state);\r\n\t\t}\r\n\r\n\t\tthis.setOffendingToken(recognizer, recognizer.currentToken);\r\n\t}\r\n}\r\n", "/*!\r\n * Copyright 2016 The ANTLR Project. All rights reserved.\r\n * Licensed under the BSD-3-Clause license. 
See LICENSE file in the project root for license information.\r\n */\r\n\r\nexport namespace Arrays {\r\n\t/**\r\n\t * Searches the specified array of numbers for the specified value using the binary search algorithm. The array must\r\n\t * be sorted prior to making this call. If it is not sorted, the results are unspecified. If the array contains\r\n\t * multiple elements with the specified value, there is no guarantee which one will be found.\r\n\t *\r\n\t * @returns index of the search key, if it is contained in the array; otherwise, (-(insertion point) - 1). The\r\n\t * insertion point is defined as the point at which the key would be inserted into the array: the index of the first\r\n\t * element greater than the key, or array.length if all elements in the array are less than the specified key. Note\r\n\t * that this guarantees that the return value will be >= 0 if and only if the key is found.\r\n\t */\r\n\texport function binarySearch(array: ArrayLike, key: number, fromIndex?: number, toIndex?: number): number {\r\n\t\treturn binarySearch0(array, fromIndex !== undefined ? fromIndex : 0, toIndex !== undefined ? 
toIndex : array.length, key);\r\n\t}\r\n\r\n\tfunction binarySearch0(array: ArrayLike, fromIndex: number, toIndex: number, key: number): number {\r\n\t\tlet low: number = fromIndex;\r\n\t\tlet high: number = toIndex - 1;\r\n\r\n\t\twhile (low <= high) {\r\n\t\t\tlet mid: number = (low + high) >>> 1;\r\n\t\t\tlet midVal: number = array[mid];\r\n\r\n\t\t\tif (midVal < key) {\r\n\t\t\t\tlow = mid + 1;\r\n\t\t\t} else if (midVal > key) {\r\n\t\t\t\thigh = mid - 1;\r\n\t\t\t} else {\r\n\t\t\t\t// key found\r\n\t\t\t\treturn mid;\r\n\t\t\t}\r\n\t\t}\r\n\r\n\t\t// key not found.\r\n\t\treturn -(low + 1);\r\n\t}\r\n\r\n\texport function toString(array: Iterable) {\r\n\t\tlet result = \"[\";\r\n\r\n\t\tlet first = true;\r\n\t\tfor (let element of array) {\r\n\t\t\tif (first) {\r\n\t\t\t\tfirst = false;\r\n\t\t\t} else {\r\n\t\t\t\tresult += \", \";\r\n\t\t\t}\r\n\r\n\t\t\tif (element === null) {\r\n\t\t\t\tresult += \"null\";\r\n\t\t\t} else if (element === undefined) {\r\n\t\t\t\tresult += \"undefined\";\r\n\t\t\t} else {\r\n\t\t\t\tresult += element;\r\n\t\t\t}\r\n\t\t}\r\n\r\n\t\tresult += \"]\";\r\n\t\treturn result;\r\n\t}\r\n}\r\n", "/*!\r\n * Copyright 2016 The ANTLR Project. All rights reserved.\r\n * Licensed under the BSD-3-Clause license. 
See LICENSE file in the project root for license information.\r\n */\r\n\r\n// ConvertTo-TS run at 2016-10-04T11:26:40.5099429-07:00\r\n\r\nimport { Arrays } from \"./Arrays\";\r\nimport { NotNull, Override } from \"../Decorators\";\r\nimport { JavaCollection } from \"./Stubs\";\r\n\r\nconst EMPTY_DATA: Int32Array = new Int32Array(0);\r\n\r\nconst INITIAL_SIZE: number = 4;\r\nconst MAX_ARRAY_SIZE: number = (((1 << 31) >>> 0) - 1) - 8;\r\n\r\n/**\r\n *\r\n * @author Sam Harwell\r\n */\r\nexport class IntegerList {\r\n\t@NotNull\r\n\tprivate _data: Int32Array;\r\n\r\n\tprivate _size: number;\r\n\r\n\tconstructor(arg?: number | IntegerList | Iterable) {\r\n\t\tif (!arg) {\r\n\t\t\tthis._data = EMPTY_DATA;\r\n\t\t\tthis._size = 0;\r\n\t\t} else if (arg instanceof IntegerList) {\r\n\t\t\tthis._data = arg._data.slice(0);\r\n\t\t\tthis._size = arg._size;\r\n\t\t} else if (typeof arg === \"number\") {\r\n\t\t\tif (arg === 0) {\r\n\t\t\t\tthis._data = EMPTY_DATA;\r\n\t\t\t\tthis._size = 0;\r\n\t\t\t} else {\r\n\t\t\t\tthis._data = new Int32Array(arg);\r\n\t\t\t\tthis._size = 0;\r\n\t\t\t}\r\n\t\t} else {\r\n\t\t\t// arg is Iterable\r\n\t\t\tthis._data = EMPTY_DATA;\r\n\t\t\tthis._size = 0;\r\n\t\t\tfor (let value of arg) {\r\n\t\t\t\tthis.add(value);\r\n\t\t\t}\r\n\t\t}\r\n\t}\r\n\r\n\tpublic add(value: number): void {\r\n\t\tif (this._data.length === this._size) {\r\n\t\t\tthis.ensureCapacity(this._size + 1);\r\n\t\t}\r\n\r\n\t\tthis._data[this._size] = value;\r\n\t\tthis._size++;\r\n\t}\r\n\r\n\tpublic addAll(list: number[] | IntegerList | JavaCollection): void {\r\n\t\tif (Array.isArray(list)) {\r\n\t\t\tthis.ensureCapacity(this._size + list.length);\r\n\t\t\tthis._data.subarray(this._size, this._size + list.length).set(list);\r\n\t\t\tthis._size += list.length;\r\n\t\t} else if (list instanceof IntegerList) {\r\n\t\t\tthis.ensureCapacity(this._size + list._size);\r\n\t\t\tthis._data.subarray(this._size, this._size + list.size).set(list._data);\r\n\t\t\tthis._size += 
list._size;\r\n\t\t} else {\r\n\t\t\t// list is JavaCollection\r\n\t\t\tthis.ensureCapacity(this._size + list.size);\r\n\t\t\tlet current: number = 0;\r\n\t\t\tfor (let xi of list) {\r\n\t\t\t\tthis._data[this._size + current] = xi;\r\n\t\t\t\tcurrent++;\r\n\t\t\t}\r\n\r\n\t\t\tthis._size += list.size;\r\n\t\t}\r\n\t}\r\n\r\n\tpublic get(index: number): number {\r\n\t\tif (index < 0 || index >= this._size) {\r\n\t\t\tthrow RangeError();\r\n\t\t}\r\n\r\n\t\treturn this._data[index];\r\n\t}\r\n\r\n\tpublic contains(value: number): boolean {\r\n\t\tfor (let i = 0; i < this._size; i++) {\r\n\t\t\tif (this._data[i] === value) {\r\n\t\t\t\treturn true;\r\n\t\t\t}\r\n\t\t}\r\n\r\n\t\treturn false;\r\n\t}\r\n\r\n\tpublic set(index: number, value: number): number {\r\n\t\tif (index < 0 || index >= this._size) {\r\n\t\t\tthrow RangeError();\r\n\t\t}\r\n\r\n\t\tlet previous: number = this._data[index];\r\n\t\tthis._data[index] = value;\r\n\t\treturn previous;\r\n\t}\r\n\r\n\tpublic removeAt(index: number): number {\r\n\t\tlet value: number = this.get(index);\r\n\t\tthis._data.copyWithin(index, index + 1, this._size);\r\n\t\tthis._data[this._size - 1] = 0;\r\n\t\tthis._size--;\r\n\t\treturn value;\r\n\t}\r\n\r\n\tpublic removeRange(fromIndex: number, toIndex: number): void {\r\n\t\tif (fromIndex < 0 || toIndex < 0 || fromIndex > this._size || toIndex > this._size) {\r\n\t\t\tthrow RangeError();\r\n\t\t}\r\n\r\n\t\tif (fromIndex > toIndex) {\r\n\t\t\tthrow RangeError();\r\n\t\t}\r\n\r\n\t\tthis._data.copyWithin(toIndex, fromIndex, this._size);\r\n\t\tthis._data.fill(0, this._size - (toIndex - fromIndex), this._size);\r\n\t\tthis._size -= (toIndex - fromIndex);\r\n\t}\r\n\r\n\tget isEmpty(): boolean {\r\n\t\treturn this._size === 0;\r\n\t}\r\n\r\n\tget size(): number {\r\n\t\treturn this._size;\r\n\t}\r\n\r\n\tpublic trimToSize(): void {\r\n\t\tif (this._data.length === this._size) {\r\n\t\t\treturn;\r\n\t\t}\r\n\r\n\t\tthis._data = this._data.slice(0, 
this._size);\r\n\t}\r\n\r\n\tpublic clear(): void {\r\n\t\tthis._data.fill(0, 0, this._size);\r\n\t\tthis._size = 0;\r\n\t}\r\n\r\n\tpublic toArray(): number[] {\r\n\t\tif (this._size === 0) {\r\n\t\t\treturn [];\r\n\t\t}\r\n\r\n\t\treturn Array.from(this._data.subarray(0, this._size));\r\n\t}\r\n\r\n\tpublic sort(): void {\r\n\t\tthis._data.subarray(0, this._size).sort();\r\n\t}\r\n\r\n\t/**\r\n\t * Compares the specified object with this list for equality. Returns\r\n\t * `true` if and only if the specified object is also an {@link IntegerList},\r\n\t * both lists have the same size, and all corresponding pairs of elements in\r\n\t * the two lists are equal. In other words, two lists are defined to be\r\n\t * equal if they contain the same elements in the same order.\r\n\t *\r\n\t * This implementation first checks if the specified object is this\r\n\t * list. If so, it returns `true`; if not, it checks if the\r\n\t * specified object is an {@link IntegerList}. If not, it returns `false`;\r\n\t * if so, it checks the size of both lists. If the lists are not the same size,\r\n\t * it returns `false`; otherwise it iterates over both lists, comparing\r\n\t * corresponding pairs of elements. 
If any comparison returns `false`,\r\n\t * this method returns `false`.\r\n\t *\r\n\t * @param o the object to be compared for equality with this list\r\n\t * @returns `true` if the specified object is equal to this list\r\n\t */\r\n\t@Override\r\n\tpublic equals(o: any): boolean {\r\n\t\tif (o === this) {\r\n\t\t\treturn true;\r\n\t\t}\r\n\r\n\t\tif (!(o instanceof IntegerList)) {\r\n\t\t\treturn false;\r\n\t\t}\r\n\r\n\t\tif (this._size !== o._size) {\r\n\t\t\treturn false;\r\n\t\t}\r\n\r\n\t\tfor (let i = 0; i < this._size; i++) {\r\n\t\t\tif (this._data[i] !== o._data[i]) {\r\n\t\t\t\treturn false;\r\n\t\t\t}\r\n\t\t}\r\n\r\n\t\treturn true;\r\n\t}\r\n\r\n\t/**\r\n\t * Returns the hash code value for this list.\r\n\t *\r\n\t * This implementation uses exactly the code that is used to define the\r\n\t * list hash function in the documentation for the {@link List#hashCode}\r\n\t * method.\r\n\t *\r\n\t * @returns the hash code value for this list\r\n\t */\r\n\t@Override\r\n\tpublic hashCode(): number {\r\n\t\tlet hashCode: number = 1;\r\n\t\tfor (let i = 0; i < this._size; i++) {\r\n\t\t\thashCode = 31 * hashCode + this._data[i];\r\n\t\t}\r\n\r\n\t\treturn hashCode;\r\n\t}\r\n\r\n\t/**\r\n\t * Returns a string representation of this list.\r\n\t */\r\n\t@Override\r\n\tpublic toString(): string {\r\n\t\treturn this._data.toString();\r\n\t}\r\n\r\n\tpublic binarySearch(key: number, fromIndex?: number, toIndex?: number): number {\r\n\t\tif (fromIndex === undefined) {\r\n\t\t\tfromIndex = 0;\r\n\t\t}\r\n\r\n\t\tif (toIndex === undefined) {\r\n\t\t\ttoIndex = this._size;\r\n\t\t}\r\n\r\n\t\tif (fromIndex < 0 || toIndex < 0 || fromIndex > this._size || toIndex > this._size) {\r\n\t\t\tthrow new RangeError();\r\n\t\t}\r\n\r\n\t\tif (fromIndex > toIndex) {\r\n\t\t\tthrow new RangeError();\r\n\t\t}\r\n\r\n\t\treturn Arrays.binarySearch(this._data, key, fromIndex, toIndex);\r\n\t}\r\n\r\n\tprivate ensureCapacity(capacity: number): void {\r\n\t\tif (capacity < 0 || capacity 
> MAX_ARRAY_SIZE) {\r\n\t\t\tthrow new RangeError();\r\n\t\t}\r\n\r\n\t\tlet newLength: number;\r\n\t\tif (this._data.length === 0) {\r\n\t\t\tnewLength = INITIAL_SIZE;\r\n\t\t} else {\r\n\t\t\tnewLength = this._data.length;\r\n\t\t}\r\n\r\n\t\twhile (newLength < capacity) {\r\n\t\t\tnewLength = newLength * 2;\r\n\t\t\tif (newLength < 0 || newLength > MAX_ARRAY_SIZE) {\r\n\t\t\t\tnewLength = MAX_ARRAY_SIZE;\r\n\t\t\t}\r\n\t\t}\r\n\r\n\t\tlet tmp = new Int32Array(newLength);\r\n\t\ttmp.set(this._data);\r\n\t\tthis._data = tmp;\r\n\t}\r\n\r\n\t/** Convert the list to a UTF-16 encoded char array. If all values are less\r\n\t * than the 0xFFFF 16-bit code point limit then this is just a char array\r\n\t * of 16-bit char as usual. For values in the supplementary range, encode\r\n\t * them as two UTF-16 code units.\r\n\t */\r\n\tpublic toCharArray(): Uint16Array {\r\n\t\t// Optimize for the common case (all data values are < 0xFFFF) to avoid an extra scan\r\n\t\tlet resultArray: Uint16Array = new Uint16Array(this._size);\r\n\t\tlet resultIdx = 0;\r\n\t\tlet calculatedPreciseResultSize = false;\r\n\t\tfor (let i = 0; i < this._size; i++) {\r\n\t\t\tlet codePoint = this._data[i];\r\n\t\t\tif (codePoint >= 0 && codePoint < 0x10000) {\r\n\t\t\t\tresultArray[resultIdx] = codePoint;\r\n\t\t\t\tresultIdx++;\r\n\t\t\t\tcontinue;\r\n\t\t\t}\r\n\r\n\t\t\t// Calculate the precise result size if we encounter a code point > 0xFFFF\r\n\t\t\tif (!calculatedPreciseResultSize) {\r\n\t\t\t\tlet newResultArray = new Uint16Array(this.charArraySize());\r\n\t\t\t\tnewResultArray.set(resultArray, 0);\r\n\t\t\t\tresultArray = newResultArray;\r\n\t\t\t\tcalculatedPreciseResultSize = true;\r\n\t\t\t}\r\n\r\n\t\t\t// This will throw RangeError if the code point is not a valid Unicode code point\r\n\t\t\tlet pair = String.fromCodePoint(codePoint);\r\n\t\t\tresultArray[resultIdx] = pair.charCodeAt(0);\r\n\t\t\tresultArray[resultIdx + 1] = pair.charCodeAt(1);\r\n\t\t\tresultIdx += 
2;\r\n\t\t}\r\n\t\treturn resultArray;\r\n\t}\r\n\r\n\tprivate charArraySize(): number {\r\n\t\tlet result = 0;\r\n\t\tfor (let i = 0; i < this._size; i++) {\r\n\t\t\tresult += this._data[i] >= 0x10000 ? 2 : 1;\r\n\t\t}\r\n\t\treturn result;\r\n\t}\r\n}\r\n", "/*!\r\n * Copyright 2016 The ANTLR Project. All rights reserved.\r\n * Licensed under the BSD-3-Clause license. See LICENSE file in the project root for license information.\r\n */\r\n\r\n// ConvertTo-TS run at 2016-10-04T11:26:40.7402214-07:00\r\n\r\nimport { Override } from \"../Decorators\";\r\nimport { Equatable } from \"./Stubs\";\r\n\r\nconst INTERVAL_POOL_MAX_VALUE: number = 1000;\r\n\r\n/** An immutable inclusive interval a..b */\r\nexport class Interval implements Equatable {\r\n\tprivate static _INVALID: Interval = new Interval(-1, -2);\r\n\tstatic get INVALID(): Interval {\r\n\t\treturn Interval._INVALID;\r\n\t}\r\n\r\n\tprivate static readonly cache: Interval[] = new Array(INTERVAL_POOL_MAX_VALUE + 1);\r\n\r\n\t/**\r\n\t * @param a The start of the interval\r\n\t * @param b The end of the interval (inclusive)\r\n\t */\r\n\tconstructor(public a: number, public b: number) {\r\n\t}\r\n\r\n\t/** Interval objects are used readonly so share all with the\r\n\t * same single value a==b up to some max size. Use an array as a perfect hash.\r\n\t * Return shared object for 0..INTERVAL_POOL_MAX_VALUE or a new\r\n\t * Interval object with a..a in it. On Java.g4, 218623 IntervalSets\r\n\t * have a..a (set with 1 element).\r\n\t */\r\n\tpublic static of(a: number, b: number): Interval {\r\n\t\t// cache just a..a\r\n\t\tif (a !== b || a < 0 || a > INTERVAL_POOL_MAX_VALUE) {\r\n\t\t\treturn new Interval(a, b);\r\n\t\t}\r\n\r\n\t\tif (Interval.cache[a] == null) {\r\n\t\t\tInterval.cache[a] = new Interval(a, a);\r\n\t\t}\r\n\r\n\t\treturn Interval.cache[a];\r\n\t}\r\n\r\n\t/** return number of elements between a and b inclusively. x..x is length 1.\r\n\t * if b < a, then length is 0. 
9..10 has length 2.\r\n\t */\r\n\tget length(): number {\r\n\t\tif (this.b < this.a) {\r\n\t\t\treturn 0;\r\n\t\t}\r\n\r\n\t\treturn this.b - this.a + 1;\r\n\t}\r\n\r\n\t@Override\r\n\tpublic equals(o: any): boolean {\r\n\t\tif (o === this) {\r\n\t\t\treturn true;\r\n\t\t}\r\n\t\telse if (!(o instanceof Interval)) {\r\n\t\t\treturn false;\r\n\t\t}\r\n\r\n\t\treturn this.a === o.a && this.b === o.b;\r\n\t}\r\n\r\n\t@Override\r\n\tpublic hashCode(): number {\r\n\t\tlet hash: number = 23;\r\n\t\thash = hash * 31 + this.a;\r\n\t\thash = hash * 31 + this.b;\r\n\t\treturn hash;\r\n\t}\r\n\r\n\t/** Does this start completely before other? Disjoint */\r\n\tpublic startsBeforeDisjoint(other: Interval): boolean {\r\n\t\treturn this.a < other.a && this.b < other.a;\r\n\t}\r\n\r\n\t/** Does this start at or before other? Nondisjoint */\r\n\tpublic startsBeforeNonDisjoint(other: Interval): boolean {\r\n\t\treturn this.a <= other.a && this.b >= other.a;\r\n\t}\r\n\r\n\t/** Does this.a start after other.b? May or may not be disjoint */\r\n\tpublic startsAfter(other: Interval): boolean {\r\n\t\treturn this.a > other.a;\r\n\t}\r\n\r\n\t/** Does this start completely after other? Disjoint */\r\n\tpublic startsAfterDisjoint(other: Interval): boolean {\r\n\t\treturn this.a > other.b;\r\n\t}\r\n\r\n\t/** Does this start after other? NonDisjoint */\r\n\tpublic startsAfterNonDisjoint(other: Interval): boolean {\r\n\t\treturn this.a > other.a && this.a <= other.b; // this.b>=other.b implied\r\n\t}\r\n\r\n\t/** Are both ranges disjoint? I.e., no overlap? */\r\n\tpublic disjoint(other: Interval): boolean {\r\n\t\treturn this.startsBeforeDisjoint(other) || this.startsAfterDisjoint(other);\r\n\t}\r\n\r\n\t/** Are two intervals adjacent such as 0..41 and 42..42? 
*/\r\n\tpublic adjacent(other: Interval): boolean {\r\n\t\treturn this.a === other.b + 1 || this.b === other.a - 1;\r\n\t}\r\n\r\n\tpublic properlyContains(other: Interval): boolean {\r\n\t\treturn other.a >= this.a && other.b <= this.b;\r\n\t}\r\n\r\n\t/** Return the interval computed from combining this and other */\r\n\tpublic union(other: Interval): Interval {\r\n\t\treturn Interval.of(Math.min(this.a, other.a), Math.max(this.b, other.b));\r\n\t}\r\n\r\n\t/** Return the interval in common between this and o */\r\n\tpublic intersection(other: Interval): Interval {\r\n\t\treturn Interval.of(Math.max(this.a, other.a), Math.min(this.b, other.b));\r\n\t}\r\n\r\n\t/** Return the interval with elements from `this` not in `other`;\r\n\t * `other` must not be totally enclosed (properly contained)\r\n\t * within `this`, which would result in two disjoint intervals\r\n\t * instead of the single one returned by this method.\r\n\t */\r\n\tpublic differenceNotProperlyContained(other: Interval): Interval | undefined {\r\n\t\tlet diff: Interval | undefined;\r\n\t\tif (other.startsBeforeNonDisjoint(this)) {\r\n\t\t\t// other.a to left of this.a (or same)\r\n\t\t\tdiff = Interval.of(Math.max(this.a, other.b + 1), this.b);\r\n\t\t} else if (other.startsAfterNonDisjoint(this)) {\r\n\t\t\t// other.a to right of this.a\r\n\t\t\tdiff = Interval.of(this.a, other.a - 1);\r\n\t\t}\r\n\r\n\t\treturn diff;\r\n\t}\r\n\r\n\t@Override\r\n\tpublic toString(): string {\r\n\t\treturn this.a + \"..\" + this.b;\r\n\t}\r\n}\r\n", "/*!\r\n * Copyright 2016 The ANTLR Project. All rights reserved.\r\n * Licensed under the BSD-3-Clause license. 
See LICENSE file in the project root for license information.\r\n */\r\n\r\n// ConvertTo-TS run at 2016-10-04T11:26:57.7862802-07:00\r\n\r\nimport { CharStream } from \"./CharStream\";\r\nimport { IntStream } from \"./IntStream\";\r\nimport { TokenSource } from \"./TokenSource\";\r\nimport { TokenStream } from \"./TokenStream\";\r\n\r\n/** A token has properties: text, type, line, character position in the line\r\n * (so we can ignore tabs), token channel, index, and source from which\r\n * we obtained this token.\r\n */\r\nexport interface Token {\r\n\t/**\r\n\t * Get the text of the token.\r\n\t */\r\n\treadonly text: string | undefined;\r\n\r\n\t/** Get the token type of the token */\r\n\treadonly type: number;\r\n\r\n\t/** The line number on which the 1st character of this token was matched,\r\n\t * line=1..n\r\n\t */\r\n\treadonly line: number;\r\n\r\n\t/** The index of the first character of this token relative to the\r\n\t * beginning of the line at which it occurs, 0..n-1\r\n\t */\r\n\treadonly charPositionInLine: number;\r\n\r\n\t/** Return the channel this token. 
Each token can arrive at the parser\r\n\t * on a different channel, but the parser only \"tunes\" to a single channel.\r\n\t * The parser ignores everything not on DEFAULT_CHANNEL.\r\n\t */\r\n\treadonly channel: number;\r\n\r\n\t/** An index from 0..n-1 of the token object in the input stream.\r\n\t * This must be valid in order to print token streams and\r\n\t * use TokenRewriteStream.\r\n\t *\r\n\t * Return -1 to indicate that this token was conjured up since\r\n\t * it doesn't have a valid index.\r\n\t */\r\n\treadonly tokenIndex: number;\r\n\r\n\t/** The starting character index of the token\r\n\t * This method is optional; return -1 if not implemented.\r\n\t */\r\n\treadonly startIndex: number;\r\n\r\n\t/** The last character index of the token.\r\n\t * This method is optional; return -1 if not implemented.\r\n\t */\r\n\treadonly stopIndex: number;\r\n\r\n\t/** Gets the {@link TokenSource} which created this token.\r\n\t */\r\n\treadonly tokenSource: TokenSource | undefined;\r\n\r\n\t/**\r\n\t * Gets the {@link CharStream} from which this token was derived.\r\n\t */\r\n\treadonly inputStream: CharStream | undefined;\r\n}\r\n\r\nexport namespace Token {\r\n\texport const INVALID_TYPE: number = 0;\r\n\r\n\t/** During lookahead operations, this \"token\" signifies we hit rule end ATN state\r\n\t * and did not follow it despite needing to.\r\n\t */\r\n\texport const EPSILON: number = -2;\r\n\r\n\texport const MIN_USER_TOKEN_TYPE: number = 1;\r\n\r\n\texport const EOF: number = IntStream.EOF;\r\n\r\n\t/** All tokens go to the parser (unless skip() is called in that rule)\r\n\t * on a particular \"channel\". The parser tunes to a particular channel\r\n\t * so that whitespace etc... 
can go to the parser on a \"hidden\" channel.\r\n\t */\r\n\texport const DEFAULT_CHANNEL: number = 0;\r\n\r\n\t/** Anything on different channel than DEFAULT_CHANNEL is not parsed\r\n\t * by parser.\r\n\t */\r\n\texport const HIDDEN_CHANNEL: number = 1;\r\n\r\n\t/**\r\n\t * This is the minimum constant value which can be assigned to a\r\n\t * user-defined token channel.\r\n\t *\r\n\t * The non-negative numbers less than {@link #MIN_USER_CHANNEL_VALUE} are\r\n\t * assigned to the predefined channels {@link #DEFAULT_CHANNEL} and\r\n\t * {@link #HIDDEN_CHANNEL}.\r\n\t *\r\n\t * @see `Token.channel`\r\n\t */\r\n\texport const MIN_USER_CHANNEL_VALUE: number = 2;\r\n}\r\n", "/*!\r\n * Copyright 2016 The ANTLR Project. All rights reserved.\r\n * Licensed under the BSD-3-Clause license. See LICENSE file in the project root for license information.\r\n */\r\n\r\n// ConvertTo-TS run at 2016-10-04T11:26:50.1614404-07:00\r\n\r\nimport { ATNSimulator } from \"./atn/ATNSimulator\";\r\nimport { CharStream } from \"./CharStream\";\r\nimport { Interval } from \"./misc/Interval\";\r\nimport { NotNull, Override } from \"./Decorators\";\r\nimport { Recognizer } from \"./Recognizer\";\r\nimport { Token } from \"./Token\";\r\nimport { TokenSource } from \"./TokenSource\";\r\nimport { WritableToken } from \"./WritableToken\";\r\n\r\nexport class CommonToken implements WritableToken {\r\n\t/**\r\n\t * An empty {@link Tuple2} which is used as the default value of\r\n\t * {@link #source} for tokens that do not have a source.\r\n\t */\r\n\tprotected static readonly EMPTY_SOURCE: { source?: TokenSource, stream?: CharStream } =\r\n\t\t{ source: undefined, stream: undefined };\r\n\r\n\t/**\r\n\t * This is the backing field for `type`.\r\n\t */\r\n\tprivate _type: number;\r\n\t/**\r\n\t * This is the backing field for {@link #getLine} and {@link #setLine}.\r\n\t */\r\n\tprivate _line: number = 0;\r\n\t/**\r\n\t * This is the backing field for {@link #getCharPositionInLine} and\r\n\t * {@link 
#setCharPositionInLine}.\r\n\t */\r\n\tprivate _charPositionInLine: number = -1; // set to invalid position\r\n\t/**\r\n\t * This is the backing field for {@link #getChannel} and\r\n\t * {@link #setChannel}.\r\n\t */\r\n\tprivate _channel: number = Token.DEFAULT_CHANNEL;\r\n\t/**\r\n\t * This is the backing field for {@link #getTokenSource} and\r\n\t * {@link #getInputStream}.\r\n\t *\r\n\t * These properties share a field to reduce the memory footprint of\r\n\t * {@link CommonToken}. Tokens created by a {@link CommonTokenFactory} from\r\n\t * the same source and input stream share a reference to the same\r\n\t * {@link Tuple2} containing these values.\r\n\t */\r\n\t@NotNull\r\n\tprotected source: { source?: TokenSource, stream?: CharStream };\r\n\r\n\t/**\r\n\t * This is the backing field for {@link #getText} when the token text is\r\n\t * explicitly set in the constructor or via {@link #setText}.\r\n\t *\r\n\t * @see `text`\r\n\t */\r\n\tprivate _text?: string;\r\n\r\n\t/**\r\n\t * This is the backing field for `tokenIndex`.\r\n\t */\r\n\tprotected index: number = -1;\r\n\r\n\t/**\r\n\t * This is the backing field for `startIndex`.\r\n\t */\r\n\tprotected start: number;\r\n\r\n\t/**\r\n\t * This is the backing field for `stopIndex`.\r\n\t */\r\n\tprivate stop: number;\r\n\r\n\tconstructor(type: number, text?: string, @NotNull source: { source?: TokenSource, stream?: CharStream } = CommonToken.EMPTY_SOURCE, channel: number = Token.DEFAULT_CHANNEL, start: number = 0, stop: number = 0) {\r\n\t\tthis._text = text;\r\n\t\tthis._type = type;\r\n\t\tthis.source = source;\r\n\t\tthis._channel = channel;\r\n\t\tthis.start = start;\r\n\t\tthis.stop = stop;\r\n\t\tif (source.source != null) {\r\n\t\t\tthis._line = source.source.line;\r\n\t\t\tthis._charPositionInLine = source.source.charPositionInLine;\r\n\t\t}\r\n\t}\r\n\r\n\t/**\r\n\t * Constructs a new {@link CommonToken} as a copy of another {@link Token}.\r\n\t *\r\n\t * If `oldToken` is also a {@link CommonToken} 
instance, the newly\r\n\t * constructed token will share a reference to the {@link #text} field and\r\n\t * the {@link Tuple2} stored in {@link #source}. Otherwise, {@link #text} will\r\n\t * be assigned the result of calling {@link #getText}, and {@link #source}\r\n\t * will be constructed from the result of {@link Token#getTokenSource} and\r\n\t * {@link Token#getInputStream}.\r\n\t *\r\n\t * @param oldToken The token to copy.\r\n\t */\r\n\tpublic static fromToken(@NotNull oldToken: Token): CommonToken {\r\n\t\tlet result: CommonToken = new CommonToken(oldToken.type, undefined, CommonToken.EMPTY_SOURCE, oldToken.channel, oldToken.startIndex, oldToken.stopIndex);\r\n\t\tresult._line = oldToken.line;\r\n\t\tresult.index = oldToken.tokenIndex;\r\n\t\tresult._charPositionInLine = oldToken.charPositionInLine;\r\n\r\n\t\tif (oldToken instanceof CommonToken) {\r\n\t\t\tresult._text = oldToken._text;\r\n\t\t\tresult.source = oldToken.source;\r\n\t\t} else {\r\n\t\t\tresult._text = oldToken.text;\r\n\t\t\tresult.source = { source: oldToken.tokenSource, stream: oldToken.inputStream };\r\n\t\t}\r\n\r\n\t\treturn result;\r\n\t}\r\n\r\n\t@Override\r\n\tget type(): number {\r\n\t\treturn this._type;\r\n\t}\r\n\r\n\t// @Override\r\n\tset type(type: number) {\r\n\t\tthis._type = type;\r\n\t}\r\n\r\n\t@Override\r\n\tget line(): number {\r\n\t\treturn this._line;\r\n\t}\r\n\r\n\t// @Override\r\n\tset line(line: number) {\r\n\t\tthis._line = line;\r\n\t}\r\n\r\n\t@Override\r\n\tget text(): string | undefined {\r\n\t\tif (this._text != null) {\r\n\t\t\treturn this._text;\r\n\t\t}\r\n\r\n\t\tlet input: CharStream | undefined = this.inputStream;\r\n\t\tif (input == null) {\r\n\t\t\treturn undefined;\r\n\t\t}\r\n\r\n\t\tlet n: number = input.size;\r\n\t\tif (this.start < n && this.stop < n) {\r\n\t\t\treturn input.getText(Interval.of(this.start, this.stop));\r\n\t\t} else {\r\n\t\t\treturn \"\";\r\n\t\t}\r\n\t}\r\n\r\n\t/**\r\n\t * Explicitly set the text for this token. 
If {code text} is not\r\n\t * `undefined`, then {@link #getText} will return this value rather than\r\n\t * extracting the text from the input.\r\n\t *\r\n\t * @param text The explicit text of the token, or `undefined` if the text\r\n\t * should be obtained from the input along with the start and stop indexes\r\n\t * of the token.\r\n\t */\r\n\t// @Override\r\n\tset text(text: string | undefined) {\r\n\t\tthis._text = text;\r\n\t}\r\n\r\n\t@Override\r\n\tget charPositionInLine(): number {\r\n\t\treturn this._charPositionInLine;\r\n\t}\r\n\r\n\t// @Override\r\n\tset charPositionInLine(charPositionInLine: number) {\r\n\t\tthis._charPositionInLine = charPositionInLine;\r\n\t}\r\n\r\n\t@Override\r\n\tget channel(): number {\r\n\t\treturn this._channel;\r\n\t}\r\n\r\n\t// @Override\r\n\tset channel(channel: number) {\r\n\t\tthis._channel = channel;\r\n\t}\r\n\r\n\t@Override\r\n\tget startIndex(): number {\r\n\t\treturn this.start;\r\n\t}\r\n\r\n\tset startIndex(start: number) {\r\n\t\tthis.start = start;\r\n\t}\r\n\r\n\t@Override\r\n\tget stopIndex(): number {\r\n\t\treturn this.stop;\r\n\t}\r\n\r\n\tset stopIndex(stop: number) {\r\n\t\tthis.stop = stop;\r\n\t}\r\n\r\n\t@Override\r\n\tget tokenIndex(): number {\r\n\t\treturn this.index;\r\n\t}\r\n\r\n\t// @Override\r\n\tset tokenIndex(index: number) {\r\n\t\tthis.index = index;\r\n\t}\r\n\r\n\t@Override\r\n\tget tokenSource(): TokenSource | undefined {\r\n\t\treturn this.source.source;\r\n\t}\r\n\r\n\t@Override\r\n\tget inputStream(): CharStream | undefined {\r\n\t\treturn this.source.stream;\r\n\t}\r\n\r\n\tpublic toString(): string;\r\n\tpublic toString(recognizer: Recognizer | undefined): string;\r\n\r\n\t@Override\r\n\tpublic toString(recognizer?: Recognizer): string {\r\n\t\tlet channelStr: string = \"\";\r\n\t\tif (this._channel > 0) {\r\n\t\t\tchannelStr = \",channel=\" + this._channel;\r\n\t\t}\r\n\r\n\t\tlet txt: string | undefined = this.text;\r\n\t\tif (txt != null) {\r\n\t\t\ttxt = txt.replace(/\\n/g, 
\"\\\\n\");\r\n\t\t\ttxt = txt.replace(/\\r/g, \"\\\\r\");\r\n\t\t\ttxt = txt.replace(/\\t/g, \"\\\\t\");\r\n\t\t} else {\r\n\t\t\ttxt = \"\";\r\n\t\t}\r\n\r\n\t\tlet typeString = String(this._type);\r\n\t\tif (recognizer) {\r\n\t\t\ttypeString = recognizer.vocabulary.getDisplayName(this._type);\r\n\t\t}\r\n\r\n\t\treturn \"[@\" + this.tokenIndex + \",\" + this.start + \":\" + this.stop + \"='\" + txt + \"',<\" + typeString + \">\" + channelStr + \",\" + this._line + \":\" + this.charPositionInLine + \"]\";\r\n\t}\r\n}\r\n", "/*!\r\n * Copyright 2016 The ANTLR Project. All rights reserved.\r\n * Licensed under the BSD-3-Clause license. See LICENSE file in the project root for license information.\r\n */\r\n\r\n// ConvertTo-TS run at 2016-10-04T11:26:50.3010112-07:00\r\n\r\nimport { CharStream } from \"./CharStream\";\r\nimport { CommonToken } from \"./CommonToken\";\r\nimport { Interval } from \"./misc/Interval\";\r\nimport { Override } from \"./Decorators\";\r\nimport { TokenFactory } from \"./TokenFactory\";\r\nimport { TokenSource } from \"./TokenSource\";\r\n\r\n/**\r\n * This default implementation of {@link TokenFactory} creates\r\n * {@link CommonToken} objects.\r\n */\r\nexport class CommonTokenFactory implements TokenFactory {\r\n\t/**\r\n\t * Indicates whether {@link CommonToken#setText} should be called after\r\n\t * constructing tokens to explicitly set the text. This is useful for cases\r\n\t * where the input stream might not be able to provide arbitrary substrings\r\n\t * of text from the input after the lexer creates a token (e.g. the\r\n\t * implementation of {@link CharStream#getText} in\r\n\t * {@link UnbufferedCharStream}\r\n\t * {@link UnsupportedOperationException}). 
Explicitly setting the token text\r\n\t * allows {@link Token#getText} to be called at any time regardless of the\r\n\t * input stream implementation.\r\n\t *\r\n\t * The default value is `false` to avoid the performance and memory\r\n\t * overhead of copying text for every token unless explicitly requested.\r\n\t */\r\n\tprotected copyText: boolean;\r\n\r\n\t/**\r\n\t * Constructs a {@link CommonTokenFactory} with the specified value for\r\n\t * {@link #copyText}.\r\n\t *\r\n\t * When `copyText` is `false`, the {@link #DEFAULT} instance\r\n\t * should be used instead of constructing a new instance.\r\n\t *\r\n\t * @param copyText The value for {@link #copyText}.\r\n\t */\r\n\tconstructor(copyText: boolean = false) {\r\n\t\tthis.copyText = copyText;\r\n\t}\r\n\r\n\t@Override\r\n\tpublic create(\r\n\t\tsource: { source?: TokenSource, stream?: CharStream },\r\n\t\ttype: number,\r\n\t\ttext: string | undefined,\r\n\t\tchannel: number,\r\n\t\tstart: number,\r\n\t\tstop: number,\r\n\t\tline: number,\r\n\t\tcharPositionInLine: number): CommonToken {\r\n\r\n\t\tlet t: CommonToken = new CommonToken(type, text, source, channel, start, stop);\r\n\t\tt.line = line;\r\n\t\tt.charPositionInLine = charPositionInLine;\r\n\t\tif (text == null && this.copyText && source.stream != null) {\r\n\t\t\tt.text = source.stream.getText(Interval.of(start, stop));\r\n\t\t}\r\n\r\n\t\treturn t;\r\n\t}\r\n\r\n\t@Override\r\n\tpublic createSimple(type: number, text: string): CommonToken {\r\n\t\treturn new CommonToken(type, text);\r\n\t}\r\n}\r\n\r\nexport namespace CommonTokenFactory {\r\n\t/**\r\n\t * The default {@link CommonTokenFactory} instance.\r\n\t *\r\n\t * This token factory does not explicitly copy token text when constructing\r\n\t * tokens.\r\n\t */\r\n\texport const DEFAULT: TokenFactory = new CommonTokenFactory();\r\n}\r\n", "/*!\r\n * Copyright 2016 The ANTLR Project. All rights reserved.\r\n * Licensed under the BSD-3-Clause license. 
See LICENSE file in the project root for license information.\r\n */\r\n\r\n// ConvertTo-TS run at 2016-10-04T11:26:40.6647101-07:00\r\n\r\nimport { IntegerList } from \"./IntegerList\";\r\n\r\n/**\r\n *\r\n * @author Sam Harwell\r\n */\r\nexport class IntegerStack extends IntegerList {\r\n\r\n\tconstructor(arg?: number | IntegerStack) {\r\n\t\tsuper(arg);\r\n\t}\r\n\r\n\tpublic push(value: number): void {\r\n\t\tthis.add(value);\r\n\t}\r\n\r\n\tpublic pop(): number {\r\n\t\treturn this.removeAt(this.size - 1);\r\n\t}\r\n\r\n\tpublic peek(): number {\r\n\t\treturn this.get(this.size - 1);\r\n\t}\r\n\r\n}\r\n", "/*!\r\n * Copyright 2016 The ANTLR Project. All rights reserved.\r\n * Licensed under the BSD-3-Clause license. See LICENSE file in the project root for license information.\r\n */\r\n\r\n// ConvertTo-TS run at 2016-10-04T11:26:38.1172076-07:00\r\n\r\nimport { LexerActionExecutor } from \"../atn/LexerActionExecutor\";\r\n\r\n/**\r\n * Stores information about a {@link DFAState} which is an accept state under\r\n * some condition. Certain settings, such as\r\n * {@link ParserATNSimulator#getPredictionMode()}, may be used in addition to\r\n * this information to determine whether or not a particular state is an accept\r\n * state.\r\n *\r\n * @author Sam Harwell\r\n */\r\nexport class AcceptStateInfo {\r\n\tprivate readonly _prediction: number;\r\n\tprivate readonly _lexerActionExecutor?: LexerActionExecutor;\r\n\r\n\tconstructor(prediction: number);\r\n\tconstructor(prediction: number, lexerActionExecutor: LexerActionExecutor | undefined);\r\n\tconstructor(prediction: number, lexerActionExecutor?: LexerActionExecutor) {\r\n\t\tthis._prediction = prediction;\r\n\t\tthis._lexerActionExecutor = lexerActionExecutor;\r\n\t}\r\n\r\n\t/**\r\n\t * Gets the prediction made by this accept state. Note that this value\r\n\t * assumes the predicates, if any, in the {@link DFAState} evaluate to\r\n\t * `true`. 
If predicate evaluation is enabled, the final prediction of\r\n\t * the accept state will be determined by the result of predicate\r\n\t * evaluation.\r\n\t */\r\n\tget prediction(): number {\r\n\t\treturn this._prediction;\r\n\t}\r\n\r\n\t/**\r\n\t * Gets the {@link LexerActionExecutor} which can be used to execute actions\r\n\t * and/or commands after the lexer matches a token.\r\n\t */\r\n\tget lexerActionExecutor(): LexerActionExecutor | undefined {\r\n\t\treturn this._lexerActionExecutor;\r\n\t}\r\n}\r\n", "/*!\r\n * Copyright 2016 The ANTLR Project. All rights reserved.\r\n * Licensed under the BSD-3-Clause license. See LICENSE file in the project root for license information.\r\n */\r\n\r\nimport { Array2DHashSet } from \"./Array2DHashSet\";\r\nimport { DefaultEqualityComparator } from \"./DefaultEqualityComparator\";\r\nimport { EqualityComparator } from \"./EqualityComparator\";\r\nimport { Equatable, JavaCollection, JavaMap, JavaSet } from \"./Stubs\";\r\n\r\n// Since `Array2DHashMap` is implemented on top of `Array2DHashSet`, we defined a bucket type which can store a\r\n// key-value pair. 
The value is optional since looking up values in the map by a key only needs to include the key.\r\ninterface Bucket { key: K; value?: V; }\r\n\r\nclass MapKeyEqualityComparator implements EqualityComparator> {\r\n\tprivate readonly keyComparator: EqualityComparator;\r\n\r\n\tconstructor(keyComparator: EqualityComparator) {\r\n\t\tthis.keyComparator = keyComparator;\r\n\t}\r\n\r\n\tpublic hashCode(obj: Bucket): number {\r\n\t\treturn this.keyComparator.hashCode(obj.key);\r\n\t}\r\n\r\n\tpublic equals(a: Bucket, b: Bucket): boolean {\r\n\t\treturn this.keyComparator.equals(a.key, b.key);\r\n\t}\r\n}\r\n\r\nexport class Array2DHashMap implements JavaMap {\r\n\tprivate backingStore: Array2DHashSet>;\r\n\r\n\tconstructor(keyComparer: EqualityComparator);\r\n\tconstructor(map: Array2DHashMap);\r\n\tconstructor(keyComparer: EqualityComparator | Array2DHashMap) {\r\n\t\tif (keyComparer instanceof Array2DHashMap) {\r\n\t\t\tthis.backingStore = new Array2DHashSet>(keyComparer.backingStore);\r\n\t\t} else {\r\n\t\t\tthis.backingStore = new Array2DHashSet>(new MapKeyEqualityComparator(keyComparer));\r\n\t\t}\r\n\t}\r\n\r\n\tpublic clear(): void {\r\n\t\tthis.backingStore.clear();\r\n\t}\r\n\r\n\tpublic containsKey(key: K): boolean {\r\n\t\treturn this.backingStore.contains({ key });\r\n\t}\r\n\r\n\tpublic get(key: K): V | undefined {\r\n\t\tlet bucket = this.backingStore.get({ key });\r\n\t\tif (!bucket) {\r\n\t\t\treturn undefined;\r\n\t\t}\r\n\r\n\t\treturn bucket.value;\r\n\t}\r\n\r\n\tget isEmpty(): boolean {\r\n\t\treturn this.backingStore.isEmpty;\r\n\t}\r\n\r\n\tpublic put(key: K, value: V): V | undefined {\r\n\t\tlet element = this.backingStore.get({ key, value });\r\n\t\tlet result: V | undefined;\r\n\t\tif (!element) {\r\n\t\t\tthis.backingStore.add({ key, value });\r\n\t\t} else {\r\n\t\t\tresult = element.value;\r\n\t\t\telement.value = value;\r\n\t\t}\r\n\r\n\t\treturn result;\r\n\t}\r\n\r\n\tpublic putIfAbsent(key: K, value: V): V | undefined {\r\n\t\tlet 
element = this.backingStore.get({ key, value });\r\n\t\tlet result: V | undefined;\r\n\t\tif (!element) {\r\n\t\t\tthis.backingStore.add({ key, value });\r\n\t\t} else {\r\n\t\t\tresult = element.value;\r\n\t\t}\r\n\r\n\t\treturn result;\r\n\t}\r\n\r\n\tget size(): number {\r\n\t\treturn this.backingStore.size;\r\n\t}\r\n\r\n\tpublic hashCode(): number {\r\n\t\treturn this.backingStore.hashCode();\r\n\t}\r\n\r\n\tpublic equals(o: any): boolean {\r\n\t\tif (!(o instanceof Array2DHashMap)) {\r\n\t\t\treturn false;\r\n\t\t}\r\n\r\n\t\treturn this.backingStore.equals(o.backingStore);\r\n\t}\r\n}\r\n", "/*!\r\n * Copyright 2016 The ANTLR Project. All rights reserved.\r\n * Licensed under the BSD-3-Clause license. See LICENSE file in the project root for license information.\r\n */\r\n\r\n// ConvertTo-TS run at 2016-10-04T11:26:28.4381103-07:00\r\n\r\nimport { ATNState } from \"./ATNState\";\r\n\r\nexport abstract class DecisionState extends ATNState {\r\n\tpublic decision: number = -1;\r\n\tpublic nonGreedy: boolean = false;\r\n\tpublic sll: boolean = false;\r\n}\r\n", "/*!\r\n * Copyright 2016 The ANTLR Project. All rights reserved.\r\n * Licensed under the BSD-3-Clause license. See LICENSE file in the project root for license information.\r\n */\r\n\r\n// ConvertTo-TS run at 2016-10-04T11:26:35.6390614-07:00\r\n\r\nimport { Array2DHashMap } from \"../misc/Array2DHashMap\";\r\nimport { Override } from \"../Decorators\";\r\nimport { JavaMap } from \"../misc/Stubs\";\r\nimport { ObjectEqualityComparator } from \"../misc/ObjectEqualityComparator\";\r\nimport { PredictionContext } from \"./PredictionContext\";\r\nimport * as assert from \"assert\";\r\n\r\n/** Used to cache {@link PredictionContext} objects. Its used for the shared\r\n * context cash associated with contexts in DFA states. 
This cache\r\n * can be used for both lexers and parsers.\r\n *\r\n * @author Sam Harwell\r\n */\r\nexport class PredictionContextCache {\r\n\tpublic static UNCACHED: PredictionContextCache = new PredictionContextCache(false);\r\n\r\n\tprivate contexts: JavaMap =\r\n\t\tnew Array2DHashMap(ObjectEqualityComparator.INSTANCE);\r\n\tprivate childContexts: JavaMap =\r\n\t\tnew Array2DHashMap(ObjectEqualityComparator.INSTANCE);\r\n\tprivate joinContexts: JavaMap =\r\n\t\tnew Array2DHashMap(ObjectEqualityComparator.INSTANCE);\r\n\r\n\tprivate enableCache: boolean;\r\n\r\n\tconstructor(enableCache: boolean = true) {\r\n\t\tthis.enableCache = enableCache;\r\n\t}\r\n\r\n\tpublic getAsCached(context: PredictionContext): PredictionContext {\r\n\t\tif (!this.enableCache) {\r\n\t\t\treturn context;\r\n\t\t}\r\n\r\n\t\tlet result = this.contexts.get(context);\r\n\t\tif (!result) {\r\n\t\t\tresult = context;\r\n\t\t\tthis.contexts.put(context, context);\r\n\t\t}\r\n\r\n\t\treturn result;\r\n\t}\r\n\r\n\tpublic getChild(context: PredictionContext, invokingState: number): PredictionContext {\r\n\t\tif (!this.enableCache) {\r\n\t\t\treturn context.getChild(invokingState);\r\n\t\t}\r\n\r\n\t\tlet operands: PredictionContextCache.PredictionContextAndInt = new PredictionContextCache.PredictionContextAndInt(context, invokingState);\r\n\t\tlet result = this.childContexts.get(operands);\r\n\t\tif (!result) {\r\n\t\t\tresult = context.getChild(invokingState);\r\n\t\t\tresult = this.getAsCached(result);\r\n\t\t\tthis.childContexts.put(operands, result);\r\n\t\t}\r\n\r\n\t\treturn result;\r\n\t}\r\n\r\n\tpublic join(x: PredictionContext, y: PredictionContext): PredictionContext {\r\n\t\tif (!this.enableCache) {\r\n\t\t\treturn PredictionContext.join(x, y, this);\r\n\t\t}\r\n\r\n\t\tlet operands: PredictionContextCache.IdentityCommutativePredictionContextOperands = new PredictionContextCache.IdentityCommutativePredictionContextOperands(x, y);\r\n\t\tlet result = 
this.joinContexts.get(operands);\r\n\t\tif (result) {\r\n\t\t\treturn result;\r\n\t\t}\r\n\r\n\t\tresult = PredictionContext.join(x, y, this);\r\n\t\tresult = this.getAsCached(result);\r\n\t\tthis.joinContexts.put(operands, result);\r\n\t\treturn result;\r\n\t}\r\n}\r\n\r\nexport namespace PredictionContextCache {\r\n\texport class PredictionContextAndInt {\r\n\t\tprivate obj: PredictionContext;\r\n\t\tprivate value: number;\r\n\r\n\t\tconstructor(obj: PredictionContext, value: number) {\r\n\t\t\tthis.obj = obj;\r\n\t\t\tthis.value = value;\r\n\t\t}\r\n\r\n\t\t@Override\r\n\t\tpublic equals(obj: any): boolean {\r\n\t\t\tif (!(obj instanceof PredictionContextAndInt)) {\r\n\t\t\t\treturn false;\r\n\t\t\t} else if (obj === this) {\r\n\t\t\t\treturn true;\r\n\t\t\t}\r\n\r\n\t\t\tlet other: PredictionContextAndInt = obj;\r\n\t\t\treturn this.value === other.value\r\n\t\t\t\t&& (this.obj === other.obj || (this.obj != null && this.obj.equals(other.obj)));\r\n\t\t}\r\n\r\n\t\t@Override\r\n\t\tpublic hashCode(): number {\r\n\t\t\tlet hashCode: number = 5;\r\n\t\t\thashCode = 7 * hashCode + (this.obj != null ? 
this.obj.hashCode() : 0);\r\n\t\t\thashCode = 7 * hashCode + this.value;\r\n\t\t\treturn hashCode;\r\n\t\t}\r\n\t}\r\n\r\n\texport class IdentityCommutativePredictionContextOperands {\r\n\t\tprivate _x: PredictionContext;\r\n\t\tprivate _y: PredictionContext;\r\n\r\n\t\tconstructor(x: PredictionContext, y: PredictionContext) {\r\n\t\t\tassert(x != null);\r\n\t\t\tassert(y != null);\r\n\t\t\tthis._x = x;\r\n\t\t\tthis._y = y;\r\n\t\t}\r\n\r\n\t\tget x(): PredictionContext {\r\n\t\t\treturn this._x;\r\n\t\t}\r\n\r\n\t\tget y(): PredictionContext {\r\n\t\t\treturn this._y;\r\n\t\t}\r\n\r\n\t\t@Override\r\n\t\tpublic equals(o: any): boolean {\r\n\t\t\tif (!(o instanceof IdentityCommutativePredictionContextOperands)) {\r\n\t\t\t\treturn false;\r\n\t\t\t} else if (this === o) {\r\n\t\t\t\treturn true;\r\n\t\t\t}\r\n\r\n\t\t\tlet other: IdentityCommutativePredictionContextOperands = o;\r\n\t\t\treturn (this._x === other._x && this._y === other._y) || (this._x === other._y && this._y === other._x);\r\n\t\t}\r\n\r\n\t\t@Override\r\n\t\tpublic hashCode(): number {\r\n\t\t\treturn this._x.hashCode() ^ this._y.hashCode();\r\n\t\t}\r\n\t}\r\n}\r\n", "/*!\r\n * Copyright 2016 The ANTLR Project. All rights reserved.\r\n * Licensed under the BSD-3-Clause license. 
See LICENSE file in the project root for license information.\r\n */\r\n\r\n// ConvertTo-TS run at 2016-10-04T11:26:35.3812636-07:00\r\n\r\n\r\nimport { Array2DHashMap } from \"../misc/Array2DHashMap\";\r\nimport { Array2DHashSet } from \"../misc/Array2DHashSet\";\r\nimport { Arrays } from \"../misc/Arrays\";\r\nimport { ATN } from \"./ATN\";\r\nimport { ATNState } from \"./ATNState\";\r\nimport { EqualityComparator } from \"../misc/EqualityComparator\";\r\nimport { MurmurHash } from \"../misc/MurmurHash\";\r\nimport { NotNull, Override } from \"../Decorators\";\r\nimport { Equatable, JavaSet } from \"../misc/Stubs\";\r\nimport { PredictionContextCache } from \"./PredictionContextCache\";\r\nimport { Recognizer } from \"../Recognizer\";\r\nimport { RuleContext } from \"../RuleContext\";\r\nimport { RuleTransition } from \"./RuleTransition\";\r\n\r\nimport * as assert from \"assert\";\r\n\r\nconst INITIAL_HASH: number = 1;\r\n\r\nexport abstract class PredictionContext implements Equatable {\r\n\t/**\r\n\t * Stores the computed hash code of this {@link PredictionContext}. 
The hash\r\n\t * code is computed in parts to match the following reference algorithm.\r\n\t *\r\n\t * ```\r\n\t * private int referenceHashCode() {\r\n\t * int hash = {@link MurmurHash#initialize MurmurHash.initialize}({@link #INITIAL_HASH});\r\n\t *\r\n\t * for (int i = 0; i < this.size; i++) {\r\n\t * hash = {@link MurmurHash#update MurmurHash.update}(hash, {@link #getParent getParent}(i));\r\n\t * }\r\n\t *\r\n\t * for (int i = 0; i < this.size; i++) {\r\n\t * hash = {@link MurmurHash#update MurmurHash.update}(hash, {@link #getReturnState getReturnState}(i));\r\n\t * }\r\n\t *\r\n\t * hash = {@link MurmurHash#finish MurmurHash.finish}(hash, 2 * this.size);\r\n\t * return hash;\r\n\t * }\r\n\t * ```\r\n\t */\r\n\tprivate readonly cachedHashCode: number;\r\n\r\n\tconstructor(cachedHashCode: number) {\r\n\t\tthis.cachedHashCode = cachedHashCode;\r\n\t}\r\n\r\n\tprotected static calculateEmptyHashCode(): number {\r\n\t\tlet hash: number = MurmurHash.initialize(INITIAL_HASH);\r\n\t\thash = MurmurHash.finish(hash, 0);\r\n\t\treturn hash;\r\n\t}\r\n\r\n\tprotected static calculateSingleHashCode(parent: PredictionContext, returnState: number): number {\r\n\t\tlet hash: number = MurmurHash.initialize(INITIAL_HASH);\r\n\t\thash = MurmurHash.update(hash, parent);\r\n\t\thash = MurmurHash.update(hash, returnState);\r\n\t\thash = MurmurHash.finish(hash, 2);\r\n\t\treturn hash;\r\n\t}\r\n\r\n\tprotected static calculateHashCode(parents: PredictionContext[], returnStates: number[]): number {\r\n\t\tlet hash: number = MurmurHash.initialize(INITIAL_HASH);\r\n\r\n\t\tfor (let parent of parents) {\r\n\t\t\thash = MurmurHash.update(hash, parent);\r\n\t\t}\r\n\r\n\t\tfor (let returnState of returnStates) {\r\n\t\t\thash = MurmurHash.update(hash, returnState);\r\n\t\t}\r\n\r\n\t\thash = MurmurHash.finish(hash, 2 * parents.length);\r\n\t\treturn hash;\r\n\t}\r\n\r\n\tpublic abstract readonly size: number;\r\n\r\n\tpublic abstract getReturnState(index: number): number;\r\n\r\n\tpublic 
abstract findReturnState(returnState: number): number;\r\n\r\n\t// @NotNull\r\n\tpublic abstract getParent(index: number): PredictionContext;\r\n\r\n\tprotected abstract addEmptyContext(): PredictionContext;\r\n\r\n\tprotected abstract removeEmptyContext(): PredictionContext;\r\n\r\n\tpublic static fromRuleContext(atn: ATN, outerContext: RuleContext, fullContext: boolean = true): PredictionContext {\r\n\t\tif (outerContext.isEmpty) {\r\n\t\t\treturn fullContext ? PredictionContext.EMPTY_FULL : PredictionContext.EMPTY_LOCAL;\r\n\t\t}\r\n\r\n\t\tlet parent: PredictionContext;\r\n\t\tif (outerContext._parent) {\r\n\t\t\tparent = PredictionContext.fromRuleContext(atn, outerContext._parent, fullContext);\r\n\t\t} else {\r\n\t\t\tparent = fullContext ? PredictionContext.EMPTY_FULL : PredictionContext.EMPTY_LOCAL;\r\n\t\t}\r\n\r\n\t\tlet state: ATNState = atn.states[outerContext.invokingState];\r\n\t\tlet transition: RuleTransition = state.transition(0) as RuleTransition;\r\n\t\treturn parent.getChild(transition.followState.stateNumber);\r\n\t}\r\n\r\n\tprivate static addEmptyContext(context: PredictionContext): PredictionContext {\r\n\t\treturn context.addEmptyContext();\r\n\t}\r\n\r\n\tprivate static removeEmptyContext(context: PredictionContext): PredictionContext {\r\n\t\treturn context.removeEmptyContext();\r\n\t}\r\n\r\n\tpublic static join(@NotNull context0: PredictionContext, @NotNull context1: PredictionContext, @NotNull contextCache: PredictionContextCache = PredictionContextCache.UNCACHED): PredictionContext {\r\n\t\tif (context0 === context1) {\r\n\t\t\treturn context0;\r\n\t\t}\r\n\r\n\t\tif (context0.isEmpty) {\r\n\t\t\treturn PredictionContext.isEmptyLocal(context0) ? context0 : PredictionContext.addEmptyContext(context1);\r\n\t\t} else if (context1.isEmpty) {\r\n\t\t\treturn PredictionContext.isEmptyLocal(context1) ? 
context1 : PredictionContext.addEmptyContext(context0);\r\n\t\t}\r\n\r\n\t\tlet context0size: number = context0.size;\r\n\t\tlet context1size: number = context1.size;\r\n\t\tif (context0size === 1 && context1size === 1 && context0.getReturnState(0) === context1.getReturnState(0)) {\r\n\t\t\tlet merged: PredictionContext = contextCache.join(context0.getParent(0), context1.getParent(0));\r\n\t\t\tif (merged === context0.getParent(0)) {\r\n\t\t\t\treturn context0;\r\n\t\t\t} else if (merged === context1.getParent(0)) {\r\n\t\t\t\treturn context1;\r\n\t\t\t} else {\r\n\t\t\t\treturn merged.getChild(context0.getReturnState(0));\r\n\t\t\t}\r\n\t\t}\r\n\r\n\t\tlet count: number = 0;\r\n\t\tlet parentsList: PredictionContext[] = new Array(context0size + context1size);\r\n\t\tlet returnStatesList: number[] = new Array(parentsList.length);\r\n\t\tlet leftIndex: number = 0;\r\n\t\tlet rightIndex: number = 0;\r\n\t\tlet canReturnLeft: boolean = true;\r\n\t\tlet canReturnRight: boolean = true;\r\n\t\twhile (leftIndex < context0size && rightIndex < context1size) {\r\n\t\t\tif (context0.getReturnState(leftIndex) === context1.getReturnState(rightIndex)) {\r\n\t\t\t\tparentsList[count] = contextCache.join(context0.getParent(leftIndex), context1.getParent(rightIndex));\r\n\t\t\t\treturnStatesList[count] = context0.getReturnState(leftIndex);\r\n\t\t\t\tcanReturnLeft = canReturnLeft && parentsList[count] === context0.getParent(leftIndex);\r\n\t\t\t\tcanReturnRight = canReturnRight && parentsList[count] === context1.getParent(rightIndex);\r\n\t\t\t\tleftIndex++;\r\n\t\t\t\trightIndex++;\r\n\t\t\t} else if (context0.getReturnState(leftIndex) < context1.getReturnState(rightIndex)) {\r\n\t\t\t\tparentsList[count] = context0.getParent(leftIndex);\r\n\t\t\t\treturnStatesList[count] = context0.getReturnState(leftIndex);\r\n\t\t\t\tcanReturnRight = false;\r\n\t\t\t\tleftIndex++;\r\n\t\t\t} else {\r\n\t\t\t\tassert(context1.getReturnState(rightIndex) < 
context0.getReturnState(leftIndex));\r\n\t\t\t\tparentsList[count] = context1.getParent(rightIndex);\r\n\t\t\t\treturnStatesList[count] = context1.getReturnState(rightIndex);\r\n\t\t\t\tcanReturnLeft = false;\r\n\t\t\t\trightIndex++;\r\n\t\t\t}\r\n\r\n\t\t\tcount++;\r\n\t\t}\r\n\r\n\t\twhile (leftIndex < context0size) {\r\n\t\t\tparentsList[count] = context0.getParent(leftIndex);\r\n\t\t\treturnStatesList[count] = context0.getReturnState(leftIndex);\r\n\t\t\tleftIndex++;\r\n\t\t\tcanReturnRight = false;\r\n\t\t\tcount++;\r\n\t\t}\r\n\r\n\t\twhile (rightIndex < context1size) {\r\n\t\t\tparentsList[count] = context1.getParent(rightIndex);\r\n\t\t\treturnStatesList[count] = context1.getReturnState(rightIndex);\r\n\t\t\trightIndex++;\r\n\t\t\tcanReturnLeft = false;\r\n\t\t\tcount++;\r\n\t\t}\r\n\r\n\t\tif (canReturnLeft) {\r\n\t\t\treturn context0;\r\n\t\t} else if (canReturnRight) {\r\n\t\t\treturn context1;\r\n\t\t}\r\n\r\n\t\tif (count < parentsList.length) {\r\n\t\t\tparentsList = parentsList.slice(0, count);\r\n\t\t\treturnStatesList = returnStatesList.slice(0, count);\r\n\t\t}\r\n\r\n\t\tif (parentsList.length === 0) {\r\n\t\t\t// if one of them was EMPTY_LOCAL, it would be empty and handled at the beginning of the method\r\n\t\t\treturn PredictionContext.EMPTY_FULL;\r\n\t\t} else if (parentsList.length === 1) {\r\n\t\t\treturn new SingletonPredictionContext(parentsList[0], returnStatesList[0]);\r\n\t\t} else {\r\n\t\t\treturn new ArrayPredictionContext(parentsList, returnStatesList);\r\n\t\t}\r\n\t}\r\n\r\n\tpublic static isEmptyLocal(context: PredictionContext): boolean {\r\n\t\treturn context === PredictionContext.EMPTY_LOCAL;\r\n\t}\r\n\r\n\tpublic static getCachedContext(\r\n\t\t@NotNull context: PredictionContext,\r\n\t\t@NotNull contextCache: Array2DHashMap,\r\n\t\t@NotNull visited: PredictionContext.IdentityHashMap): PredictionContext {\r\n\t\tif (context.isEmpty) {\r\n\t\t\treturn context;\r\n\t\t}\r\n\r\n\t\tlet existing = 
visited.get(context);\r\n\t\tif (existing) {\r\n\t\t\treturn existing;\r\n\t\t}\r\n\r\n\t\texisting = contextCache.get(context);\r\n\t\tif (existing) {\r\n\t\t\tvisited.put(context, existing);\r\n\t\t\treturn existing;\r\n\t\t}\r\n\r\n\t\tlet changed: boolean = false;\r\n\t\tlet parents: PredictionContext[] = new Array(context.size);\r\n\t\tfor (let i = 0; i < parents.length; i++) {\r\n\t\t\tlet parent: PredictionContext = PredictionContext.getCachedContext(context.getParent(i), contextCache, visited);\r\n\t\t\tif (changed || parent !== context.getParent(i)) {\r\n\t\t\t\tif (!changed) {\r\n\t\t\t\t\tparents = new Array(context.size);\r\n\t\t\t\t\tfor (let j = 0; j < context.size; j++) {\r\n\t\t\t\t\t\tparents[j] = context.getParent(j);\r\n\t\t\t\t\t}\r\n\r\n\t\t\t\t\tchanged = true;\r\n\t\t\t\t}\r\n\r\n\t\t\t\tparents[i] = parent;\r\n\t\t\t}\r\n\t\t}\r\n\r\n\t\tif (!changed) {\r\n\t\t\texisting = contextCache.putIfAbsent(context, context);\r\n\t\t\tvisited.put(context, existing != null ? 
existing : context);\r\n\t\t\treturn context;\r\n\t\t}\r\n\r\n\t\t// We know parents.length>0 because context.isEmpty is checked at the beginning of the method.\r\n\t\tlet updated: PredictionContext;\r\n\t\tif (parents.length === 1) {\r\n\t\t\tupdated = new SingletonPredictionContext(parents[0], context.getReturnState(0));\r\n\t\t} else {\r\n\t\t\tlet returnStates: number[] = new Array(context.size);\r\n\t\t\tfor (let i = 0; i < context.size; i++) {\r\n\t\t\t\treturnStates[i] = context.getReturnState(i);\r\n\t\t\t}\r\n\r\n\t\t\tupdated = new ArrayPredictionContext(parents, returnStates, context.hashCode());\r\n\t\t}\r\n\r\n\t\texisting = contextCache.putIfAbsent(updated, updated);\r\n\t\tvisited.put(updated, existing || updated);\r\n\t\tvisited.put(context, existing || updated);\r\n\r\n\t\treturn updated;\r\n\t}\r\n\r\n\tpublic appendSingleContext(returnContext: number, contextCache: PredictionContextCache): PredictionContext {\r\n\t\treturn this.appendContext(PredictionContext.EMPTY_FULL.getChild(returnContext), contextCache);\r\n\t}\r\n\r\n\tpublic abstract appendContext(suffix: PredictionContext, contextCache: PredictionContextCache): PredictionContext;\r\n\r\n\tpublic getChild(returnState: number): PredictionContext {\r\n\t\treturn new SingletonPredictionContext(this, returnState);\r\n\t}\r\n\r\n\tpublic abstract readonly isEmpty: boolean;\r\n\r\n\tpublic abstract readonly hasEmpty: boolean;\r\n\r\n\t@Override\r\n\tpublic hashCode(): number {\r\n\t\treturn this.cachedHashCode;\r\n\t}\r\n\r\n\t// @Override\r\n\tpublic abstract equals(o: any): boolean;\r\n\r\n\tpublic toStrings(recognizer: Recognizer | undefined, currentState: number, stop: PredictionContext = PredictionContext.EMPTY_FULL): string[] {\r\n\t\tlet result: string[] = [];\r\n\r\n\t\touter:\r\n\t\tfor (let perm = 0; ; perm++) {\r\n\t\t\tlet offset: number = 0;\r\n\t\t\tlet last: boolean = true;\r\n\t\t\tlet p: PredictionContext = this;\r\n\t\t\tlet stateNumber: number = currentState;\r\n\t\t\tlet 
localBuffer: string = \"\";\r\n\t\t\tlocalBuffer += \"[\";\r\n\t\t\twhile (!p.isEmpty && p !== stop) {\r\n\t\t\t\tlet index: number = 0;\r\n\t\t\t\tif (p.size > 0) {\r\n\t\t\t\t\tlet bits: number = 1;\r\n\t\t\t\t\twhile (((1 << bits) >>> 0) < p.size) {\r\n\t\t\t\t\t\tbits++;\r\n\t\t\t\t\t}\r\n\r\n\t\t\t\t\tlet mask: number = ((1 << bits) >>> 0) - 1;\r\n\t\t\t\t\tindex = (perm >> offset) & mask;\r\n\t\t\t\t\tlast = last && index >= p.size - 1;\r\n\t\t\t\t\tif (index >= p.size) {\r\n\t\t\t\t\t\tcontinue outer;\r\n\t\t\t\t\t}\r\n\r\n\t\t\t\t\toffset += bits;\r\n\t\t\t\t}\r\n\r\n\t\t\t\tif (recognizer) {\r\n\t\t\t\t\tif (localBuffer.length > 1) {\r\n\t\t\t\t\t\t// first char is '[', if more than that this isn't the first rule\r\n\t\t\t\t\t\tlocalBuffer += \" \";\r\n\t\t\t\t\t}\r\n\r\n\t\t\t\t\tlet atn: ATN = recognizer.atn;\r\n\t\t\t\t\tlet s: ATNState = atn.states[stateNumber];\r\n\t\t\t\t\tlet ruleName: string = recognizer.ruleNames[s.ruleIndex];\r\n\t\t\t\t\tlocalBuffer += ruleName;\r\n\t\t\t\t} else if (p.getReturnState(index) !== PredictionContext.EMPTY_FULL_STATE_KEY) {\r\n\t\t\t\t\tif (!p.isEmpty) {\r\n\t\t\t\t\t\tif (localBuffer.length > 1) {\r\n\t\t\t\t\t\t\t// first char is '[', if more than that this isn't the first rule\r\n\t\t\t\t\t\t\tlocalBuffer += \" \";\r\n\t\t\t\t\t\t}\r\n\r\n\t\t\t\t\t\tlocalBuffer += p.getReturnState(index);\r\n\t\t\t\t\t}\r\n\t\t\t\t}\r\n\r\n\t\t\t\tstateNumber = p.getReturnState(index);\r\n\t\t\t\tp = p.getParent(index);\r\n\t\t\t}\r\n\r\n\t\t\tlocalBuffer += \"]\";\r\n\t\t\tresult.push(localBuffer);\r\n\r\n\t\t\tif (last) {\r\n\t\t\t\tbreak;\r\n\t\t\t}\r\n\t\t}\r\n\r\n\t\treturn result;\r\n\t}\r\n}\r\n\r\nclass EmptyPredictionContext extends PredictionContext {\r\n\tprivate fullContext: boolean;\r\n\r\n\tconstructor(fullContext: boolean) {\r\n\t\tsuper(PredictionContext.calculateEmptyHashCode());\r\n\t\tthis.fullContext = fullContext;\r\n\t}\r\n\r\n\tget isFullContext(): boolean {\r\n\t\treturn 
this.fullContext;\r\n\t}\r\n\r\n\t@Override\r\n\tprotected addEmptyContext(): PredictionContext {\r\n\t\treturn this;\r\n\t}\r\n\r\n\t@Override\r\n\tprotected removeEmptyContext(): PredictionContext {\r\n\t\tthrow new Error(\"Cannot remove the empty context from itself.\");\r\n\t}\r\n\r\n\t@Override\r\n\tpublic getParent(index: number): PredictionContext {\r\n\t\tthrow new Error(\"index out of bounds\");\r\n\t}\r\n\r\n\t@Override\r\n\tpublic getReturnState(index: number): number {\r\n\t\tthrow new Error(\"index out of bounds\");\r\n\t}\r\n\r\n\t@Override\r\n\tpublic findReturnState(returnState: number): number {\r\n\t\treturn -1;\r\n\t}\r\n\r\n\t@Override\r\n\tget size(): number {\r\n\t\treturn 0;\r\n\t}\r\n\r\n\t@Override\r\n\tpublic appendSingleContext(returnContext: number, contextCache: PredictionContextCache): PredictionContext {\r\n\t\treturn contextCache.getChild(this, returnContext);\r\n\t}\r\n\r\n\t@Override\r\n\tpublic appendContext(suffix: PredictionContext, contextCache: PredictionContextCache): PredictionContext {\r\n\t\treturn suffix;\r\n\t}\r\n\r\n\t@Override\r\n\tget isEmpty(): boolean {\r\n\t\treturn true;\r\n\t}\r\n\r\n\t@Override\r\n\tget hasEmpty(): boolean {\r\n\t\treturn true;\r\n\t}\r\n\r\n\t@Override\r\n\tpublic equals(o: any): boolean {\r\n\t\treturn this === o;\r\n\t}\r\n\r\n\t@Override\r\n\tpublic toStrings(recognizer: any, currentState: number, stop?: PredictionContext): string[] {\r\n\t\treturn [\"[]\"];\r\n\t}\r\n\r\n}\r\n\r\nclass ArrayPredictionContext extends PredictionContext {\r\n\t@NotNull\r\n\tpublic parents: PredictionContext[];\r\n\r\n\t@NotNull\r\n\tpublic returnStates: number[];\r\n\r\n\tconstructor( @NotNull parents: PredictionContext[], returnStates: number[], hashCode?: number) {\r\n\t\tsuper(hashCode || PredictionContext.calculateHashCode(parents, returnStates));\r\n\t\tassert(parents.length === returnStates.length);\r\n\t\tassert(returnStates.length > 1 || returnStates[0] !== PredictionContext.EMPTY_FULL_STATE_KEY, 
\"Should be using PredictionContext.EMPTY instead.\");\r\n\r\n\t\tthis.parents = parents;\r\n\t\tthis.returnStates = returnStates;\r\n\t}\r\n\r\n\t@Override\r\n\tpublic getParent(index: number): PredictionContext {\r\n\t\treturn this.parents[index];\r\n\t}\r\n\r\n\t@Override\r\n\tpublic getReturnState(index: number): number {\r\n\t\treturn this.returnStates[index];\r\n\t}\r\n\r\n\t@Override\r\n\tpublic findReturnState(returnState: number): number {\r\n\t\treturn Arrays.binarySearch(this.returnStates, returnState);\r\n\t}\r\n\r\n\t@Override\r\n\tget size(): number {\r\n\t\treturn this.returnStates.length;\r\n\t}\r\n\r\n\t@Override\r\n\tget isEmpty(): boolean {\r\n\t\treturn false;\r\n\t}\r\n\r\n\t@Override\r\n\tget hasEmpty(): boolean {\r\n\t\treturn this.returnStates[this.returnStates.length - 1] === PredictionContext.EMPTY_FULL_STATE_KEY;\r\n\t}\r\n\r\n\t@Override\r\n\tprotected addEmptyContext(): PredictionContext {\r\n\t\tif (this.hasEmpty) {\r\n\t\t\treturn this;\r\n\t\t}\r\n\r\n\t\tlet parents2: PredictionContext[] = this.parents.slice(0);\r\n\t\tlet returnStates2: number[] = this.returnStates.slice(0);\r\n\t\tparents2.push(PredictionContext.EMPTY_FULL);\r\n\t\treturnStates2.push(PredictionContext.EMPTY_FULL_STATE_KEY);\r\n\t\treturn new ArrayPredictionContext(parents2, returnStates2);\r\n\t}\r\n\r\n\t@Override\r\n\tprotected removeEmptyContext(): PredictionContext {\r\n\t\tif (!this.hasEmpty) {\r\n\t\t\treturn this;\r\n\t\t}\r\n\r\n\t\tif (this.returnStates.length === 2) {\r\n\t\t\treturn new SingletonPredictionContext(this.parents[0], this.returnStates[0]);\r\n\t\t} else {\r\n\t\t\tlet parents2: PredictionContext[] = this.parents.slice(0, this.parents.length - 1);\r\n\t\t\tlet returnStates2: number[] = this.returnStates.slice(0, this.returnStates.length - 1);\r\n\t\t\treturn new ArrayPredictionContext(parents2, returnStates2);\r\n\t\t}\r\n\t}\r\n\r\n\t@Override\r\n\tpublic appendContext(suffix: PredictionContext, contextCache: PredictionContextCache): 
PredictionContext {\r\n\t\treturn ArrayPredictionContext.appendContextImpl(this, suffix, new PredictionContext.IdentityHashMap());\r\n\t}\r\n\r\n\tprivate static appendContextImpl(context: PredictionContext, suffix: PredictionContext, visited: PredictionContext.IdentityHashMap): PredictionContext {\r\n\t\tif (suffix.isEmpty) {\r\n\t\t\tif (PredictionContext.isEmptyLocal(suffix)) {\r\n\t\t\t\tif (context.hasEmpty) {\r\n\t\t\t\t\treturn PredictionContext.EMPTY_LOCAL;\r\n\t\t\t\t}\r\n\r\n\t\t\t\tthrow new Error(\"what to do here?\");\r\n\t\t\t}\r\n\r\n\t\t\treturn context;\r\n\t\t}\r\n\r\n\t\tif (suffix.size !== 1) {\r\n\t\t\tthrow new Error(\"Appending a tree suffix is not yet supported.\");\r\n\t\t}\r\n\r\n\t\tlet result = visited.get(context);\r\n\t\tif (!result) {\r\n\t\t\tif (context.isEmpty) {\r\n\t\t\t\tresult = suffix;\r\n\t\t\t} else {\r\n\t\t\t\tlet parentCount: number = context.size;\r\n\t\t\t\tif (context.hasEmpty) {\r\n\t\t\t\t\tparentCount--;\r\n\t\t\t\t}\r\n\r\n\t\t\t\tlet updatedParents: PredictionContext[] = new Array(parentCount);\r\n\t\t\t\tlet updatedReturnStates: number[] = new Array(parentCount);\r\n\t\t\t\tfor (let i = 0; i < parentCount; i++) {\r\n\t\t\t\t\tupdatedReturnStates[i] = context.getReturnState(i);\r\n\t\t\t\t}\r\n\r\n\t\t\t\tfor (let i = 0; i < parentCount; i++) {\r\n\t\t\t\t\tupdatedParents[i] = ArrayPredictionContext.appendContextImpl(context.getParent(i), suffix, visited);\r\n\t\t\t\t}\r\n\r\n\t\t\t\tif (updatedParents.length === 1) {\r\n\t\t\t\t\tresult = new SingletonPredictionContext(updatedParents[0], updatedReturnStates[0]);\r\n\t\t\t\t} else {\r\n\t\t\t\t\tassert(updatedParents.length > 1);\r\n\t\t\t\t\tresult = new ArrayPredictionContext(updatedParents, updatedReturnStates);\r\n\t\t\t\t}\r\n\r\n\t\t\t\tif (context.hasEmpty) {\r\n\t\t\t\t\tresult = PredictionContext.join(result, suffix);\r\n\t\t\t\t}\r\n\t\t\t}\r\n\r\n\t\t\tvisited.put(context, result);\r\n\t\t}\r\n\r\n\t\treturn result;\r\n\t}\r\n\r\n\t@Override\r\n\tpublic 
equals(o: any): boolean {\r\n\t\tif (this === o) {\r\n\t\t\treturn true;\r\n\t\t} else if (!(o instanceof ArrayPredictionContext)) {\r\n\t\t\treturn false;\r\n\t\t}\r\n\r\n\t\tif (this.hashCode() !== o.hashCode()) {\r\n\t\t\t// can't be same if hash is different\r\n\t\t\treturn false;\r\n\t\t}\r\n\r\n\t\tlet other: ArrayPredictionContext = o;\r\n\t\treturn this.equalsImpl(other, new Array2DHashSet());\r\n\t}\r\n\r\n\tprivate equalsImpl(other: ArrayPredictionContext, visited: JavaSet): boolean {\r\n\t\tlet selfWorkList: PredictionContext[] = [];\r\n\t\tlet otherWorkList: PredictionContext[] = [];\r\n\t\tselfWorkList.push(this);\r\n\t\totherWorkList.push(other);\r\n\t\twhile (true) {\r\n\t\t\tlet currentSelf = selfWorkList.pop();\r\n\t\t\tlet currentOther = otherWorkList.pop();\r\n\t\t\tif (!currentSelf || !currentOther) {\r\n\t\t\t\tbreak;\r\n\t\t\t}\r\n\r\n\t\t\tlet operands: PredictionContextCache.IdentityCommutativePredictionContextOperands = new PredictionContextCache.IdentityCommutativePredictionContextOperands(currentSelf, currentOther);\r\n\t\t\tif (!visited.add(operands)) {\r\n\t\t\t\tcontinue;\r\n\t\t\t}\r\n\r\n\t\t\tlet selfSize: number = operands.x.size;\r\n\t\t\tif (selfSize === 0) {\r\n\t\t\t\tif (!operands.x.equals(operands.y)) {\r\n\t\t\t\t\treturn false;\r\n\t\t\t\t}\r\n\r\n\t\t\t\tcontinue;\r\n\t\t\t}\r\n\r\n\t\t\tlet otherSize: number = operands.y.size;\r\n\t\t\tif (selfSize !== otherSize) {\r\n\t\t\t\treturn false;\r\n\t\t\t}\r\n\r\n\t\t\tfor (let i = 0; i < selfSize; i++) {\r\n\t\t\t\tif (operands.x.getReturnState(i) !== operands.y.getReturnState(i)) {\r\n\t\t\t\t\treturn false;\r\n\t\t\t\t}\r\n\r\n\t\t\t\tlet selfParent: PredictionContext = operands.x.getParent(i);\r\n\t\t\t\tlet otherParent: PredictionContext = operands.y.getParent(i);\r\n\t\t\t\tif (selfParent.hashCode() !== otherParent.hashCode()) {\r\n\t\t\t\t\treturn false;\r\n\t\t\t\t}\r\n\r\n\t\t\t\tif (selfParent !== otherParent) 
{\r\n\t\t\t\t\tselfWorkList.push(selfParent);\r\n\t\t\t\t\totherWorkList.push(otherParent);\r\n\t\t\t\t}\r\n\t\t\t}\r\n\t\t}\r\n\r\n\t\treturn true;\r\n\t}\r\n}\r\n\r\nexport class SingletonPredictionContext extends PredictionContext {\r\n\r\n\t@NotNull\r\n\tpublic parent: PredictionContext;\r\n\tpublic returnState: number;\r\n\r\n\tconstructor(@NotNull parent: PredictionContext, returnState: number) {\r\n\t\tsuper(PredictionContext.calculateSingleHashCode(parent, returnState));\r\n\t\t// assert(returnState != PredictionContext.EMPTY_FULL_STATE_KEY && returnState != PredictionContext.EMPTY_LOCAL_STATE_KEY);\r\n\t\tthis.parent = parent;\r\n\t\tthis.returnState = returnState;\r\n\t}\r\n\r\n\t@Override\r\n\tpublic getParent(index: number): PredictionContext {\r\n\t\t// assert(index == 0);\r\n\t\treturn this.parent;\r\n\t}\r\n\r\n\t@Override\r\n\tpublic getReturnState(index: number): number {\r\n\t\t// assert(index == 0);\r\n\t\treturn this.returnState;\r\n\t}\r\n\r\n\t@Override\r\n\tpublic findReturnState(returnState: number): number {\r\n\t\treturn this.returnState === returnState ? 
0 : -1;\r\n\t}\r\n\r\n\t@Override\r\n\tget size(): number {\r\n\t\treturn 1;\r\n\t}\r\n\r\n\t@Override\r\n\tget isEmpty(): boolean {\r\n\t\treturn false;\r\n\t}\r\n\r\n\t@Override\r\n\tget hasEmpty(): boolean {\r\n\t\treturn false;\r\n\t}\r\n\r\n\t@Override\r\n\tpublic appendContext(suffix: PredictionContext, contextCache: PredictionContextCache): PredictionContext {\r\n\t\treturn contextCache.getChild(this.parent.appendContext(suffix, contextCache), this.returnState);\r\n\t}\r\n\r\n\t@Override\r\n\tprotected addEmptyContext(): PredictionContext {\r\n\t\tlet parents: PredictionContext[] = [this.parent, PredictionContext.EMPTY_FULL];\r\n\t\tlet returnStates: number[] = [this.returnState, PredictionContext.EMPTY_FULL_STATE_KEY];\r\n\t\treturn new ArrayPredictionContext(parents, returnStates);\r\n\t}\r\n\r\n\t@Override\r\n\tprotected removeEmptyContext(): PredictionContext {\r\n\t\treturn this;\r\n\t}\r\n\r\n\t@Override\r\n\tpublic equals(o: any): boolean {\r\n\t\tif (o === this) {\r\n\t\t\treturn true;\r\n\t\t} else if (!(o instanceof SingletonPredictionContext)) {\r\n\t\t\treturn false;\r\n\t\t}\r\n\r\n\t\tlet other: SingletonPredictionContext = o;\r\n\t\tif (this.hashCode() !== other.hashCode()) {\r\n\t\t\treturn false;\r\n\t\t}\r\n\r\n\t\treturn this.returnState === other.returnState\r\n\t\t\t&& this.parent.equals(other.parent);\r\n\t}\r\n}\r\n\r\nexport namespace PredictionContext {\r\n\texport const EMPTY_LOCAL: PredictionContext = new EmptyPredictionContext(false);\r\n\texport const EMPTY_FULL: PredictionContext = new EmptyPredictionContext(true);\r\n\texport const EMPTY_LOCAL_STATE_KEY: number = -((1 << 31) >>> 0);\r\n\texport const EMPTY_FULL_STATE_KEY: number = ((1 << 31) >>> 0) - 1;\r\n\r\n\texport class IdentityHashMap extends Array2DHashMap {\r\n\t\tconstructor() {\r\n\t\t\tsuper(IdentityEqualityComparator.INSTANCE);\r\n\t\t}\r\n\t}\r\n\r\n\texport class IdentityEqualityComparator implements EqualityComparator {\r\n\t\tpublic static readonly INSTANCE: 
IdentityEqualityComparator = new IdentityEqualityComparator();\r\n\r\n\t\tprivate IdentityEqualityComparator() {\r\n\t\t\t// intentionally empty\r\n\t\t}\r\n\r\n\t\t@Override\r\n\t\tpublic hashCode(obj: PredictionContext): number {\r\n\t\t\treturn obj.hashCode();\r\n\t\t}\r\n\r\n\t\t@Override\r\n\t\tpublic equals(a: PredictionContext, b: PredictionContext): boolean {\r\n\t\t\treturn a === b;\r\n\t\t}\r\n\t}\r\n}\r\n", "/*!\r\n * Copyright 2016 The ANTLR Project. All rights reserved.\r\n * Licensed under the BSD-3-Clause license. See LICENSE file in the project root for license information.\r\n */\r\n\r\n// ConvertTo-TS run at 2016-10-04T11:26:25.2796692-07:00\r\n\r\nimport { Array2DHashMap } from \"../misc/Array2DHashMap\";\r\nimport { ATNState } from \"./ATNState\";\r\nimport { DecisionState } from \"./DecisionState\";\r\nimport { Equatable } from \"../misc/Stubs\";\r\nimport { LexerActionExecutor } from \"./LexerActionExecutor\";\r\nimport { MurmurHash } from \"../misc/MurmurHash\";\r\nimport { NotNull, Override } from \"../Decorators\";\r\nimport { ObjectEqualityComparator } from \"../misc/ObjectEqualityComparator\";\r\nimport { PredictionContext } from \"./PredictionContext\";\r\nimport { PredictionContextCache } from \"./PredictionContextCache\";\r\nimport { Recognizer } from \"../Recognizer\";\r\nimport { SemanticContext } from \"./SemanticContext\";\r\n\r\nimport * as assert from \"assert\";\r\n\r\n/**\r\n * This field stores the bit mask for implementing the\r\n * {@link #isPrecedenceFilterSuppressed} property as a bit within the\r\n * existing {@link #altAndOuterContextDepth} field.\r\n */\r\nconst SUPPRESS_PRECEDENCE_FILTER: number = 0x80000000;\r\n\r\n/**\r\n * Represents a location with context in an ATN. 
The location is identified by the following values:\r\n *\r\n * * The current ATN state\r\n * * The predicted alternative\r\n * * The semantic context which must be true for this configuration to be enabled\r\n * * The syntactic context, which is represented as a graph-structured stack whose path(s) lead to the root of the rule\r\n * invocations leading to this state\r\n *\r\n * In addition to these values, `ATNConfig` stores several properties about paths taken to get to the location which\r\n * were added over time to help with performance, correctness, and/or debugging.\r\n *\r\n * * `reachesIntoOuterContext`:: Used to ensure semantic predicates are not evaluated in the wrong context.\r\n * * `hasPassedThroughNonGreedyDecision`: Used for enabling first-match-wins instead of longest-match-wins after\r\n * crossing a non-greedy decision.\r\n * * `lexerActionExecutor`: Used for tracking the lexer action(s) to execute should this instance be selected during\r\n * lexing.\r\n * * `isPrecedenceFilterSuppressed`: A state variable for one of the dynamic disambiguation strategies employed by\r\n * `ParserATNSimulator.applyPrecedenceFilter`.\r\n *\r\n * Due to the use of a graph-structured stack, a single `ATNConfig` is capable of representing many individual ATN\r\n * configurations which reached the same location in an ATN by following different paths.\r\n *\r\n * PERF: To conserve memory, `ATNConfig` is split into several different concrete types. `ATNConfig` itself stores the\r\n * minimum amount of information typically used to define an `ATNConfig` instance. Various derived types provide\r\n * additional storage space for cases where a non-default value is used for some of the object properties. 
The\r\n * `ATNConfig.create` and `ATNConfig.transform` methods automatically select the smallest concrete type capable of\r\n * representing the unique information for any given `ATNConfig`.\r\n */\r\nexport class ATNConfig implements Equatable {\r\n\t/** The ATN state associated with this configuration */\r\n\t@NotNull\r\n\tprivate _state: ATNState;\r\n\r\n\t/**\r\n\t * This is a bit-field currently containing the following values.\r\n\t *\r\n\t * * 0x00FFFFFF: Alternative\r\n\t * * 0x7F000000: Outer context depth\r\n\t * * 0x80000000: Suppress precedence filter\r\n\t */\r\n\tprivate altAndOuterContextDepth: number;\r\n\r\n\t/** The stack of invoking states leading to the rule/states associated\r\n\t * with this config. We track only those contexts pushed during\r\n\t * execution of the ATN simulator.\r\n\t */\r\n\t@NotNull\r\n\tprivate _context: PredictionContext;\r\n\r\n\tconstructor(/*@NotNull*/ state: ATNState, alt: number, /*@NotNull*/ context: PredictionContext);\r\n\tconstructor(/*@NotNull*/ state: ATNState, /*@NotNull*/ c: ATNConfig, /*@NotNull*/ context: PredictionContext);\r\n\r\n\tconstructor(@NotNull state: ATNState, altOrConfig: number | ATNConfig, @NotNull context: PredictionContext) {\r\n\t\tif (typeof altOrConfig === \"number\") {\r\n\t\t\tassert((altOrConfig & 0xFFFFFF) === altOrConfig);\r\n\t\t\tthis._state = state;\r\n\t\t\tthis.altAndOuterContextDepth = altOrConfig;\r\n\t\t\tthis._context = context;\r\n\t\t} else {\r\n\t\t\tthis._state = state;\r\n\t\t\tthis.altAndOuterContextDepth = altOrConfig.altAndOuterContextDepth;\r\n\t\t\tthis._context = context;\r\n\t\t}\r\n\t}\r\n\r\n\tpublic static create(/*@NotNull*/ state: ATNState, alt: number, context: PredictionContext): ATNConfig;\r\n\r\n\tpublic static create(/*@NotNull*/ state: ATNState, alt: number, context: PredictionContext, /*@NotNull*/ semanticContext: SemanticContext): ATNConfig;\r\n\r\n\tpublic static create(/*@NotNull*/ state: ATNState, alt: number, context: PredictionContext, /*@*/ 
semanticContext: SemanticContext, lexerActionExecutor: LexerActionExecutor | undefined): ATNConfig;\r\n\r\n\tpublic static create(@NotNull state: ATNState, alt: number, context: PredictionContext, @NotNull semanticContext: SemanticContext = SemanticContext.NONE, lexerActionExecutor?: LexerActionExecutor): ATNConfig {\r\n\t\tif (semanticContext !== SemanticContext.NONE) {\r\n\t\t\tif (lexerActionExecutor != null) {\r\n\t\t\t\treturn new ActionSemanticContextATNConfig(lexerActionExecutor, semanticContext, state, alt, context, false);\r\n\t\t\t}\r\n\t\t\telse {\r\n\t\t\t\treturn new SemanticContextATNConfig(semanticContext, state, alt, context);\r\n\t\t\t}\r\n\t\t}\r\n\t\telse if (lexerActionExecutor != null) {\r\n\t\t\treturn new ActionATNConfig(lexerActionExecutor, state, alt, context, false);\r\n\t\t}\r\n\t\telse {\r\n\t\t\treturn new ATNConfig(state, alt, context);\r\n\t\t}\r\n\t}\r\n\r\n\t/** Gets the ATN state associated with this configuration */\r\n\t@NotNull\r\n\tget state(): ATNState {\r\n\t\treturn this._state;\r\n\t}\r\n\r\n\t/** What alt (or lexer rule) is predicted by this configuration */\r\n\tget alt(): number {\r\n\t\treturn this.altAndOuterContextDepth & 0x00FFFFFF;\r\n\t}\r\n\r\n\t@NotNull\r\n\tget context(): PredictionContext {\r\n\t\treturn this._context;\r\n\t}\r\n\r\n\tset context(@NotNull context: PredictionContext) {\r\n\t\tthis._context = context;\r\n\t}\r\n\r\n\tget reachesIntoOuterContext(): boolean {\r\n\t\treturn this.outerContextDepth !== 0;\r\n\t}\r\n\r\n\t/**\r\n\t * We cannot execute predicates dependent upon local context unless\r\n\t * we know for sure we are in the correct context. Because there is\r\n\t * no way to do this efficiently, we simply cannot evaluate\r\n\t * dependent predicates unless we are in the rule that initially\r\n\t * invokes the ATN simulator.\r\n\t *\r\n\t * closure() tracks the depth of how far we dip into the outer context:\r\n\t * depth > 0. 
Note that it may not be totally accurate depth since I\r\n\t * don't ever decrement. TODO: make it a boolean then\r\n\t */\r\n\tget outerContextDepth(): number {\r\n\t\treturn (this.altAndOuterContextDepth >>> 24) & 0x7F;\r\n\t}\r\n\r\n\tset outerContextDepth(outerContextDepth: number) {\r\n\t\tassert(outerContextDepth >= 0);\r\n\t\t// saturate at 0x7F - everything but zero/positive is only used for debug information anyway\r\n\t\touterContextDepth = Math.min(outerContextDepth, 0x7F);\r\n\t\tthis.altAndOuterContextDepth = ((outerContextDepth << 24) | (this.altAndOuterContextDepth & ~0x7F000000) >>> 0);\r\n\t}\r\n\r\n\tget lexerActionExecutor(): LexerActionExecutor | undefined {\r\n\t\treturn undefined;\r\n\t}\r\n\r\n\t@NotNull\r\n\tget semanticContext(): SemanticContext {\r\n\t\treturn SemanticContext.NONE;\r\n\t}\r\n\r\n\tget hasPassedThroughNonGreedyDecision(): boolean {\r\n\t\treturn false;\r\n\t}\r\n\r\n\t@Override\r\n\tpublic clone(): ATNConfig {\r\n\t\treturn this.transform(this.state, false);\r\n\t}\r\n\r\n\tpublic transform(/*@NotNull*/ state: ATNState, checkNonGreedy: boolean): ATNConfig;\r\n\tpublic transform(/*@NotNull*/ state: ATNState, checkNonGreedy: boolean, /*@NotNull*/ semanticContext: SemanticContext): ATNConfig;\r\n\tpublic transform(/*@NotNull*/ state: ATNState, checkNonGreedy: boolean, context: PredictionContext): ATNConfig;\r\n\tpublic transform(/*@NotNull*/ state: ATNState, checkNonGreedy: boolean, lexerActionExecutor: LexerActionExecutor): ATNConfig;\r\n\tpublic transform(/*@NotNull*/ state: ATNState, checkNonGreedy: boolean, arg2?: SemanticContext | PredictionContext | LexerActionExecutor): ATNConfig {\r\n\t\tif (arg2 == null) {\r\n\t\t\treturn this.transformImpl(state, this._context, this.semanticContext, checkNonGreedy, this.lexerActionExecutor);\r\n\t\t} else if (arg2 instanceof PredictionContext) {\r\n\t\t\treturn this.transformImpl(state, arg2, this.semanticContext, checkNonGreedy, this.lexerActionExecutor);\r\n\t\t} else if (arg2 
instanceof SemanticContext) {\r\n\t\t\treturn this.transformImpl(state, this._context, arg2, checkNonGreedy, this.lexerActionExecutor);\r\n\t\t} else {\r\n\t\t\treturn this.transformImpl(state, this._context, this.semanticContext, checkNonGreedy, arg2);\r\n\t\t}\r\n\t}\r\n\r\n\tprivate transformImpl(@NotNull state: ATNState, context: PredictionContext, @NotNull semanticContext: SemanticContext, checkNonGreedy: boolean, lexerActionExecutor: LexerActionExecutor | undefined): ATNConfig {\r\n\t\tlet passedThroughNonGreedy: boolean = checkNonGreedy && ATNConfig.checkNonGreedyDecision(this, state);\r\n\t\tif (semanticContext !== SemanticContext.NONE) {\r\n\t\t\tif (lexerActionExecutor != null || passedThroughNonGreedy) {\r\n\t\t\t\treturn new ActionSemanticContextATNConfig(lexerActionExecutor, semanticContext, state, this, context, passedThroughNonGreedy);\r\n\t\t\t}\r\n\t\t\telse {\r\n\t\t\t\treturn new SemanticContextATNConfig(semanticContext, state, this, context);\r\n\t\t\t}\r\n\t\t}\r\n\t\telse if (lexerActionExecutor != null || passedThroughNonGreedy) {\r\n\t\t\treturn new ActionATNConfig(lexerActionExecutor, state, this, context, passedThroughNonGreedy);\r\n\t\t}\r\n\t\telse {\r\n\t\t\treturn new ATNConfig(state, this, context);\r\n\t\t}\r\n\t}\r\n\r\n\tprivate static checkNonGreedyDecision(source: ATNConfig, target: ATNState): boolean {\r\n\t\treturn source.hasPassedThroughNonGreedyDecision\r\n\t\t\t|| target instanceof DecisionState && target.nonGreedy;\r\n\t}\r\n\r\n\tpublic appendContext(context: number, contextCache: PredictionContextCache): ATNConfig;\r\n\tpublic appendContext(context: PredictionContext, contextCache: PredictionContextCache): ATNConfig;\r\n\tpublic appendContext(context: number | PredictionContext, contextCache: PredictionContextCache): ATNConfig {\r\n\t\tif (typeof context === \"number\") {\r\n\t\t\tlet appendedContext: PredictionContext = this.context.appendSingleContext(context, contextCache);\r\n\t\t\tlet result: ATNConfig = 
this.transform(this.state, false, appendedContext);\r\n\t\t\treturn result;\r\n\t\t} else {\r\n\t\t\tlet appendedContext: PredictionContext = this.context.appendContext(context, contextCache);\r\n\t\t\tlet result: ATNConfig = this.transform(this.state, false, appendedContext);\r\n\t\t\treturn result;\r\n\t\t}\r\n\t}\r\n\r\n\t/**\r\n\t * Determines if this `ATNConfig` fully contains another `ATNConfig`.\r\n\t *\r\n\t * An ATN configuration represents a position (including context) in an ATN during parsing. Since `ATNConfig` stores\r\n\t * the context as a graph, a single `ATNConfig` instance is capable of representing many ATN configurations which\r\n\t * are all in the same \"location\" but have different contexts. These `ATNConfig` instances are again merged when\r\n\t * they are added to an `ATNConfigSet`. This method supports `ATNConfigSet.contains` by evaluating whether a\r\n\t * particular `ATNConfig` contains all of the ATN configurations represented by another `ATNConfig`.\r\n\t *\r\n\t * An `ATNConfig` _a_ contains another `ATNConfig` _b_ if all of the following conditions are met:\r\n\t *\r\n\t * * The configurations are in the same state (`state`)\r\n\t * * The configurations predict the same alternative (`alt`)\r\n\t * * The semantic context of _a_ implies the semantic context of _b_ (this method performs a weaker equality check)\r\n\t * * Joining the prediction contexts of _a_ and _b_ results in the prediction context of _a_\r\n\t *\r\n\t * This method implements a conservative approximation of containment. As a result, when this method returns `true`\r\n\t * it is known that parsing from `subconfig` can only recognize a subset of the inputs which can be recognized\r\n\t * starting at the current `ATNConfig`. 
However, due to the imprecise evaluation of implication for the semantic\r\n\t * contexts, no assumptions can be made about the relationship between the configurations when this method returns\r\n\t * `false`.\r\n\t *\r\n\t * @param subconfig The sub configuration.\r\n\t * @returns `true` if this configuration contains `subconfig`; otherwise, `false`.\r\n\t */\r\n\tpublic contains(subconfig: ATNConfig): boolean {\r\n\t\tif (this.state.stateNumber !== subconfig.state.stateNumber\r\n\t\t\t|| this.alt !== subconfig.alt\r\n\t\t\t|| !this.semanticContext.equals(subconfig.semanticContext)) {\r\n\t\t\treturn false;\r\n\t\t}\r\n\r\n\t\tlet leftWorkList: PredictionContext[] = [];\r\n\t\tlet rightWorkList: PredictionContext[] = [];\r\n\t\tleftWorkList.push(this.context);\r\n\t\trightWorkList.push(subconfig.context);\r\n\t\twhile (true) {\r\n\t\t\tlet left = leftWorkList.pop();\r\n\t\t\tlet right = rightWorkList.pop();\r\n\t\t\tif (!left || !right) {\r\n\t\t\t\tbreak;\r\n\t\t\t}\r\n\r\n\t\t\tif (left === right) {\r\n\t\t\t\treturn true;\r\n\t\t\t}\r\n\r\n\t\t\tif (left.size < right.size) {\r\n\t\t\t\treturn false;\r\n\t\t\t}\r\n\r\n\t\t\tif (right.isEmpty) {\r\n\t\t\t\treturn left.hasEmpty;\r\n\t\t\t} else {\r\n\t\t\t\tfor (let i = 0; i < right.size; i++) {\r\n\t\t\t\t\tlet index: number = left.findReturnState(right.getReturnState(i));\r\n\t\t\t\t\tif (index < 0) {\r\n\t\t\t\t\t\t// assumes invokingStates has no duplicate entries\r\n\t\t\t\t\t\treturn false;\r\n\t\t\t\t\t}\r\n\r\n\t\t\t\t\tleftWorkList.push(left.getParent(index));\r\n\t\t\t\t\trightWorkList.push(right.getParent(i));\r\n\t\t\t\t}\r\n\t\t\t}\r\n\t\t}\r\n\r\n\t\treturn false;\r\n\t}\r\n\r\n\tget isPrecedenceFilterSuppressed(): boolean {\r\n\t\treturn (this.altAndOuterContextDepth & SUPPRESS_PRECEDENCE_FILTER) !== 0;\r\n\t}\r\n\r\n\tset isPrecedenceFilterSuppressed(value: boolean) {\r\n\t\tif (value) {\r\n\t\t\tthis.altAndOuterContextDepth |= SUPPRESS_PRECEDENCE_FILTER;\r\n\t\t}\r\n\t\telse 
{\r\n\t\t\tthis.altAndOuterContextDepth &= ~SUPPRESS_PRECEDENCE_FILTER;\r\n\t\t}\r\n\t}\r\n\r\n\t/** An ATN configuration is equal to another if both have\r\n\t * the same state, they predict the same alternative, and\r\n\t * syntactic/semantic contexts are the same.\r\n\t */\r\n\t@Override\r\n\tpublic equals(o: any): boolean {\r\n\t\tif (this === o) {\r\n\t\t\treturn true;\r\n\t\t} else if (!(o instanceof ATNConfig)) {\r\n\t\t\treturn false;\r\n\t\t}\r\n\r\n\t\treturn this.state.stateNumber === o.state.stateNumber\r\n\t\t\t&& this.alt === o.alt\r\n\t\t\t&& this.reachesIntoOuterContext === o.reachesIntoOuterContext\r\n\t\t\t&& this.context.equals(o.context)\r\n\t\t\t&& this.semanticContext.equals(o.semanticContext)\r\n\t\t\t&& this.isPrecedenceFilterSuppressed === o.isPrecedenceFilterSuppressed\r\n\t\t\t&& this.hasPassedThroughNonGreedyDecision === o.hasPassedThroughNonGreedyDecision\r\n\t\t\t&& ObjectEqualityComparator.INSTANCE.equals(this.lexerActionExecutor, o.lexerActionExecutor);\r\n\t}\r\n\r\n\t@Override\r\n\tpublic hashCode(): number {\r\n\t\tlet hashCode: number = MurmurHash.initialize(7);\r\n\t\thashCode = MurmurHash.update(hashCode, this.state.stateNumber);\r\n\t\thashCode = MurmurHash.update(hashCode, this.alt);\r\n\t\thashCode = MurmurHash.update(hashCode, this.reachesIntoOuterContext ? 1 : 0);\r\n\t\thashCode = MurmurHash.update(hashCode, this.context);\r\n\t\thashCode = MurmurHash.update(hashCode, this.semanticContext);\r\n\t\thashCode = MurmurHash.update(hashCode, this.hasPassedThroughNonGreedyDecision ? 1 : 0);\r\n\t\thashCode = MurmurHash.update(hashCode, this.lexerActionExecutor);\r\n\t\thashCode = MurmurHash.finish(hashCode, 7);\r\n\t\treturn hashCode;\r\n\t}\r\n\r\n\t/**\r\n\t * Returns a graphical representation of the current `ATNConfig` in Graphviz format. 
The graph can be stored to a\r\n\t * **.dot** file and then rendered to an image using Graphviz.\r\n\t *\r\n\t * @returns A Graphviz graph representing the current `ATNConfig`.\r\n\t *\r\n\t * @see http://www.graphviz.org/\r\n\t */\r\n\tpublic toDotString(): string {\r\n\t\tlet builder = \"\";\r\n\t\tbuilder += (\"digraph G {\\n\");\r\n\t\tbuilder += (\"rankdir=LR;\\n\");\r\n\r\n\t\tlet visited = new Array2DHashMap(PredictionContext.IdentityEqualityComparator.INSTANCE);\r\n\t\tlet workList: PredictionContext[] = [];\r\n\t\tfunction getOrAddContext(context: PredictionContext): number {\r\n\t\t\tlet newNumber = visited.size;\r\n\t\t\tlet result = visited.putIfAbsent(context, newNumber);\r\n\t\t\tif (result != null) {\r\n\t\t\t\t// Already saw this context\r\n\t\t\t\treturn result;\r\n\t\t\t}\r\n\r\n\t\t\tworkList.push(context);\r\n\t\t\treturn newNumber;\r\n\t\t}\r\n\r\n\t\tworkList.push(this.context);\r\n\t\tvisited.put(this.context, 0);\r\n\t\twhile (true) {\r\n\t\t\tlet current = workList.pop();\r\n\t\t\tif (!current) {\r\n\t\t\t\tbreak;\r\n\t\t\t}\r\n\r\n\t\t\tfor (let i = 0; i < current.size; i++) {\r\n\t\t\t\tbuilder += (\" s\") + (getOrAddContext(current));\r\n\t\t\t\tbuilder += (\"->\");\r\n\t\t\t\tbuilder += (\"s\") + (getOrAddContext(current.getParent(i)));\r\n\t\t\t\tbuilder += (\"[label=\\\"\") + (current.getReturnState(i)) + (\"\\\"];\\n\");\r\n\t\t\t}\r\n\t\t}\r\n\r\n\t\tbuilder += (\"}\\n\");\r\n\t\treturn builder.toString();\r\n\t}\r\n\r\n\tpublic toString(): string;\r\n\tpublic toString(recog: Recognizer | undefined, showAlt: boolean): string;\r\n\tpublic toString(recog: Recognizer | undefined, showAlt: boolean, showContext: boolean): string;\r\n\tpublic toString(recog?: Recognizer, showAlt?: boolean, showContext?: boolean): string {\r\n\t\t// Must check showContext before showAlt to preserve original overload behavior\r\n\t\tif (showContext == null) {\r\n\t\t\tshowContext = showAlt != null;\r\n\t\t}\r\n\r\n\t\tif (showAlt == null) {\r\n\t\t\tshowAlt 
= true;\r\n\t\t}\r\n\r\n\t\tlet buf = \"\";\r\n\t\t// if (this.state.ruleIndex >= 0) {\r\n\t\t// \tif (recog != null) {\r\n\t\t// \t\tbuf += (recog.ruleNames[this.state.ruleIndex] + \":\");\r\n\t\t// \t} else {\r\n\t\t// \t\tbuf += (this.state.ruleIndex + \":\");\r\n\t\t// \t}\r\n\t\t// }\r\n\t\tlet contexts: string[];\r\n\t\tif (showContext) {\r\n\t\t\tcontexts = this.context.toStrings(recog, this.state.stateNumber);\r\n\t\t}\r\n\t\telse {\r\n\t\t\tcontexts = [\"?\"];\r\n\t\t}\r\n\r\n\t\tlet first: boolean = true;\r\n\t\tfor (let contextDesc of contexts) {\r\n\t\t\tif (first) {\r\n\t\t\t\tfirst = false;\r\n\t\t\t}\r\n\t\t\telse {\r\n\t\t\t\tbuf += (\", \");\r\n\t\t\t}\r\n\r\n\t\t\tbuf += (\"(\");\r\n\t\t\tbuf += (this.state);\r\n\t\t\tif (showAlt) {\r\n\t\t\t\tbuf += (\",\");\r\n\t\t\t\tbuf += (this.alt);\r\n\t\t\t}\r\n\t\t\tif (this.context) {\r\n\t\t\t\tbuf += (\",\");\r\n\t\t\t\tbuf += (contextDesc);\r\n\t\t\t}\r\n\t\t\tif (this.semanticContext !== SemanticContext.NONE) {\r\n\t\t\t\tbuf += (\",\");\r\n\t\t\t\tbuf += (this.semanticContext);\r\n\t\t\t}\r\n\t\t\tif (this.reachesIntoOuterContext) {\r\n\t\t\t\tbuf += (\",up=\") + (this.outerContextDepth);\r\n\t\t\t}\r\n\t\t\tbuf += (\")\");\r\n\t\t}\r\n\t\treturn buf.toString();\r\n\t}\r\n}\r\n\r\n/**\r\n * This class was derived from `ATNConfig` purely as a memory optimization. 
It allows for the creation of an `ATNConfig`\r\n * with a non-default semantic context.\r\n *\r\n * See the `ATNConfig` documentation for more information about conserving memory through the use of several concrete\r\n * types.\r\n */\r\nclass SemanticContextATNConfig extends ATNConfig {\r\n\t@NotNull\r\n\tprivate _semanticContext: SemanticContext;\r\n\r\n\tconstructor(semanticContext: SemanticContext, /*@NotNull*/ state: ATNState, alt: number, context: PredictionContext);\r\n\tconstructor(semanticContext: SemanticContext, /*@NotNull*/ state: ATNState, /*@NotNull*/ c: ATNConfig, context: PredictionContext);\r\n\tconstructor(semanticContext: SemanticContext, @NotNull state: ATNState, @NotNull altOrConfig: number | ATNConfig, context: PredictionContext) {\r\n\t\tif (typeof altOrConfig === \"number\") {\r\n\t\t\tsuper(state, altOrConfig, context);\r\n\t\t} else {\r\n\t\t\tsuper(state, altOrConfig, context);\r\n\t\t}\r\n\r\n\t\tthis._semanticContext = semanticContext;\r\n\t}\r\n\r\n\t@Override\r\n\tget semanticContext(): SemanticContext {\r\n\t\treturn this._semanticContext;\r\n\t}\r\n\r\n}\r\n\r\n/**\r\n * This class was derived from `ATNConfig` purely as a memory optimization. 
It allows for the creation of an `ATNConfig`\r\n * with a lexer action.\r\n *\r\n * See the `ATNConfig` documentation for more information about conserving memory through the use of several concrete\r\n * types.\r\n */\r\nclass ActionATNConfig extends ATNConfig {\r\n\tprivate _lexerActionExecutor?: LexerActionExecutor;\r\n\tprivate passedThroughNonGreedyDecision: boolean;\r\n\r\n\tconstructor(lexerActionExecutor: LexerActionExecutor | undefined, /*@NotNull*/ state: ATNState, alt: number, context: PredictionContext, passedThroughNonGreedyDecision: boolean);\r\n\tconstructor(lexerActionExecutor: LexerActionExecutor | undefined, /*@NotNull*/ state: ATNState, /*@NotNull*/ c: ATNConfig, context: PredictionContext, passedThroughNonGreedyDecision: boolean);\r\n\tconstructor(lexerActionExecutor: LexerActionExecutor | undefined, @NotNull state: ATNState, @NotNull altOrConfig: number | ATNConfig, context: PredictionContext, passedThroughNonGreedyDecision: boolean) {\r\n\t\tif (typeof altOrConfig === \"number\") {\r\n\t\t\tsuper(state, altOrConfig, context);\r\n\t\t} else {\r\n\t\t\tsuper(state, altOrConfig, context);\r\n\t\t\tif (altOrConfig.semanticContext !== SemanticContext.NONE) {\r\n\t\t\t\tthrow new Error(\"Not supported\");\r\n\t\t\t}\r\n\t\t}\r\n\r\n\t\tthis._lexerActionExecutor = lexerActionExecutor;\r\n\t\tthis.passedThroughNonGreedyDecision = passedThroughNonGreedyDecision;\r\n\t}\r\n\r\n\t@Override\r\n\tget lexerActionExecutor(): LexerActionExecutor | undefined {\r\n\t\treturn this._lexerActionExecutor;\r\n\t}\r\n\r\n\t@Override\r\n\tget hasPassedThroughNonGreedyDecision(): boolean {\r\n\t\treturn this.passedThroughNonGreedyDecision;\r\n\t}\r\n}\r\n\r\n/**\r\n * This class was derived from `SemanticContextATNConfig` purely as a memory optimization. 
It allows for the creation of\r\n * an `ATNConfig` with both a lexer action and a non-default semantic context.\r\n *\r\n * See the `ATNConfig` documentation for more information about conserving memory through the use of several concrete\r\n * types.\r\n */\r\nclass ActionSemanticContextATNConfig extends SemanticContextATNConfig {\r\n\tprivate _lexerActionExecutor?: LexerActionExecutor;\r\n\tprivate passedThroughNonGreedyDecision: boolean;\r\n\r\n\tconstructor(lexerActionExecutor: LexerActionExecutor | undefined, /*@NotNull*/ semanticContext: SemanticContext, /*@NotNull*/ state: ATNState, alt: number, context: PredictionContext, passedThroughNonGreedyDecision: boolean);\r\n\tconstructor(lexerActionExecutor: LexerActionExecutor | undefined, /*@NotNull*/ semanticContext: SemanticContext, /*@NotNull*/ state: ATNState, /*@NotNull*/ c: ATNConfig, context: PredictionContext, passedThroughNonGreedyDecision: boolean);\r\n\tconstructor(lexerActionExecutor: LexerActionExecutor | undefined, @NotNull semanticContext: SemanticContext, @NotNull state: ATNState, altOrConfig: number | ATNConfig, context: PredictionContext, passedThroughNonGreedyDecision: boolean) {\r\n\t\tif (typeof altOrConfig === \"number\") {\r\n\t\t\tsuper(semanticContext, state, altOrConfig, context);\r\n\t\t} else {\r\n\t\t\tsuper(semanticContext, state, altOrConfig, context);\r\n\t\t}\r\n\r\n\t\tthis._lexerActionExecutor = lexerActionExecutor;\r\n\t\tthis.passedThroughNonGreedyDecision = passedThroughNonGreedyDecision;\r\n\t}\r\n\r\n\t@Override\r\n\tget lexerActionExecutor(): LexerActionExecutor | undefined {\r\n\t\treturn this._lexerActionExecutor;\r\n\t}\r\n\r\n\t@Override\r\n\tget hasPassedThroughNonGreedyDecision(): boolean {\r\n\t\treturn this.passedThroughNonGreedyDecision;\r\n\t}\r\n}\r\n", "/*!\r\n * Copyright 2016 The ANTLR Project. All rights reserved.\r\n * Licensed under the BSD-3-Clause license. 
See LICENSE file in the project root for license information.\r\n */\r\n\r\nimport * as assert from \"assert\";\r\nimport * as util from \"util\";\r\nimport { MurmurHash } from \"./MurmurHash\";\r\n\r\n/**\r\n * Private empty array used to construct empty BitSets\r\n */\r\nconst EMPTY_DATA: Uint16Array = new Uint16Array(0);\r\n\r\n/**\r\n * Gets the word index of the `UInt16` element in `BitSet.data` containing the bit with the specified index.\r\n */\r\nfunction getIndex(bitNumber: number) {\r\n\treturn bitNumber >>> 4;\r\n}\r\n\r\n/**\r\n * Convert a word index into the bit index of the LSB of that word\r\n */\r\n\r\nfunction unIndex(n: number) {\r\n\treturn n * 16;\r\n}\r\n\r\n/**\r\n * Get's the bit number of the least signficant bit set LSB which is set in a word non-zero word;\r\n * Bit numbers run from LSB to MSB starting with 0.\r\n */\r\nfunction findLSBSet(word: number) {\r\n\tlet bit = 1;\r\n\tfor (let i = 0; i < 16; i++) {\r\n\t\tif ((word & bit) !== 0) {\r\n\t\t\treturn i;\r\n\t\t}\r\n\t\tbit = (bit << 1) >>> 0;\r\n\t}\r\n\tthrow new RangeError(\"No specified bit found\");\r\n}\r\n\r\nfunction findMSBSet(word: number) {\r\n\tlet bit = (1 << 15) >>> 0;\r\n\tfor (let i = 15; i >= 0; i--) {\r\n\t\tif ((word & bit) !== 0) {\r\n\t\t\treturn i;\r\n\t\t}\r\n\t\tbit = bit >>> 1;\r\n\t}\r\n\tthrow new RangeError(\"No specified bit found\");\r\n}\r\n\r\n/**\r\n * Gets a 16-bit mask with bit numbers fromBit to toBit (inclusive) set.\r\n * Bit numbers run from LSB to MSB starting with 0.\r\n */\r\nfunction bitsFor(fromBit: number, toBit: number): number {\r\n\tfromBit &= 0xF;\r\n\ttoBit &= 0xF;\r\n\tif (fromBit === toBit) {\r\n\t\treturn (1 << fromBit) >>> 0;\r\n\t}\r\n\treturn ((0xFFFF >>> (15 - toBit)) ^ (0xFFFF >>> (16 - fromBit)));\r\n}\r\n\r\n/**\r\n * A lookup table for number of set bits in a 16-bit integer. 
This is used to quickly count the cardinality (number of unique elements) of a BitSet.\r\n */\r\nconst POP_CNT: Uint8Array = new Uint8Array(65536);\r\nfor (let i = 0; i < 16; i++) {\r\n\tconst stride = (1 << i) >>> 0;\r\n\tlet index = 0;\r\n\twhile (index < POP_CNT.length) {\r\n\t\t// skip the numbers where the bit isn't set\r\n\t\tindex += stride;\r\n\r\n\t\t// increment the ones where the bit is set\r\n\t\tfor (let j = 0; j < stride; j++) {\r\n\t\t\tPOP_CNT[index]++;\r\n\t\t\tindex++;\r\n\t\t}\r\n\t}\r\n}\r\n\r\nexport class BitSet implements Iterable{\r\n\tprivate data: Uint16Array;\r\n\r\n\t/**\r\n\t * Creates a new bit set. All bits are initially `false`.\r\n\t */\r\n\tconstructor();\r\n\r\n\t/**\r\n\t * Creates a bit set whose initial size is large enough to explicitly represent bits with indices in the range `0`\r\n\t * through `nbits-1`. All bits are initially `false`.\r\n\t */\r\n\tconstructor(nbits: number);\r\n\r\n\t/**\r\n\t * Creates a bit set from a iterable list of numbers (including another BitSet);\r\n\t */\r\n\tconstructor(numbers: Iterable);\r\n\r\n\t/*\r\n\t** constructor implementation\r\n\t*/\r\n\tconstructor(arg?: number | Iterable) {\r\n\t\tif (!arg) {\r\n\t\t\t// covering the case of unspecified and nbits===0\r\n\t\t\tthis.data = EMPTY_DATA;\r\n\t\t} else if (typeof arg === \"number\") {\r\n\t\t\tif (arg < 0) {\r\n\t\t\t\tthrow new RangeError(\"nbits cannot be negative\");\r\n\t\t\t} else {\r\n\t\t\t\tthis.data = new Uint16Array(getIndex(arg - 1) + 1);\r\n\t\t\t}\r\n\t\t} else {\r\n\t\t\tif (arg instanceof BitSet) {\r\n\t\t\t\tthis.data = arg.data.slice(0); // Clone the data\r\n\t\t\t} else {\r\n\t\t\t\tlet max = -1;\r\n\t\t\t\tfor (let v of arg) {\r\n\t\t\t\t\tif (max < v) {\r\n\t\t\t\t\t\tmax = v;\r\n\t\t\t\t\t}\r\n\t\t\t\t}\r\n\t\t\t\tthis.data = new Uint16Array(getIndex(max - 1) + 1);\r\n\t\t\t\tfor (let v of arg) {\r\n\t\t\t\t\tthis.set(v);\r\n\t\t\t\t}\r\n\t\t\t}\r\n\t\t}\r\n\t}\r\n\r\n\t/**\r\n\t * Performs a logical **AND** of this 
target bit set with the argument bit set. This bit set is modified so that\r\n\t * each bit in it has the value `true` if and only if it both initially had the value `true` and the corresponding\r\n\t * bit in the bit set argument also had the value `true`.\r\n\t */\r\n\tpublic and(set: BitSet): void {\r\n\t\tconst data = this.data;\r\n\t\tconst other = set.data;\r\n\t\tconst words = Math.min(data.length, other.length);\r\n\r\n\t\tlet lastWord = -1;\t// Keep track of index of last non-zero word\r\n\r\n\t\tfor (let i = 0; i < words; i++) {\r\n\t\t\tlet value = data[i] &= other[i];\r\n\t\t\tif (value !== 0) {\r\n\t\t\t\tlastWord = i;\r\n\t\t\t}\r\n\t\t}\r\n\r\n\t\tif (lastWord === -1) {\r\n\t\t\tthis.data = EMPTY_DATA;\r\n\t\t}\r\n\r\n\t\tif (lastWord < data.length - 1) {\r\n\t\t\tthis.data = data.slice(0, lastWord + 1);\r\n\t\t}\r\n\t}\r\n\r\n\t/**\r\n\t * Clears all of the bits in this `BitSet` whose corresponding bit is set in the specified `BitSet`.\r\n\t */\r\n\tpublic andNot(set: BitSet): void {\r\n\t\tconst data = this.data;\r\n\t\tconst other = set.data;\r\n\t\tconst words = Math.min(data.length, other.length);\r\n\r\n\t\tlet lastWord = -1;\t// Keep track of index of last non-zero word\r\n\r\n\t\tfor (let i = 0; i < words; i++) {\r\n\t\t\tlet value = data[i] &= (other[i] ^ 0xFFFF);\r\n\t\t\tif (value !== 0) {\r\n\t\t\t\tlastWord = i;\r\n\t\t\t}\r\n\t\t}\r\n\r\n\t\tif (lastWord === -1) {\r\n\t\t\tthis.data = EMPTY_DATA;\r\n\t\t}\r\n\r\n\t\tif (lastWord < data.length - 1) {\r\n\t\t\tthis.data = data.slice(0, lastWord + 1);\r\n\t\t}\r\n\t}\r\n\r\n\r\n\t/**\r\n\t * Returns the number of bits set to `true` in this `BitSet`.\r\n\t */\r\n\tpublic cardinality(): number {\r\n\t\tif (this.isEmpty) {\r\n\t\t\treturn 0;\r\n\t\t}\r\n\t\tconst data = this.data;\r\n\t\tconst length = data.length;\r\n\t\tlet result = 0;\r\n\r\n\t\tfor (let i = 0; i < length; i++) {\r\n\t\t\tresult += POP_CNT[data[i]];\r\n\t\t}\r\n\r\n\t\treturn result;\r\n\t}\r\n\r\n\t/**\r\n\t * Sets all of 
the bits in this `BitSet` to `false`.\r\n\t */\r\n\tpublic clear(): void;\r\n\r\n\t/**\r\n\t * Sets the bit specified by the index to `false`.\r\n\t *\r\n\t * @param bitIndex the index of the bit to be cleared\r\n\t *\r\n\t * @throws RangeError if the specified index is negative\r\n\t */\r\n\tpublic clear(bitIndex: number): void;\r\n\r\n\t/**\r\n\t * Sets the bits from the specified `fromIndex` (inclusive) to the specified `toIndex` (exclusive) to `false`.\r\n\t *\r\n\t * @param fromIndex index of the first bit to be cleared\r\n\t * @param toIndex index after the last bit to be cleared\r\n\t *\r\n\t * @throws RangeError if `fromIndex` is negative, or `toIndex` is negative, or `fromIndex` is larger than `toIndex`\r\n\t */\r\n\tpublic clear(fromIndex: number, toIndex: number): void;\r\n\tpublic clear(fromIndex?: number, toIndex?: number): void {\r\n\t\tif (fromIndex == null) {\r\n\t\t\tthis.data.fill(0);\r\n\t\t} else if (toIndex == null) {\r\n\t\t\tthis.set(fromIndex, false);\r\n\t\t} else {\r\n\t\t\tthis.set(fromIndex, toIndex, false);\r\n\t\t}\r\n\t}\r\n\r\n\t/**\r\n\t * Sets the bit at the specified index to the complement of its current value.\r\n\t *\r\n\t * @param bitIndex the index of the bit to flip\r\n\t *\r\n\t * @throws RangeError if the specified index is negative\r\n\t */\r\n\tpublic flip(bitIndex: number): void;\r\n\r\n\t/**\r\n\t * Sets each bit from the specified `fromIndex` (inclusive) to the specified `toIndex` (exclusive) to the complement\r\n\t * of its current value.\r\n\t *\r\n\t * @param fromIndex index of the first bit to flip\r\n\t * @param toIndex index after the last bit to flip\r\n\t *\r\n\t * @throws RangeError if `fromIndex` is negative, or `toIndex` is negative, or `fromIndex` is larger than `toIndex`\r\n\t */\r\n\tpublic flip(fromIndex: number, toIndex: number): void;\r\n\tpublic flip(fromIndex: number, toIndex?: number): void {\r\n\t\tif (toIndex == null) {\r\n\t\t\ttoIndex = fromIndex;\r\n\t\t}\r\n\t\tif (fromIndex < 0 || toIndex < 
fromIndex) {\r\n\t\t\tthrow new RangeError();\r\n\t\t}\r\n\r\n\t\tlet word = getIndex(fromIndex);\r\n\t\tconst lastWord = getIndex(toIndex);\r\n\r\n\t\tif (word === lastWord) {\r\n\t\t\tthis.data[word] ^= bitsFor(fromIndex, toIndex);\r\n\t\t} else {\r\n\t\t\tthis.data[word++] ^= bitsFor(fromIndex, 15);\r\n\t\t\twhile (word < lastWord) {\r\n\t\t\t\tthis.data[word++] ^= 0xFFFF;\r\n\t\t\t}\r\n\t\t\tthis.data[word++] ^= bitsFor(0, toIndex);\r\n\t\t}\r\n\t}\r\n\r\n\t/**\r\n\t * Returns the value of the bit with the specified index. The value is `true` if the bit with the index `bitIndex`\r\n\t * is currently set in this `BitSet`; otherwise, the result is `false`.\r\n\t *\r\n\t * @param bitIndex the bit index\r\n\t *\r\n\t * @throws RangeError if the specified index is negative\r\n\t */\r\n\tpublic get(bitIndex: number): boolean;\r\n\r\n\t/**\r\n\t * Returns a new `BitSet` composed of bits from this `BitSet` from `fromIndex` (inclusive) to `toIndex` (exclusive).\r\n\t *\r\n\t * @param fromIndex index of the first bit to include\r\n\t * @param toIndex index after the last bit to include\r\n\t *\r\n\t * @throws RangeError if `fromIndex` is negative, or `toIndex` is negative, or `fromIndex` is larger than `toIndex`\r\n\t */\r\n\tpublic get(fromIndex: number, toIndex: number): BitSet;\r\n\tpublic get(fromIndex: number, toIndex?: number): boolean | BitSet {\r\n\t\tif (toIndex === undefined) {\r\n\t\t\treturn !!(this.data[getIndex(fromIndex)] & bitsFor(fromIndex, fromIndex));\r\n\t\t} else {\r\n\t\t\t// return a BitSet\r\n\t\t\tlet result = new BitSet(toIndex + 1);\r\n\t\t\tfor (let i = fromIndex; i <= toIndex; i++) {\r\n\t\t\t\tresult.set(i, this.get(i));\r\n\t\t\t}\r\n\t\t\treturn result;\r\n\t\t}\r\n\t}\r\n\r\n\t/**\r\n\t * Returns true if the specified `BitSet` has any bits set to `true` that are also set to `true` in this `BitSet`.\r\n\t *\r\n\t * @param set `BitSet` to intersect with\r\n\t */\r\n\tpublic intersects(set: BitSet): boolean {\r\n\t\tlet smallerLength = 
Math.min(this.length(), set.length());\r\n\t\tif (smallerLength === 0) {\r\n\t\t\treturn false;\r\n\t\t}\r\n\r\n\t\tlet bound = getIndex(smallerLength - 1);\r\n\t\tfor (let i = 0; i <= bound; i++) {\r\n\t\t\tif ((this.data[i] & set.data[i]) !== 0) {\r\n\t\t\t\treturn true;\r\n\t\t\t}\r\n\t\t}\r\n\r\n\t\treturn false;\r\n\t}\r\n\r\n\t/**\r\n\t * Returns true if this `BitSet` contains no bits that are set to `true`.\r\n\t */\r\n\tget isEmpty(): boolean {\r\n\t\treturn this.length() === 0;\r\n\t}\r\n\r\n\t/**\r\n\t * Returns the \"logical size\" of this `BitSet`: the index of the highest set bit in the `BitSet` plus one. Returns\r\n\t * zero if the `BitSet` contains no set bits.\r\n\t */\r\n\tpublic length(): number {\r\n\t\tif (!this.data.length) {\r\n\t\t\treturn 0;\r\n\t\t}\r\n\t\treturn this.previousSetBit(unIndex(this.data.length) - 1) + 1;\r\n\t}\r\n\r\n\t/**\r\n\t * Returns the index of the first bit that is set to `false` that occurs on or after the specified starting index,\r\n\t * If no such bit exists then `-1` is returned.\r\n\t *\r\n\t * @param fromIndex the index to start checking from (inclusive)\r\n\t *\r\n\t * @throws RangeError if the specified index is negative\r\n\t */\r\n\tpublic nextClearBit(fromIndex: number): number {\r\n\t\tif (fromIndex < 0) {\r\n\t\t\tthrow new RangeError(\"fromIndex cannot be negative\");\r\n\t\t}\r\n\r\n\t\tconst data = this.data;\r\n\t\tconst length = data.length;\r\n\t\tlet word = getIndex(fromIndex);\r\n\t\tif (word > length) {\r\n\t\t\treturn -1;\r\n\t\t}\r\n\r\n\t\tlet ignore = 0xFFFF ^ bitsFor(fromIndex, 15);\r\n\r\n\t\tif ((data[word] | ignore) === 0xFFFF) {\r\n\t\t\tword++;\r\n\t\t\tignore = 0;\r\n\t\t\tfor (; word < length; word++) {\r\n\t\t\t\tif (data[word] !== 0xFFFF) {\r\n\t\t\t\t\tbreak;\r\n\t\t\t\t}\r\n\t\t\t}\r\n\t\t\tif (word === length) {\r\n\t\t\t\t// Hit the end\r\n\t\t\t\treturn -1;\r\n\t\t\t}\r\n\t\t}\r\n\t\treturn unIndex(word) + findLSBSet((data[word] | ignore) ^ 0xFFFF);\r\n\t}\r\n\r\n\t/**\r\n\t * 
Returns the index of the first bit that is set to `true` that occurs on or after the specified starting index.\r\n\t * If no such bit exists then `-1` is returned.\r\n\t *\r\n\t * To iterate over the `true` bits in a `BitSet`, use the following loop:\r\n\t *\r\n\t * ```\r\n\t * for (let i = bs.nextSetBit(0); i >= 0; i = bs.nextSetBit(i + 1)) {\r\n\t * // operate on index i here\r\n\t * }\r\n\t * ```\r\n\t *\r\n\t * @param fromIndex the index to start checking from (inclusive)\r\n\t *\r\n\t * @throws RangeError if the specified index is negative\r\n\t */\r\n\tpublic nextSetBit(fromIndex: number): number {\r\n\t\tif (fromIndex < 0) {\r\n\t\t\tthrow new RangeError(\"fromIndex cannot be negative\");\r\n\t\t}\r\n\r\n\t\tconst data = this.data;\r\n\t\tconst length = data.length;\r\n\t\tlet word = getIndex(fromIndex);\r\n\t\tif (word > length) {\r\n\t\t\treturn -1;\r\n\t\t}\r\n\t\tlet mask = bitsFor(fromIndex, 15);\r\n\r\n\t\tif ((data[word] & mask) === 0) {\r\n\t\t\tword++;\r\n\t\t\tmask = 0xFFFF;\r\n\t\t\tfor (; word < length; word++) {\r\n\t\t\t\tif (data[word] !== 0) {\r\n\t\t\t\t\tbreak;\r\n\t\t\t\t}\r\n\t\t\t}\r\n\t\t\tif (word >= length) {\r\n\t\t\t\treturn -1;\r\n\t\t\t}\r\n\t\t}\r\n\t\treturn unIndex(word) + findLSBSet(data[word] & mask);\r\n\t}\r\n\r\n\t/**\r\n\t * Performs a logical **OR** of this bit set with the bit set argument. This bit set is modified so that a bit in it\r\n\t * has the value `true` if and only if it either already had the value `true` or the corresponding bit in the bit\r\n\t * set argument has the value `true`.\r\n\t */\r\n\tpublic or(set: BitSet): void {\r\n\t\tconst data = this.data;\r\n\t\tconst other = set.data;\r\n\t\tconst minWords = Math.min(data.length, other.length);\r\n\t\tconst words = Math.max(data.length, other.length);\r\n\t\tconst dest = data.length === words ? 
data : new Uint16Array(words);\r\n\r\n\t\tlet lastWord = -1;\r\n\r\n\t\t// Or those words both sets have in common\r\n\r\n\t\tfor (let i = 0; i < minWords; i++) {\r\n\t\t\tlet value = dest[i] = data[i] | other[i];\r\n\t\t\tif (value !== 0) {\r\n\t\t\t\tlastWord = i;\r\n\t\t\t}\r\n\t\t}\r\n\r\n\t\t// Copy words from larger set (if there is one)\r\n\r\n\t\tconst longer = data.length > other.length ? data : other;\r\n\t\tfor (let i = minWords; i < words; i++) {\r\n\t\t\tlet value = dest[i] = longer[i];\r\n\t\t\tif (value !== 0) {\r\n\t\t\t\tlastWord = i;\r\n\t\t\t}\r\n\t\t}\r\n\r\n\t\tif (lastWord === -1) {\r\n\t\t\tthis.data = EMPTY_DATA;\r\n\t\t} else if (dest.length === lastWord + 1) {\r\n\t\t\tthis.data = dest;\r\n\t\t} else {\r\n\t\t\tthis.data = dest.slice(0, lastWord);\r\n\t\t}\r\n\t}\r\n\r\n\t/**\r\n\t * Returns the index of the nearest bit that is set to `false` that occurs on or before the specified starting\r\n\t * index. If no such bit exists, or if `-1` is given as the starting index, then `-1` is returned.\r\n\t *\r\n\t * @param fromIndex the index to start checking from (inclusive)\r\n\t *\r\n\t * @throws RangeError if the specified index is less than `-1`\r\n\t */\r\n\tpublic previousClearBit(fromIndex: number): number {\r\n\t\tif (fromIndex < 0) {\r\n\t\t\tthrow new RangeError(\"fromIndex cannot be negative\");\r\n\t\t}\r\n\r\n\t\tconst data = this.data;\r\n\t\tconst length = data.length;\r\n\t\tlet word = getIndex(fromIndex);\r\n\t\tif (word >= length) {\r\n\t\t\tword = length - 1;\r\n\t\t}\r\n\r\n\t\tlet ignore = 0xFFFF ^ bitsFor(0, fromIndex);\r\n\r\n\t\tif ((data[word] | ignore) === 0xFFFF) {\r\n\t\t\tignore = 0;\r\n\t\t\tword--;\r\n\t\t\tfor (; word >= 0; word--) {\r\n\t\t\t\tif (data[word] !== 0xFFFF) {\r\n\t\t\t\t\tbreak;\r\n\t\t\t\t}\r\n\t\t\t}\r\n\t\t\tif (word < 0) {\r\n\t\t\t\t// Hit the end\r\n\t\t\t\treturn -1;\r\n\t\t\t}\r\n\t\t}\r\n\t\treturn unIndex(word) + findMSBSet((data[word] | ignore) ^ 0xFFFF);\r\n\t}\r\n\r\n\r\n\t/**\r\n\t * 
Returns the index of the nearest bit that is set to `true` that occurs on or before the specified starting index.\r\n\t * If no such bit exists, or if `-1` is given as the starting index, then `-1` is returned.\r\n\t *\r\n\t * To iterate over the `true` bits in a `BitSet`, use the following loop:\r\n\t *\r\n\t * ```\r\n\t * for (let i = bs.length(); (i = bs.previousSetBit(i-1)) >= 0; ) {\r\n\t * // operate on index i here\r\n\t * }\r\n\t * ```\r\n\t *\r\n\t * @param fromIndex the index to start checking from (inclusive)\r\n\t *\r\n\t * @throws RangeError if the specified index is less than `-1`\r\n\t */\r\n\tpublic previousSetBit(fromIndex: number): number {\r\n\t\tif (fromIndex < 0) {\r\n\t\t\tthrow new RangeError(\"fromIndex cannot be negative\");\r\n\t\t}\r\n\r\n\t\tconst data = this.data;\r\n\t\tconst length = data.length;\r\n\t\tlet word = getIndex(fromIndex);\r\n\t\tif (word >= length) {\r\n\t\t\tword = length - 1;\r\n\t\t}\r\n\r\n\t\tlet mask = bitsFor(0, fromIndex);\r\n\r\n\t\tif ((data[word] & mask) === 0) {\r\n\t\t\tword--;\r\n\t\t\tmask = 0xFFFF;\r\n\t\t\tfor (; word >= 0; word--) {\r\n\t\t\t\tif (data[word] !== 0) {\r\n\t\t\t\t\tbreak;\r\n\t\t\t\t}\r\n\t\t\t}\r\n\t\t\tif (word < 0) {\r\n\t\t\t\treturn -1;\r\n\t\t\t}\r\n\t\t}\r\n\t\treturn unIndex(word) + findMSBSet(data[word] & mask);\r\n\t}\r\n\r\n\t/**\r\n\t * Sets the bit at the specified index to `true`.\r\n\t *\r\n\t * @param bitIndex a bit index\r\n\t *\r\n\t * @throws RangeError if the specified index is negative\r\n\t */\r\n\tpublic set(bitIndex: number): void;\r\n\r\n\t/**\r\n\t * Sets the bit at the specified index to the specified value.\r\n\t *\r\n\t * @param bitIndex a bit index\r\n\t * @param value a boolean value to set\r\n\t *\r\n\t * @throws RangeError if the specified index is negative\r\n\t */\r\n\tpublic set(bitIndex: number, value: boolean): void;\r\n\r\n\t/**\r\n\t * Sets the bits from the specified `fromIndex` (inclusive) to the specified `toIndex` (exclusive) to `true`.\r\n\t 
*\r\n\t * @param fromIndex index of the first bit to be set\r\n\t * @param toIndex index after the last bit to be set\r\n\t *\r\n\t * @throws RangeError if `fromIndex` is negative, or `toIndex` is negative, or `fromIndex` is larger than `toIndex`\r\n\t */\r\n\tpublic set(fromIndex: number, toIndex: number): void;\r\n\r\n\t/**\r\n\t * Sets the bits from the specified `fromIndex` (inclusive) to the specified `toIndex` (exclusive) to the specified\r\n\t * value.\r\n\t *\r\n\t * @param fromIndex index of the first bit to be set\r\n\t * @param toIndex index after the last bit to be set\r\n\t * @param value value to set the selected bits to\r\n\t *\r\n\t * @throws RangeError if `fromIndex` is negative, or `toIndex` is negative, or `fromIndex` is larger than `toIndex`\r\n\t */\r\n\tpublic set(fromIndex: number, toIndex: number, value: boolean): void;\r\n\tpublic set(fromIndex: number, toIndex?: boolean | number, value?: boolean): void {\r\n\t\tif (toIndex === undefined) {\r\n\t\t\ttoIndex = fromIndex;\r\n\t\t\tvalue = true;\r\n\t\t} else if (typeof toIndex === \"boolean\") {\r\n\t\t\tvalue = toIndex;\r\n\t\t\ttoIndex = fromIndex;\r\n\t\t}\r\n\r\n\t\tif (value === undefined) {\r\n\t\t\tvalue = true;\r\n\t\t}\r\n\r\n\t\tif (fromIndex < 0 || fromIndex > toIndex) {\r\n\t\t\tthrow new RangeError();\r\n\t\t}\r\n\r\n\t\tlet word = getIndex(fromIndex);\r\n\t\tlet lastWord = getIndex(toIndex);\r\n\r\n\t\tif (value && lastWord >= this.data.length) {\r\n\t\t\t// Grow array \"just enough\" for bits we need to set\r\n\t\t\tlet temp = new Uint16Array(lastWord + 1);\r\n\t\t\tthis.data.forEach((value, index) => temp[index] = value);\r\n\t\t\tthis.data = temp;\r\n\t\t} else if (!value) {\r\n\t\t\t// But there is no need to grow array to clear bits.\r\n\t\t\tif (word >= this.data.length) {\r\n\t\t\t\t// Early exit\r\n\t\t\t\treturn;\r\n\t\t\t}\r\n\t\t\tif (lastWord >= this.data.length) {\r\n\t\t\t\t// Adjust work to fit array\r\n\t\t\t\tlastWord = this.data.length - 1;\r\n\t\t\t\ttoIndex = 
this.data.length * 16 - 1;\r\n\t\t\t}\r\n\t\t}\r\n\r\n\t\tif (word === lastWord) {\r\n\t\t\tthis._setBits(word, value, bitsFor(fromIndex, toIndex));\r\n\t\t} else {\r\n\t\t\tthis._setBits(word++, value, bitsFor(fromIndex, 15));\r\n\t\t\twhile (word < lastWord) {\r\n\t\t\t\tthis.data[word++] = value ? 0xFFFF : 0;\r\n\t\t\t}\r\n\t\t\tthis._setBits(word, value, bitsFor(0, toIndex));\r\n\t\t}\r\n\t}\r\n\r\n\tprivate _setBits(word: number, value: boolean, mask: number) {\r\n\t\tif (value) {\r\n\t\t\tthis.data[word] |= mask;\r\n\t\t} else {\r\n\t\t\tthis.data[word] &= 0xFFFF ^ mask;\r\n\t\t}\r\n\t}\r\n\r\n\t/**\r\n\t * Returns the number of bits of space actually in use by this `BitSet` to represent bit values. The maximum element\r\n\t * in the set is the size - 1st element.\r\n\t */\r\n\tget size(): number {\r\n\t\treturn this.data.byteLength * 8;\r\n\t}\r\n\r\n\t/**\r\n\t * Returns a new byte array containing all the bits in this bit set.\r\n\t *\r\n\t * More precisely, if\r\n\t * `let bytes = s.toByteArray();`\r\n\t * then `bytes.length === (s.length()+7)/8` and `s.get(n) === ((bytes[n/8] & (1<<(n%8))) != 0)` for all\r\n\t * `n < 8 * bytes.length`.\r\n\t */\r\n\t// toByteArray(): Int8Array {\r\n\t// \tthrow new Error(\"NOT IMPLEMENTED\");\r\n\t// }\r\n\r\n\t/**\r\n\t * Returns a new integer array containing all the bits in this bit set.\r\n\t *\r\n\t * More precisely, if\r\n\t * `let integers = s.toIntegerArray();`\r\n\t * then `integers.length === (s.length()+31)/32` and `s.get(n) === ((integers[n/32] & (1<<(n%32))) != 0)` for all\r\n\t * `n < 32 * integers.length`.\r\n\t */\r\n\t// toIntegerArray(): Int32Array {\r\n\t// \tthrow new Error(\"NOT IMPLEMENTED\");\r\n\t// }\r\n\r\n\tpublic hashCode(): number {\r\n\t\treturn MurmurHash.hashCode(this.data, 22);\r\n\t}\r\n\r\n\t/**\r\n\t * Compares this object against the specified object. 
The result is `true` if and only if the argument is not\r\n\t * `undefined` and is a `Bitset` object that has exactly the same set of bits set to `true` as this bit set. That\r\n\t * is, for every nonnegative index `k`,\r\n\t *\r\n\t * ```\r\n\t * ((BitSet)obj).get(k) == this.get(k)\r\n\t * ```\r\n\t *\r\n\t * must be true. The current sizes of the two bit sets are not compared.\r\n\t */\r\n\tpublic equals(obj: any): boolean {\r\n\t\tif (obj === this) {\r\n\t\t\treturn true;\r\n\t\t} else if (!(obj instanceof BitSet)) {\r\n\t\t\treturn false;\r\n\t\t}\r\n\r\n\t\tconst len = this.length();\r\n\r\n\t\tif (len !== obj.length()) {\r\n\t\t\treturn false;\r\n\t\t}\r\n\r\n\t\tif (len === 0) {\r\n\t\t\treturn true;\r\n\t\t}\r\n\r\n\t\tlet bound = getIndex(len - 1);\r\n\t\tfor (let i = 0; i <= bound; i++) {\r\n\t\t\tif (this.data[i] !== obj.data[i]) {\r\n\t\t\t\treturn false;\r\n\t\t\t}\r\n\t\t}\r\n\r\n\t\treturn true;\r\n\t}\r\n\r\n\t/**\r\n\t * Returns a string representation of this bit set. For every index for which this `BitSet` contains a bit in the\r\n\t * set state, the decimal representation of that index is included in the result. 
Such indices are listed in order\r\n\t * from lowest to highest, separated by \", \" (a comma and a space) and surrounded by braces, resulting in the usual\r\n\t * mathematical notation for a set of integers.\r\n\t *\r\n\t * Example:\r\n\t *\r\n\t * BitSet drPepper = new BitSet();\r\n\t *\r\n\t * Now `drPepper.toString()` returns `\"{}\"`.\r\n\t *\r\n\t * drPepper.set(2);\r\n\t *\r\n\t * Now `drPepper.toString()` returns `\"{2}\"`.\r\n\t *\r\n\t * drPepper.set(4);\r\n\t * drPepper.set(10);\r\n\t *\r\n\t * Now `drPepper.toString()` returns `\"{2, 4, 10}\"`.\r\n\t */\r\n\tpublic toString(): string {\r\n\t\tlet result = \"{\";\r\n\r\n\t\tlet first = true;\r\n\t\tfor (let i = this.nextSetBit(0); i >= 0; i = this.nextSetBit(i + 1)) {\r\n\t\t\tif (first) {\r\n\t\t\t\tfirst = false;\r\n\t\t\t} else {\r\n\t\t\t\tresult += \", \";\r\n\t\t\t}\r\n\r\n\t\t\tresult += i;\r\n\t\t}\r\n\r\n\t\tresult += \"}\";\r\n\t\treturn result;\r\n\t}\r\n\r\n\t// static valueOf(bytes: Int8Array): BitSet;\r\n\t// static valueOf(buffer: ArrayBuffer): BitSet;\r\n\t// static valueOf(integers: Int32Array): BitSet;\r\n\t// static valueOf(data: Int8Array | Int32Array | ArrayBuffer): BitSet {\r\n\t// \tthrow new Error(\"NOT IMPLEMENTED\");\r\n\t// }\r\n\r\n\t/**\r\n\t * Performs a logical **XOR** of this bit set with the bit set argument. This bit set is modified so that a bit in\r\n\t * it has the value `true` if and only if one of the following statements holds:\r\n\t *\r\n\t * * The bit initially has the value `true`, and the corresponding bit in the argument has the value `false`.\r\n\t * * The bit initially has the value `false`, and the corresponding bit in the argument has the value `true`.\r\n\t */\r\n\tpublic xor(set: BitSet): void {\r\n\t\tconst data = this.data;\r\n\t\tconst other = set.data;\r\n\t\tconst minWords = Math.min(data.length, other.length);\r\n\t\tconst words = Math.max(data.length, other.length);\r\n\t\tconst dest = data.length === words ? 
data : new Uint16Array(words);\r\n\r\n\t\tlet lastWord = -1;\r\n\r\n\t\t// Xor those words both sets have in common\r\n\r\n\t\tfor (let i = 0; i < minWords; i++) {\r\n\t\t\tlet value = dest[i] = data[i] ^ other[i];\r\n\t\t\tif (value !== 0) {\r\n\t\t\t\tlastWord = i;\r\n\t\t\t}\r\n\t\t}\r\n\r\n\t\t// Copy words from larger set (if there is one)\r\n\r\n\t\tconst longer = data.length > other.length ? data : other;\r\n\t\tfor (let i = minWords; i < words; i++) {\r\n\t\t\tlet value = dest[i] = longer[i];\r\n\t\t\tif (value !== 0) {\r\n\t\t\t\tlastWord = i;\r\n\t\t\t}\r\n\t\t}\r\n\r\n\t\tif (lastWord === -1) {\r\n\t\t\tthis.data = EMPTY_DATA;\r\n\t\t} else if (dest.length === lastWord + 1) {\r\n\t\t\tthis.data = dest;\r\n\t\t} else {\r\n\t\t\tthis.data = dest.slice(0, lastWord + 1);\r\n\t\t}\r\n\t}\r\n\r\n\tpublic clone() {\r\n\t\treturn new BitSet(this);\r\n\t}\r\n\r\n\tpublic [Symbol.iterator](): IterableIterator {\r\n\t\treturn new BitSetIterator(this.data);\r\n\t}\r\n\r\n\t// Overrides formatting for nodejs assert etc.\r\n\tpublic [(util.inspect as any).custom](): string {\r\n\t\treturn \"BitSet \" + this.toString();\r\n\t}\r\n}\r\n\r\nclass BitSetIterator implements IterableIterator{\r\n\tprivate index = 0;\r\n\tprivate mask = 0xFFFF;\r\n\r\n\tconstructor(private data: Uint16Array) { }\r\n\r\n\tpublic next() {\r\n\t\twhile (this.index < this.data.length) {\r\n\t\t\tconst bits = this.data[this.index] & this.mask;\r\n\t\t\tif (bits !== 0) {\r\n\t\t\t\tconst bitNumber = unIndex(this.index) + findLSBSet(bits);\r\n\t\t\t\tthis.mask = bitsFor(bitNumber + 1, 15);\r\n\t\t\t\treturn { done: false, value: bitNumber };\r\n\t\t\t}\r\n\t\t\tthis.index++;\r\n\t\t\tthis.mask = 0xFFFF;\r\n\t\t}\r\n\t\treturn { done: true, value: -1 };\r\n\t}\r\n\r\n\tpublic [Symbol.iterator](): IterableIterator { return this; }\r\n}\r\n", "/*!\r\n * Copyright 2016 The ANTLR Project. All rights reserved.\r\n * Licensed under the BSD-3-Clause license. 
See LICENSE file in the project root for license information.\r\n */\r\n\r\n// ConvertTo-TS run at 2016-10-04T11:26:25.5488013-07:00\r\n\r\nimport { Array2DHashMap } from \"../misc/Array2DHashMap\";\r\nimport { Array2DHashSet } from \"../misc/Array2DHashSet\";\r\nimport { ArrayEqualityComparator } from \"../misc/ArrayEqualityComparator\";\r\nimport { ATN } from \"./ATN\";\r\nimport { ATNConfig } from \"./ATNConfig\";\r\nimport { ATNSimulator } from \"./ATNSimulator\";\r\nimport { ATNState } from \"./ATNState\";\r\nimport { BitSet } from \"../misc/BitSet\";\r\nimport { ConflictInfo } from \"./ConflictInfo\";\r\nimport { EqualityComparator } from \"../misc/EqualityComparator\";\r\nimport { JavaSet } from \"../misc/Stubs\";\r\nimport { NotNull, Override } from \"../Decorators\";\r\nimport { ObjectEqualityComparator } from \"../misc/ObjectEqualityComparator\";\r\nimport { PredictionContext } from \"./PredictionContext\";\r\nimport { PredictionContextCache } from \"./PredictionContextCache\";\r\nimport { SemanticContext } from \"./SemanticContext\";\r\n\r\nimport * as assert from \"assert\";\r\nimport * as Utils from \"../misc/Utils\";\r\n\r\ninterface KeyType { state: number; alt: number; }\r\n\r\nclass KeyTypeEqualityComparer implements EqualityComparator {\r\n\tpublic hashCode(key: KeyType) {\r\n\t\treturn key.state ^ key.alt;\r\n\t}\r\n\r\n\tpublic equals(a: KeyType, b: KeyType) {\r\n\t\treturn a.state === b.state && a.alt === b.alt;\r\n\t}\r\n\r\n\tpublic static readonly INSTANCE = new KeyTypeEqualityComparer();\r\n}\r\n\r\nfunction NewKeyedConfigMap(map?: Array2DHashMap) {\r\n\tif (map) {\r\n\t\treturn new Array2DHashMap(map);\r\n\t} else {\r\n\t\treturn new Array2DHashMap(KeyTypeEqualityComparer.INSTANCE);\r\n\t}\r\n}\r\n\r\n/**\r\n * Represents a set of ATN configurations (see `ATNConfig`). 
As configurations are added to the set, they are merged\r\n * with other `ATNConfig` instances already in the set when possible using the graph-structured stack.\r\n *\r\n * An instance of this class represents the complete set of positions (with context) in an ATN which would be associated\r\n * with a single DFA state. Its internal representation is more complex than traditional state used for NFA to DFA\r\n * conversion due to performance requirements (both improving speed and reducing memory overhead) as well as supporting\r\n * features such as semantic predicates and non-greedy operators in a form to support ANTLR's prediction algorithm.\r\n *\r\n * @author Sam Harwell\r\n */\r\nexport class ATNConfigSet implements JavaSet {\r\n\t/**\r\n\t * This maps (state, alt) -> merged {@link ATNConfig}. The key does not account for\r\n\t * the {@link ATNConfig#getSemanticContext} of the value, which is only a problem if a single\r\n\t * `ATNConfigSet` contains two configs with the same state and alternative\r\n\t * but different semantic contexts. When this case arises, the first config\r\n\t * added to this map stays, and the remaining configs are placed in {@link #unmerged}.\r\n\t *\r\n\t * This map is only used for optimizing the process of adding configs to the set,\r\n\t * and is `undefined` for read-only sets stored in the DFA.\r\n\t */\r\n\tprivate mergedConfigs?: Array2DHashMap;\r\n\r\n\t/**\r\n\t * This is an \"overflow\" list holding configs which cannot be merged with one\r\n\t * of the configs in {@link #mergedConfigs} but have a colliding key. 
This\r\n\t * occurs when two configs in the set have the same state and alternative but\r\n\t * different semantic contexts.\r\n\t *\r\n\t * This list is only used for optimizing the process of adding configs to the set,\r\n\t * and is `undefined` for read-only sets stored in the DFA.\r\n\t */\r\n\tprivate unmerged?: ATNConfig[];\r\n\r\n\t/**\r\n\t * This is a list of all configs in this set.\r\n\t */\r\n\tprivate configs: ATNConfig[];\r\n\r\n\tprivate _uniqueAlt: number = 0;\r\n\tprivate _conflictInfo?: ConflictInfo;\r\n\t// Used in parser and lexer. In lexer, it indicates we hit a pred\r\n\t// while computing a closure operation. Don't make a DFA state from this.\r\n\tprivate _hasSemanticContext: boolean = false;\r\n\tprivate _dipsIntoOuterContext: boolean = false;\r\n\t/**\r\n\t * When `true`, this config set represents configurations where the entire\r\n\t * outer context has been consumed by the ATN interpreter. This prevents the\r\n\t * {@link ParserATNSimulator#closure} from pursuing the global FOLLOW when a\r\n\t * rule stop state is reached with an empty prediction context.\r\n\t *\r\n\t * Note: `outermostConfigSet` and {@link #dipsIntoOuterContext} should never\r\n\t * be true at the same time.\r\n\t */\r\n\tprivate outermostConfigSet: boolean = false;\r\n\r\n\tprivate cachedHashCode: number = -1;\r\n\r\n\tconstructor();\r\n\tconstructor(set: ATNConfigSet, readonly: boolean);\r\n\tconstructor(set?: ATNConfigSet, readonly?: boolean) {\r\n\t\tif (!set) {\r\n\t\t\tthis.mergedConfigs = NewKeyedConfigMap();\r\n\t\t\tthis.unmerged = [];\r\n\t\t\tthis.configs = [];\r\n\r\n\t\t\tthis._uniqueAlt = ATN.INVALID_ALT_NUMBER;\r\n\t\t} else {\r\n\r\n\t\t\tif (readonly) {\r\n\t\t\t\tthis.mergedConfigs = undefined;\r\n\t\t\t\tthis.unmerged = undefined;\r\n\t\t\t} else if (!set.isReadOnly) {\r\n\t\t\t\tthis.mergedConfigs = NewKeyedConfigMap(set.mergedConfigs);\r\n\t\t\t\tthis.unmerged = (set.unmerged as ATNConfig[]).slice(0);\r\n\t\t\t} else {\r\n\t\t\t\tthis.mergedConfigs 
= NewKeyedConfigMap();\r\n\t\t\t\tthis.unmerged = [];\r\n\t\t\t}\r\n\r\n\t\t\tthis.configs = set.configs.slice(0);\r\n\r\n\t\t\tthis._dipsIntoOuterContext = set._dipsIntoOuterContext;\r\n\t\t\tthis._hasSemanticContext = set._hasSemanticContext;\r\n\t\t\tthis.outermostConfigSet = set.outermostConfigSet;\r\n\r\n\t\t\tif (readonly || !set.isReadOnly) {\r\n\t\t\t\tthis._uniqueAlt = set._uniqueAlt;\r\n\t\t\t\tthis._conflictInfo = set._conflictInfo;\r\n\t\t\t}\r\n\r\n\t\t\t// if (!readonly && set.isReadOnly) -> addAll is called from clone()\r\n\t\t}\r\n\t}\r\n\r\n\t/**\r\n\t * Get the set of all alternatives represented by configurations in this\r\n\t * set.\r\n\t */\r\n\t@NotNull\r\n\tpublic getRepresentedAlternatives(): BitSet {\r\n\t\tif (this._conflictInfo != null) {\r\n\t\t\treturn this._conflictInfo.conflictedAlts.clone();\r\n\t\t}\r\n\r\n\t\tlet alts: BitSet = new BitSet();\r\n\t\tfor (let config of this) {\r\n\t\t\talts.set(config.alt);\r\n\t\t}\r\n\r\n\t\treturn alts;\r\n\t}\r\n\r\n\tget isReadOnly(): boolean {\r\n\t\treturn this.mergedConfigs == null;\r\n\t}\r\n\r\n\tget isOutermostConfigSet(): boolean {\r\n\t\treturn this.outermostConfigSet;\r\n\t}\r\n\r\n\tset isOutermostConfigSet(outermostConfigSet: boolean) {\r\n\t\tif (this.outermostConfigSet && !outermostConfigSet) {\r\n\t\t\tthrow new Error(\"IllegalStateException\");\r\n\t\t}\r\n\r\n\t\tassert(!outermostConfigSet || !this._dipsIntoOuterContext);\r\n\t\tthis.outermostConfigSet = outermostConfigSet;\r\n\t}\r\n\r\n\tpublic getStates(): Array2DHashSet {\r\n\t\tlet states = new Array2DHashSet(ObjectEqualityComparator.INSTANCE);\r\n\t\tfor (let c of this.configs) {\r\n\t\t\tstates.add(c.state);\r\n\t\t}\r\n\r\n\t\treturn states;\r\n\t}\r\n\r\n\tpublic optimizeConfigs(interpreter: ATNSimulator): void {\r\n\t\tif (this.configs.length === 0) {\r\n\t\t\treturn;\r\n\t\t}\r\n\r\n\t\tfor (let config of this.configs) {\r\n\t\t\tconfig.context = 
interpreter.atn.getCachedContext(config.context);\r\n\t\t}\r\n\t}\r\n\r\n\tpublic clone(readonly: boolean): ATNConfigSet {\r\n\t\tlet copy: ATNConfigSet = new ATNConfigSet(this, readonly);\r\n\t\tif (!readonly && this.isReadOnly) {\r\n\t\t\tcopy.addAll(this.configs);\r\n\t\t}\r\n\r\n\t\treturn copy;\r\n\t}\r\n\r\n\t@Override\r\n\tget size(): number {\r\n\t\treturn this.configs.length;\r\n\t}\r\n\r\n\t@Override\r\n\tget isEmpty(): boolean {\r\n\t\treturn this.configs.length === 0;\r\n\t}\r\n\r\n\t@Override\r\n\tpublic contains(o: any): boolean {\r\n\t\tif (!(o instanceof ATNConfig)) {\r\n\t\t\treturn false;\r\n\t\t}\r\n\r\n\t\tif (this.mergedConfigs && this.unmerged) {\r\n\t\t\tlet config: ATNConfig = o;\r\n\t\t\tlet configKey = this.getKey(config);\r\n\t\t\tlet mergedConfig = this.mergedConfigs.get(configKey);\r\n\t\t\tif (mergedConfig != null && this.canMerge(config, configKey, mergedConfig)) {\r\n\t\t\t\treturn mergedConfig.contains(config);\r\n\t\t\t}\r\n\r\n\t\t\tfor (let c of this.unmerged) {\r\n\t\t\t\tif (c.contains(o)) {\r\n\t\t\t\t\treturn true;\r\n\t\t\t\t}\r\n\t\t\t}\r\n\t\t} else {\r\n\t\t\tfor (let c of this.configs) {\r\n\t\t\t\tif (c.contains(o)) {\r\n\t\t\t\t\treturn true;\r\n\t\t\t\t}\r\n\t\t\t}\r\n\t\t}\r\n\r\n\t\treturn false;\r\n\t}\r\n\r\n\t@Override\r\n\tpublic *[Symbol.iterator](): IterableIterator {\r\n\t\tyield* this.configs;\r\n\t}\r\n\r\n\t@Override\r\n\tpublic toArray(): ATNConfig[] {\r\n\t\treturn this.configs;\r\n\t}\r\n\r\n\tpublic add(e: ATNConfig): boolean;\r\n\tpublic add(e: ATNConfig, contextCache: PredictionContextCache | undefined): boolean;\r\n\tpublic add(e: ATNConfig, contextCache?: PredictionContextCache): boolean {\r\n\t\tthis.ensureWritable();\r\n\t\tif (!this.mergedConfigs || !this.unmerged) {\r\n\t\t\tthrow new Error(\"Covered by ensureWritable but duplicated here for strict null check limitation\");\r\n\t\t}\r\n\r\n\t\tassert(!this.outermostConfigSet || !e.reachesIntoOuterContext);\r\n\r\n\t\tif (contextCache == null) 
{\r\n\t\t\tcontextCache = PredictionContextCache.UNCACHED;\r\n\t\t}\r\n\r\n\t\tlet addKey: boolean;\r\n\t\tlet key = this.getKey(e);\r\n\t\tlet mergedConfig = this.mergedConfigs.get(key);\r\n\t\taddKey = (mergedConfig == null);\r\n\t\tif (mergedConfig != null && this.canMerge(e, key, mergedConfig)) {\r\n\t\t\tmergedConfig.outerContextDepth = Math.max(mergedConfig.outerContextDepth, e.outerContextDepth);\r\n\t\t\tif (e.isPrecedenceFilterSuppressed) {\r\n\t\t\t\tmergedConfig.isPrecedenceFilterSuppressed = true;\r\n\t\t\t}\r\n\r\n\t\t\tlet joined: PredictionContext = PredictionContext.join(mergedConfig.context, e.context, contextCache);\r\n\t\t\tthis.updatePropertiesForMergedConfig(e);\r\n\t\t\tif (mergedConfig.context === joined) {\r\n\t\t\t\treturn false;\r\n\t\t\t}\r\n\r\n\t\t\tmergedConfig.context = joined;\r\n\t\t\treturn true;\r\n\t\t}\r\n\r\n\t\tfor (let i = 0; i < this.unmerged.length; i++) {\r\n\t\t\tlet unmergedConfig: ATNConfig = this.unmerged[i];\r\n\t\t\tif (this.canMerge(e, key, unmergedConfig)) {\r\n\t\t\t\tunmergedConfig.outerContextDepth = Math.max(unmergedConfig.outerContextDepth, e.outerContextDepth);\r\n\t\t\t\tif (e.isPrecedenceFilterSuppressed) {\r\n\t\t\t\t\tunmergedConfig.isPrecedenceFilterSuppressed = true;\r\n\t\t\t\t}\r\n\r\n\t\t\t\tlet joined: PredictionContext = PredictionContext.join(unmergedConfig.context, e.context, contextCache);\r\n\t\t\t\tthis.updatePropertiesForMergedConfig(e);\r\n\t\t\t\tif (unmergedConfig.context === joined) {\r\n\t\t\t\t\treturn false;\r\n\t\t\t\t}\r\n\r\n\t\t\t\tunmergedConfig.context = joined;\r\n\r\n\t\t\t\tif (addKey) {\r\n\t\t\t\t\tthis.mergedConfigs.put(key, unmergedConfig);\r\n\t\t\t\t\tthis.unmerged.splice(i, 1);\r\n\t\t\t\t}\r\n\r\n\t\t\t\treturn true;\r\n\t\t\t}\r\n\t\t}\r\n\r\n\t\tthis.configs.push(e);\r\n\t\tif (addKey) {\r\n\t\t\tthis.mergedConfigs.put(key, e);\r\n\t\t} else {\r\n\t\t\tthis.unmerged.push(e);\r\n\t\t}\r\n\r\n\t\tthis.updatePropertiesForAddedConfig(e);\r\n\t\treturn 
true;\r\n\t}\r\n\r\n\tprivate updatePropertiesForMergedConfig(config: ATNConfig): void {\r\n\t\t// merged configs can't change the alt or semantic context\r\n\t\tthis._dipsIntoOuterContext = this._dipsIntoOuterContext || config.reachesIntoOuterContext;\r\n\t\tassert(!this.outermostConfigSet || !this._dipsIntoOuterContext);\r\n\t}\r\n\r\n\tprivate updatePropertiesForAddedConfig(config: ATNConfig): void {\r\n\t\tif (this.configs.length === 1) {\r\n\t\t\tthis._uniqueAlt = config.alt;\r\n\t\t} else if (this._uniqueAlt !== config.alt) {\r\n\t\t\tthis._uniqueAlt = ATN.INVALID_ALT_NUMBER;\r\n\t\t}\r\n\r\n\t\tthis._hasSemanticContext = this._hasSemanticContext || !SemanticContext.NONE.equals(config.semanticContext);\r\n\t\tthis._dipsIntoOuterContext = this._dipsIntoOuterContext || config.reachesIntoOuterContext;\r\n\t\tassert(!this.outermostConfigSet || !this._dipsIntoOuterContext);\r\n\t}\r\n\r\n\tprotected canMerge(left: ATNConfig, leftKey: { state: number, alt: number }, right: ATNConfig): boolean {\r\n\t\tif (left.state.stateNumber !== right.state.stateNumber) {\r\n\t\t\treturn false;\r\n\t\t}\r\n\r\n\t\tif (leftKey.alt !== right.alt) {\r\n\t\t\treturn false;\r\n\t\t}\r\n\r\n\t\treturn left.semanticContext.equals(right.semanticContext);\r\n\t}\r\n\r\n\tprotected getKey(e: ATNConfig): { state: number, alt: number } {\r\n\t\treturn { state: e.state.stateNumber, alt: e.alt };\r\n\t}\r\n\r\n\t@Override\r\n\tpublic containsAll(c: Iterable): boolean {\r\n\t\tfor (let o of c) {\r\n\t\t\tif (!(o instanceof ATNConfig)) {\r\n\t\t\t\treturn false;\r\n\t\t\t}\r\n\r\n\t\t\tif (!this.contains(o)) {\r\n\t\t\t\treturn false;\r\n\t\t\t}\r\n\t\t}\r\n\r\n\t\treturn true;\r\n\t}\r\n\r\n\tpublic addAll(c: Iterable): boolean;\r\n\tpublic addAll(c: Iterable, contextCache: PredictionContextCache): boolean;\r\n\tpublic addAll(c: Iterable, contextCache?: PredictionContextCache): boolean {\r\n\t\tthis.ensureWritable();\r\n\r\n\t\tlet changed: boolean = false;\r\n\t\tfor (let group of c) 
{\r\n\t\t\tif (this.add(group, contextCache)) {\r\n\t\t\t\tchanged = true;\r\n\t\t\t}\r\n\t\t}\r\n\r\n\t\treturn changed;\r\n\t}\r\n\r\n\t@Override\r\n\tpublic clear(): void {\r\n\t\tthis.ensureWritable();\r\n\t\tif (!this.mergedConfigs || !this.unmerged) {\r\n\t\t\tthrow new Error(\"Covered by ensureWritable but duplicated here for strict null check limitation\");\r\n\t\t}\r\n\r\n\t\tthis.mergedConfigs.clear();\r\n\t\tthis.unmerged.length = 0;\r\n\t\tthis.configs.length = 0;\r\n\r\n\t\tthis._dipsIntoOuterContext = false;\r\n\t\tthis._hasSemanticContext = false;\r\n\t\tthis._uniqueAlt = ATN.INVALID_ALT_NUMBER;\r\n\t\tthis._conflictInfo = undefined;\r\n\t}\r\n\r\n\t@Override\r\n\tpublic equals(obj: any): boolean {\r\n\t\tif (this === obj) {\r\n\t\t\treturn true;\r\n\t\t}\r\n\r\n\t\tif (!(obj instanceof ATNConfigSet)) {\r\n\t\t\treturn false;\r\n\t\t}\r\n\r\n\t\treturn this.outermostConfigSet === obj.outermostConfigSet\r\n\t\t\t&& Utils.equals(this._conflictInfo, obj._conflictInfo)\r\n\t\t\t&& ArrayEqualityComparator.INSTANCE.equals(this.configs, obj.configs);\r\n\t}\r\n\r\n\t@Override\r\n\tpublic hashCode(): number {\r\n\t\tif (this.isReadOnly && this.cachedHashCode !== -1) {\r\n\t\t\treturn this.cachedHashCode;\r\n\t\t}\r\n\r\n\t\tlet hashCode: number = 1;\r\n\t\thashCode = 5 * hashCode ^ (this.outermostConfigSet ? 
1 : 0);\r\n\t\thashCode = 5 * hashCode ^ ArrayEqualityComparator.INSTANCE.hashCode(this.configs);\r\n\r\n\t\tif (this.isReadOnly) {\r\n\t\t\tthis.cachedHashCode = hashCode;\r\n\t\t}\r\n\r\n\t\treturn hashCode;\r\n\t}\r\n\r\n\tpublic toString(): string;\r\n\tpublic toString(showContext: boolean): string;\r\n\tpublic toString(showContext?: boolean): string {\r\n\t\tif (showContext == null) {\r\n\t\t\tshowContext = false;\r\n\t\t}\r\n\r\n\t\tlet buf = \"\";\r\n\t\tlet sortedConfigs = this.configs.slice(0);\r\n\t\tsortedConfigs.sort((o1, o2) => {\r\n\t\t\tif (o1.alt !== o2.alt) {\r\n\t\t\t\treturn o1.alt - o2.alt;\r\n\t\t\t}\r\n\t\t\telse if (o1.state.stateNumber !== o2.state.stateNumber) {\r\n\t\t\t\treturn o1.state.stateNumber - o2.state.stateNumber;\r\n\t\t\t}\r\n\t\t\telse {\r\n\t\t\t\treturn o1.semanticContext.toString().localeCompare(o2.semanticContext.toString());\r\n\t\t\t}\r\n\t\t});\r\n\r\n\t\tbuf += (\"[\");\r\n\t\tfor (let i = 0; i < sortedConfigs.length; i++) {\r\n\t\t\tif (i > 0) {\r\n\t\t\t\tbuf += (\", \");\r\n\t\t\t}\r\n\t\t\tbuf += (sortedConfigs[i].toString(undefined, true, showContext));\r\n\t\t}\r\n\t\tbuf += (\"]\");\r\n\r\n\t\tif (this._hasSemanticContext) {\r\n\t\t\tbuf += (\",hasSemanticContext=\") + (this._hasSemanticContext);\r\n\t\t}\r\n\t\tif (this._uniqueAlt !== ATN.INVALID_ALT_NUMBER) {\r\n\t\t\tbuf += (\",uniqueAlt=\") + (this._uniqueAlt);\r\n\t\t}\r\n\t\tif (this._conflictInfo != null) {\r\n\t\t\tbuf += (\",conflictingAlts=\") + (this._conflictInfo.conflictedAlts);\r\n\t\t\tif (!this._conflictInfo.isExact) {\r\n\t\t\t\tbuf += (\"*\");\r\n\t\t\t}\r\n\t\t}\r\n\t\tif (this._dipsIntoOuterContext) {\r\n\t\t\tbuf += (\",dipsIntoOuterContext\");\r\n\t\t}\r\n\t\treturn buf.toString();\r\n\t}\r\n\r\n\tget uniqueAlt(): number {\r\n\t\treturn this._uniqueAlt;\r\n\t}\r\n\r\n\tget hasSemanticContext(): boolean {\r\n\t\treturn this._hasSemanticContext;\r\n\t}\r\n\r\n\tset hasSemanticContext(value: boolean) 
{\r\n\t\tthis.ensureWritable();\r\n\t\tthis._hasSemanticContext = value;\r\n\t}\r\n\r\n\tget conflictInfo(): ConflictInfo | undefined {\r\n\t\treturn this._conflictInfo;\r\n\t}\r\n\r\n\tset conflictInfo(conflictInfo: ConflictInfo | undefined) {\r\n\t\tthis.ensureWritable();\r\n\t\tthis._conflictInfo = conflictInfo;\r\n\t}\r\n\r\n\tget conflictingAlts(): BitSet | undefined {\r\n\t\tif (this._conflictInfo == null) {\r\n\t\t\treturn undefined;\r\n\t\t}\r\n\r\n\t\treturn this._conflictInfo.conflictedAlts;\r\n\t}\r\n\r\n\tget isExactConflict(): boolean {\r\n\t\tif (this._conflictInfo == null) {\r\n\t\t\treturn false;\r\n\t\t}\r\n\r\n\t\treturn this._conflictInfo.isExact;\r\n\t}\r\n\r\n\tget dipsIntoOuterContext(): boolean {\r\n\t\treturn this._dipsIntoOuterContext;\r\n\t}\r\n\r\n\tpublic get(index: number): ATNConfig {\r\n\t\treturn this.configs[index];\r\n\t}\r\n\r\n\tprotected ensureWritable(): void {\r\n\t\tif (this.isReadOnly) {\r\n\t\t\tthrow new Error(\"This ATNConfigSet is read only.\");\r\n\t\t}\r\n\t}\r\n}\r\n", "/*!\r\n * Copyright 2016 The ANTLR Project. All rights reserved.\r\n * Licensed under the BSD-3-Clause license. See LICENSE file in the project root for license information.\r\n */\r\n\r\n// ConvertTo-TS run at 2016-10-04T11:26:38.7771056-07:00\r\n\r\nimport { AcceptStateInfo } from \"./AcceptStateInfo\";\r\nimport { ATN } from \"../atn/ATN\";\r\nimport { ATNConfigSet } from \"../atn/ATNConfigSet\";\r\nimport { BitSet } from \"../misc/BitSet\";\r\nimport { LexerActionExecutor } from \"../atn/LexerActionExecutor\";\r\nimport { MurmurHash } from \"../misc/MurmurHash\";\r\nimport { NotNull, Override } from \"../Decorators\";\r\nimport { PredictionContext } from \"../atn/PredictionContext\";\r\nimport { SemanticContext } from \"../atn/SemanticContext\";\r\n\r\nimport * as assert from \"assert\";\r\n\r\n/** A DFA state represents a set of possible ATN configurations.\r\n * As Aho, Sethi, Ullman p. 
117 says \"The DFA uses its state\r\n * to keep track of all possible states the ATN can be in after\r\n * reading each input symbol. That is to say, after reading\r\n * input a1a2..an, the DFA is in a state that represents the\r\n * subset T of the states of the ATN that are reachable from the\r\n * ATN's start state along some path labeled a1a2..an.\"\r\n * In conventional NFA→DFA conversion, therefore, the subset T\r\n * would be a bitset representing the set of states the\r\n * ATN could be in. We need to track the alt predicted by each\r\n * state as well, however. More importantly, we need to maintain\r\n * a stack of states, tracking the closure operations as they\r\n * jump from rule to rule, emulating rule invocations (method calls).\r\n * I have to add a stack to simulate the proper lookahead sequences for\r\n * the underlying LL grammar from which the ATN was derived.\r\n *\r\n * I use a set of ATNConfig objects not simple states. An ATNConfig\r\n * is both a state (ala normal conversion) and a RuleContext describing\r\n * the chain of rules (if any) followed to arrive at that state.\r\n *\r\n * A DFA state may have multiple references to a particular state,\r\n * but with different ATN contexts (with same or different alts)\r\n * meaning that state was reached via a different set of rule invocations.\r\n */\r\nexport class DFAState {\r\n\tpublic stateNumber: number = -1;\r\n\r\n\t@NotNull\r\n\tpublic configs: ATNConfigSet;\r\n\r\n\t/** `edges.get(symbol)` points to target of symbol.\r\n\t */\r\n\t@NotNull\r\n\tprivate readonly edges: Map;\r\n\r\n\tprivate _acceptStateInfo: AcceptStateInfo | undefined;\r\n\r\n\t/** These keys for these edges are the top level element of the global context. */\r\n\t@NotNull\r\n\tprivate readonly contextEdges: Map;\r\n\r\n\t/** Symbols in this set require a global context transition before matching an input symbol. 
*/\r\n\tprivate contextSymbols: BitSet | undefined;\r\n\r\n\t/**\r\n\t * This list is computed by {@link ParserATNSimulator#predicateDFAState}.\r\n\t */\r\n\tpublic predicates: DFAState.PredPrediction[] | undefined;\r\n\r\n\t/**\r\n\t * Constructs a new `DFAState`.\r\n\t *\r\n\t * @param configs The set of ATN configurations defining this state.\r\n\t */\r\n\tconstructor(configs: ATNConfigSet) {\r\n\t\tthis.configs = configs;\r\n\t\tthis.edges = new Map();\r\n\t\tthis.contextEdges = new Map();\r\n\t}\r\n\r\n\tget isContextSensitive(): boolean {\r\n\t\treturn !!this.contextSymbols;\r\n\t}\r\n\r\n\tpublic isContextSymbol(symbol: number): boolean {\r\n\t\tif (!this.isContextSensitive) {\r\n\t\t\treturn false;\r\n\t\t}\r\n\r\n\t\treturn this.contextSymbols!.get(symbol);\r\n\t}\r\n\r\n\tpublic setContextSymbol(symbol: number): void {\r\n\t\tassert(this.isContextSensitive);\r\n\t\tthis.contextSymbols!.set(symbol);\r\n\t}\r\n\r\n\tpublic setContextSensitive(atn: ATN): void {\r\n\t\tassert(!this.configs.isOutermostConfigSet);\r\n\t\tif (this.isContextSensitive) {\r\n\t\t\treturn;\r\n\t\t}\r\n\r\n\t\tif (!this.contextSymbols) {\r\n\t\t\tthis.contextSymbols = new BitSet();\r\n\t\t}\r\n\t}\r\n\r\n\tget acceptStateInfo(): AcceptStateInfo | undefined {\r\n\t\treturn this._acceptStateInfo;\r\n\t}\r\n\r\n\tset acceptStateInfo(acceptStateInfo: AcceptStateInfo | undefined) {\r\n\t\tthis._acceptStateInfo = acceptStateInfo;\r\n\t}\r\n\r\n\tget isAcceptState(): boolean {\r\n\t\treturn !!this._acceptStateInfo;\r\n\t}\r\n\r\n\tget prediction(): number {\r\n\t\tif (!this._acceptStateInfo) {\r\n\t\t\treturn ATN.INVALID_ALT_NUMBER;\r\n\t\t}\r\n\r\n\t\treturn this._acceptStateInfo.prediction;\r\n\t}\r\n\r\n\tget lexerActionExecutor(): LexerActionExecutor | undefined {\r\n\t\tif (!this._acceptStateInfo) {\r\n\t\t\treturn undefined;\r\n\t\t}\r\n\r\n\t\treturn this._acceptStateInfo.lexerActionExecutor;\r\n\t}\r\n\r\n\tpublic getTarget(symbol: number): DFAState | undefined {\r\n\t\treturn 
this.edges.get(symbol);\r\n\t}\r\n\r\n\tpublic setTarget(symbol: number, target: DFAState): void {\r\n\t\tthis.edges.set(symbol, target);\r\n\t}\r\n\r\n\tpublic getEdgeMap(): Map {\r\n\t\treturn this.edges;\r\n\t}\r\n\r\n\tpublic getContextTarget(invokingState: number): DFAState | undefined {\r\n\t\tif (invokingState === PredictionContext.EMPTY_FULL_STATE_KEY) {\r\n\t\t\tinvokingState = -1;\r\n\t\t}\r\n\r\n\t\treturn this.contextEdges.get(invokingState);\r\n\t}\r\n\r\n\tpublic setContextTarget(invokingState: number, target: DFAState): void {\r\n\t\tif (!this.isContextSensitive) {\r\n\t\t\tthrow new Error(\"The state is not context sensitive.\");\r\n\t\t}\r\n\r\n\t\tif (invokingState === PredictionContext.EMPTY_FULL_STATE_KEY) {\r\n\t\t\tinvokingState = -1;\r\n\t\t}\r\n\r\n\t\tthis.contextEdges.set(invokingState, target);\r\n\t}\r\n\r\n\tpublic getContextEdgeMap(): Map {\r\n\t\tlet map = new Map(this.contextEdges);\r\n\t\tlet existing = map.get(-1);\r\n\t\tif (existing !== undefined) {\r\n\t\t\tif (map.size === 1) {\r\n\t\t\t\tlet result = new Map();\r\n\t\t\t\tresult.set(PredictionContext.EMPTY_FULL_STATE_KEY, existing);\r\n\t\t\t\treturn result;\r\n\t\t\t}\r\n\t\t\telse {\r\n\t\t\t\tmap.delete(-1);\r\n\t\t\t\tmap.set(PredictionContext.EMPTY_FULL_STATE_KEY, existing);\r\n\t\t\t}\r\n\t\t}\r\n\r\n\t\treturn map;\r\n\t}\r\n\r\n\t@Override\r\n\tpublic hashCode(): number {\r\n\t\tlet hash: number = MurmurHash.initialize(7);\r\n\t\thash = MurmurHash.update(hash, this.configs.hashCode());\r\n\t\thash = MurmurHash.finish(hash, 1);\r\n\t\treturn hash;\r\n\t}\r\n\r\n\t/**\r\n\t * Two {@link DFAState} instances are equal if their ATN configuration sets\r\n\t * are the same. 
This method is used to see if a state already exists.\r\n\t *\r\n\t * Because the number of alternatives and number of ATN configurations are\r\n\t * finite, there is a finite number of DFA states that can be processed.\r\n\t * This is necessary to show that the algorithm terminates.\r\n\t *\r\n\t * Cannot test the DFA state numbers here because in\r\n\t * {@link ParserATNSimulator#addDFAState} we need to know if any other state\r\n\t * exists that has this exact set of ATN configurations. The\r\n\t * {@link #stateNumber} is irrelevant.\r\n\t */\r\n\t@Override\r\n\tpublic equals(o: any): boolean {\r\n\t\t// compare set of ATN configurations in this set with other\r\n\t\tif (this === o) {\r\n\t\t\treturn true;\r\n\t\t}\r\n\r\n\t\tif (!(o instanceof DFAState)) {\r\n\t\t\treturn false;\r\n\t\t}\r\n\r\n\t\tlet other: DFAState = o;\r\n\t\tlet sameSet: boolean = this.configs.equals(other.configs);\r\n//\t\tSystem.out.println(\"DFAState.equals: \"+configs+(sameSet?\"==\":\"!=\")+other.configs);\r\n\t\treturn sameSet;\r\n\t}\r\n\r\n\t@Override\r\n\tpublic toString(): string {\r\n\t\tlet buf = \"\";\r\n\t\tbuf += (this.stateNumber) + (\":\") + (this.configs);\r\n\t\tif (this.isAcceptState) {\r\n\t\t\tbuf += (\"=>\");\r\n\t\t\tif (this.predicates) {\r\n\t\t\t\tbuf += this.predicates;\r\n\t\t\t}\r\n\t\t\telse {\r\n\t\t\t\tbuf += (this.prediction);\r\n\t\t\t}\r\n\t\t}\r\n\t\treturn buf.toString();\r\n\t}\r\n}\r\n\r\nexport namespace DFAState {\r\n\t/** Map a predicate to a predicted alternative. */\r\n\texport class PredPrediction {\r\n\t\t@NotNull\r\n\t\tpublic pred: SemanticContext; // never null; at least SemanticContext.NONE\r\n\t\tpublic alt: number;\r\n\t\tconstructor(@NotNull pred: SemanticContext, alt: number) {\r\n\t\t\tthis.alt = alt;\r\n\t\t\tthis.pred = pred;\r\n\t\t}\r\n\r\n\t\t@Override\r\n\t\tpublic toString(): string {\r\n\t\t\treturn \"(\" + this.pred + \", \" + this.alt + \")\";\r\n\t\t}\r\n\t}\r\n}\r\n", "/*!\r\n * Copyright 2016 The ANTLR Project. 
All rights reserved.\r\n * Licensed under the BSD-3-Clause license. See LICENSE file in the project root for license information.\r\n */\r\n\r\n// ConvertTo-TS run at 2016-10-04T11:26:27.3184311-07:00\r\n\r\nimport { ATN } from \"./ATN\";\r\nimport { ATNConfigSet } from \"./ATNConfigSet\";\r\nimport { DFAState } from \"../dfa/DFAState\";\r\nimport { NotNull } from \"../Decorators\";\r\nimport { PredictionContext } from \"./PredictionContext\";\r\n\r\nexport abstract class ATNSimulator {\r\n\t/** Must distinguish between missing edge and edge we know leads nowhere */\r\n\tprivate static _ERROR: DFAState;\r\n\t@NotNull\r\n\tstatic get ERROR(): DFAState {\r\n\t\tif (!ATNSimulator._ERROR) {\r\n\t\t\tATNSimulator._ERROR = new DFAState(new ATNConfigSet());\r\n\t\t\tATNSimulator._ERROR.stateNumber = PredictionContext.EMPTY_FULL_STATE_KEY;\r\n\t\t}\r\n\r\n\t\treturn ATNSimulator._ERROR;\r\n\t}\r\n\r\n\t@NotNull\r\n\tpublic atn: ATN;\r\n\r\n\tconstructor(@NotNull atn: ATN) {\r\n\t\tthis.atn = atn;\r\n\t}\r\n\r\n\tpublic abstract reset(): void;\r\n\r\n\t/**\r\n\t * Clear the DFA cache used by the current instance. Since the DFA cache may\r\n\t * be shared by multiple ATN simulators, this method may affect the\r\n\t * performance (but not accuracy) of other parsers which are being used\r\n\t * concurrently.\r\n\t *\r\n\t * @ if the current instance does not\r\n\t * support clearing the DFA.\r\n\t *\r\n\t * @since 4.3\r\n\t */\r\n\tpublic clearDFA(): void {\r\n\t\tthis.atn.clearDFA();\r\n\t}\r\n}\r\n\r\nexport namespace ATNSimulator {\r\n\tconst RULE_VARIANT_DELIMITER: string = \"$\";\r\n\tconst RULE_LF_VARIANT_MARKER: string = \"$lf$\";\r\n\tconst RULE_NOLF_VARIANT_MARKER: string = \"$nolf$\";\r\n}\r\n", "/*!\r\n * Copyright 2016 The ANTLR Project. All rights reserved.\r\n * Licensed under the BSD-3-Clause license. 
See LICENSE file in the project root for license information.\r\n */\r\n\r\n// ConvertTo-TS run at 2016-10-04T11:26:50.5479602-07:00\r\n\r\nimport { ANTLRErrorListener } from \"./ANTLRErrorListener\";\r\nimport { RecognitionException } from \"./RecognitionException\";\r\nimport { Recognizer } from \"./Recognizer\";\r\n\r\n/**\r\n *\r\n * @author Sam Harwell\r\n */\r\nexport class ConsoleErrorListener implements ANTLRErrorListener {\r\n\t/**\r\n\t * Provides a default instance of {@link ConsoleErrorListener}.\r\n\t */\r\n\tpublic static readonly INSTANCE: ConsoleErrorListener = new ConsoleErrorListener();\r\n\r\n\t/**\r\n\t * {@inheritDoc}\r\n\t *\r\n\t * This implementation prints messages to {@link System#err} containing the\r\n\t * values of `line`, `charPositionInLine`, and `msg` using\r\n\t * the following format.\r\n\t *\r\n\t *
\r\n\t * line *line*:*charPositionInLine* *msg*\r\n\t * 
\r\n\t */\r\n\tpublic syntaxError(\r\n\t\trecognizer: Recognizer,\r\n\t\toffendingSymbol: T,\r\n\t\tline: number,\r\n\t\tcharPositionInLine: number,\r\n\t\tmsg: string,\r\n\t\te: RecognitionException | undefined): void {\r\n\t\tconsole.error(`line ${line}:${charPositionInLine} ${msg}`);\r\n\t}\r\n}\r\n", "/*!\r\n * Copyright 2016 The ANTLR Project. All rights reserved.\r\n * Licensed under the BSD-3-Clause license. See LICENSE file in the project root for license information.\r\n */\r\n\r\n// ConvertTo-TS run at 2016-10-04T11:26:56.8126690-07:00\r\nimport { ANTLRErrorListener } from \"./ANTLRErrorListener\";\r\nimport { RecognitionException } from \"./RecognitionException\";\r\nimport { Recognizer } from \"./Recognizer\";\r\nimport { Override, NotNull } from \"./Decorators\";\r\n\r\n/**\r\n * This implementation of {@link ANTLRErrorListener} dispatches all calls to a\r\n * collection of delegate listeners. This reduces the effort required to support multiple\r\n * listeners.\r\n *\r\n * @author Sam Harwell\r\n */\r\nexport class ProxyErrorListener> implements ANTLRErrorListener {\r\n\r\n\tconstructor(private delegates: TListener[]) {\r\n\t\tif (!delegates) {\r\n\t\t\tthrow new Error(\"Invalid delegates\");\r\n\t\t}\r\n\t}\r\n\r\n\tprotected getDelegates(): ReadonlyArray {\r\n\t\treturn this.delegates;\r\n\t}\r\n\r\n\t@Override\r\n\tpublic syntaxError(\r\n\t\t@NotNull recognizer: Recognizer,\r\n\t\toffendingSymbol: T | undefined,\r\n\t\tline: number,\r\n\t\tcharPositionInLine: number,\r\n\t\t@NotNull msg: string,\r\n\t\te: RecognitionException | undefined): void {\r\n\t\tthis.delegates.forEach((listener) => {\r\n\t\t\tif (listener.syntaxError) {\r\n\t\t\t\tlistener.syntaxError(recognizer, offendingSymbol, line, charPositionInLine, msg, e);\r\n\t\t\t}\r\n\t\t});\r\n\t}\r\n}\r\n", "/*!\r\n * Copyright 2016 The ANTLR Project. All rights reserved.\r\n * Licensed under the BSD-3-Clause license. 
See LICENSE file in the project root for license information.\r\n */\r\n\r\n// ConvertTo-TS run at 2016-10-04T11:26:57.1954441-07:00\r\nimport { ANTLRErrorListener } from \"./ANTLRErrorListener\";\r\nimport { ATN } from \"./atn/ATN\";\r\nimport { ATNSimulator } from \"./atn/ATNSimulator\";\r\nimport { ConsoleErrorListener } from \"./ConsoleErrorListener\";\r\nimport { IntStream } from \"./IntStream\";\r\nimport { ParseInfo } from \"./atn/ParseInfo\";\r\nimport { ProxyErrorListener } from \"./ProxyErrorListener\";\r\nimport { RecognitionException } from \"./RecognitionException\";\r\nimport { RuleContext } from \"./RuleContext\";\r\nimport { SuppressWarnings, NotNull } from \"./Decorators\";\r\nimport { Token } from \"./Token\";\r\nimport { Vocabulary } from \"./Vocabulary\";\r\nimport { VocabularyImpl } from \"./VocabularyImpl\";\r\n\r\nimport * as Utils from \"./misc/Utils\";\r\n\r\nexport abstract class Recognizer {\r\n\tpublic static readonly EOF: number = -1;\r\n\r\n\tprivate static tokenTypeMapCache =\r\n\t\tnew WeakMap>();\r\n\tprivate static ruleIndexMapCache =\r\n\t\tnew WeakMap>();\r\n\r\n\t@SuppressWarnings(\"serial\")\r\n\t@NotNull\r\n\tprivate readonly _listeners: Array> = [ConsoleErrorListener.INSTANCE];\r\n\r\n\tprotected _interp!: ATNInterpreter;\r\n\r\n\tprivate _stateNumber = -1;\r\n\r\n\tpublic abstract readonly ruleNames: string[];\r\n\r\n\t/**\r\n\t * Get the vocabulary used by the recognizer.\r\n\t *\r\n\t * @returns A {@link Vocabulary} instance providing information about the\r\n\t * vocabulary used by the grammar.\r\n\t */\r\n\tpublic abstract readonly vocabulary: Vocabulary;\r\n\r\n\t/**\r\n\t * Get a map from token names to token types.\r\n\t *\r\n\t * Used for XPath and tree pattern compilation.\r\n\t */\r\n\t@NotNull\r\n\tpublic getTokenTypeMap(): ReadonlyMap {\r\n\t\tlet vocabulary: Vocabulary = this.vocabulary;\r\n\t\tlet result = Recognizer.tokenTypeMapCache.get(vocabulary);\r\n\t\tif (result == null) {\r\n\t\t\tlet intermediateResult 
= new Map();\r\n\t\t\tfor (let i = 0; i <= this.atn.maxTokenType; i++) {\r\n\t\t\t\tlet literalName = vocabulary.getLiteralName(i);\r\n\t\t\t\tif (literalName != null) {\r\n\t\t\t\t\tintermediateResult.set(literalName, i);\r\n\t\t\t\t}\r\n\r\n\t\t\t\tlet symbolicName = vocabulary.getSymbolicName(i);\r\n\t\t\t\tif (symbolicName != null) {\r\n\t\t\t\t\tintermediateResult.set(symbolicName, i);\r\n\t\t\t\t}\r\n\t\t\t}\r\n\r\n\t\t\tintermediateResult.set(\"EOF\", Token.EOF);\r\n\t\t\tresult = intermediateResult;\r\n\t\t\tRecognizer.tokenTypeMapCache.set(vocabulary, result);\r\n\t\t}\r\n\r\n\t\treturn result;\r\n\t}\r\n\r\n\t/**\r\n\t * Get a map from rule names to rule indexes.\r\n\t *\r\n\t * Used for XPath and tree pattern compilation.\r\n\t */\r\n\t@NotNull\r\n\tpublic getRuleIndexMap(): ReadonlyMap {\r\n\t\tlet ruleNames: string[] = this.ruleNames;\r\n\t\tif (ruleNames == null) {\r\n\t\t\tthrow new Error(\"The current recognizer does not provide a list of rule names.\");\r\n\t\t}\r\n\r\n\t\tlet result: ReadonlyMap | undefined = Recognizer.ruleIndexMapCache.get(ruleNames);\r\n\t\tif (result == null) {\r\n\t\t\tresult = Utils.toMap(ruleNames);\r\n\t\t\tRecognizer.ruleIndexMapCache.set(ruleNames, result);\r\n\t\t}\r\n\r\n\t\treturn result;\r\n\t}\r\n\r\n\tpublic getTokenType(tokenName: string): number {\r\n\t\tlet ttype = this.getTokenTypeMap().get(tokenName);\r\n\t\tif (ttype != null) {\r\n\t\t\treturn ttype;\r\n\t\t}\r\n\t\treturn Token.INVALID_TYPE;\r\n\t}\r\n\r\n\t/**\r\n\t * If this recognizer was generated, it will have a serialized ATN\r\n\t * representation of the grammar.\r\n\t *\r\n\t * For interpreters, we don't know their serialized ATN despite having\r\n\t * created the interpreter from it.\r\n\t */\r\n\t@NotNull\r\n\tget serializedATN(): string {\r\n\t\tthrow new Error(\"there is no serialized ATN\");\r\n\t}\r\n\r\n\t/** For debugging and other purposes, might want the grammar name.\r\n\t * Have ANTLR generate an implementation for this method.\r\n\t 
*/\r\n\tpublic abstract readonly grammarFileName: string;\r\n\r\n\t/**\r\n\t * Get the {@link ATN} used by the recognizer for prediction.\r\n\t *\r\n\t * @returns The {@link ATN} used by the recognizer for prediction.\r\n\t */\r\n\t@NotNull\r\n\tget atn(): ATN {\r\n\t\treturn this._interp.atn;\r\n\t}\r\n\r\n\t/**\r\n\t * Get the ATN interpreter used by the recognizer for prediction.\r\n\t *\r\n\t * @returns The ATN interpreter used by the recognizer for prediction.\r\n\t */\r\n\t@NotNull\r\n\tget interpreter(): ATNInterpreter {\r\n\t\treturn this._interp;\r\n\t}\r\n\r\n\t/**\r\n\t * Set the ATN interpreter used by the recognizer for prediction.\r\n\t *\r\n\t * @param interpreter The ATN interpreter used by the recognizer for\r\n\t * prediction.\r\n\t */\r\n\tset interpreter(@NotNull interpreter: ATNInterpreter) {\r\n\t\tthis._interp = interpreter;\r\n\t}\r\n\r\n\t/** If profiling during the parse/lex, this will return DecisionInfo records\r\n\t * for each decision in recognizer in a ParseInfo object.\r\n\t *\r\n\t * @since 4.3\r\n\t */\r\n\tget parseInfo(): Promise {\r\n\t\treturn Promise.resolve(undefined);\r\n\t}\r\n\r\n\t/** What is the error header, normally line/character position information? 
*/\r\n\t@NotNull\r\n\tpublic getErrorHeader(@NotNull e: RecognitionException): string {\r\n\t\tlet token = e.getOffendingToken();\r\n\t\tif (!token) {\r\n\t\t\treturn \"\";\r\n\t\t}\r\n\t\tlet line = token.line;\r\n\t\tlet charPositionInLine: number = token.charPositionInLine;\r\n\t\treturn \"line \" + line + \":\" + charPositionInLine;\r\n\t}\r\n\r\n\t/**\r\n\t * @exception NullPointerException if `listener` is `undefined`.\r\n\t */\r\n\tpublic addErrorListener(@NotNull listener: ANTLRErrorListener): void {\r\n\t\tif (!listener) {\r\n\t\t\tthrow new TypeError(\"listener must not be null\");\r\n\t\t}\r\n\t\tthis._listeners.push(listener);\r\n\t}\r\n\r\n\tpublic removeErrorListener(@NotNull listener: ANTLRErrorListener): void {\r\n\t\tlet position = this._listeners.indexOf(listener);\r\n\t\tif (position !== -1) {\r\n\t\t\tthis._listeners.splice(position, 1);\r\n\t\t}\r\n\t}\r\n\r\n\tpublic removeErrorListeners(): void {\r\n\t\tthis._listeners.length = 0;\r\n\t}\r\n\r\n\t@NotNull\r\n\tpublic getErrorListeners(): Array> {\r\n\t\treturn this._listeners.slice(0);\r\n\t}\r\n\r\n\tpublic getErrorListenerDispatch(): ANTLRErrorListener {\r\n\t\treturn new ProxyErrorListener>(this.getErrorListeners());\r\n\t}\r\n\r\n\t// subclass needs to override these if there are sempreds or actions\r\n\t// that the ATN interp needs to execute\r\n\tpublic sempred(\r\n\t\t_localctx: RuleContext | undefined,\r\n\t\truleIndex: number,\r\n\t\tactionIndex: number): boolean {\r\n\t\treturn true;\r\n\t}\r\n\r\n\tpublic precpred(\r\n\t\tlocalctx: RuleContext | undefined,\r\n\t\tprecedence: number): boolean {\r\n\t\treturn true;\r\n\t}\r\n\r\n\tpublic action(\r\n\t\t_localctx: RuleContext | undefined,\r\n\t\truleIndex: number,\r\n\t\tactionIndex: number): void {\r\n\t\t// intentionally empty\r\n\t}\r\n\r\n\tget state(): number {\r\n\t\treturn this._stateNumber;\r\n\t}\r\n\r\n\t/** Indicate that the recognizer has changed internal state that is\r\n\t * consistent with the ATN state passed in. 
This way we always know\r\n\t * where we are in the ATN as the parser goes along. The rule\r\n\t * context objects form a stack that lets us see the stack of\r\n\t * invoking rules. Combine this and we have complete ATN\r\n\t * configuration information.\r\n\t */\r\n\tset state(atnState: number) {\r\n//\t\tSystem.err.println(\"setState \"+atnState);\r\n\t\tthis._stateNumber = atnState;\r\n//\t\tif ( traceATNStates ) _ctx.trace(atnState);\r\n\t}\r\n\r\n\tpublic abstract readonly inputStream: IntStream | undefined;\r\n}\r\n", "/*!\r\n * Copyright 2016 The ANTLR Project. All rights reserved.\r\n * Licensed under the BSD-3-Clause license. See LICENSE file in the project root for license information.\r\n */\r\n\r\n// ConvertTo-TS run at 2016-10-04T11:26:59.5829654-07:00\r\n\r\nimport { NotNull, Override } from \"./Decorators\";\r\nimport { Token } from \"./Token\";\r\nimport { Vocabulary } from \"./Vocabulary\";\r\n\r\n/**\r\n * This class provides a default implementation of the {@link Vocabulary}\r\n * interface.\r\n *\r\n * @author Sam Harwell\r\n */\r\nexport class VocabularyImpl implements Vocabulary {\r\n\t/**\r\n\t * Gets an empty {@link Vocabulary} instance.\r\n\t *\r\n\t * No literal or symbol names are assigned to token types, so\r\n\t * {@link #getDisplayName(int)} returns the numeric value for all tokens\r\n\t * except {@link Token#EOF}.\r\n\t */\r\n\t@NotNull\r\n\tpublic static readonly EMPTY_VOCABULARY: VocabularyImpl = new VocabularyImpl([], [], []);\r\n\r\n\t@NotNull\r\n\tprivate readonly literalNames: Array;\r\n\t@NotNull\r\n\tprivate readonly symbolicNames: Array;\r\n\t@NotNull\r\n\tprivate readonly displayNames: Array;\r\n\r\n\tprivate _maxTokenType: number;\r\n\r\n\t/**\r\n\t * Constructs a new instance of {@link VocabularyImpl} from the specified\r\n\t * literal, symbolic, and display token names.\r\n\t *\r\n\t * @param literalNames The literal names assigned to tokens, or an empty array\r\n\t * if no literal names are assigned.\r\n\t * @param 
symbolicNames The symbolic names assigned to tokens, or\r\n\t * an empty array if no symbolic names are assigned.\r\n\t * @param displayNames The display names assigned to tokens, or an empty array\r\n\t * to use the values in `literalNames` and `symbolicNames` as\r\n\t * the source of display names, as described in\r\n\t * {@link #getDisplayName(int)}.\r\n\t *\r\n\t * @see #getLiteralName(int)\r\n\t * @see #getSymbolicName(int)\r\n\t * @see #getDisplayName(int)\r\n\t */\r\n\tconstructor(literalNames: Array, symbolicNames: Array, displayNames: Array) {\r\n\t\tthis.literalNames = literalNames;\r\n\t\tthis.symbolicNames = symbolicNames;\r\n\t\tthis.displayNames = displayNames;\r\n\t\t// See note here on -1 part: https://github.com/antlr/antlr4/pull/1146\r\n\t\tthis._maxTokenType =\r\n\t\t\tMath.max(this.displayNames.length,\r\n\t\t\t\tMath.max(this.literalNames.length, this.symbolicNames.length)) - 1;\r\n\t}\r\n\r\n\t@Override\r\n\tget maxTokenType(): number {\r\n\t\treturn this._maxTokenType;\r\n\t}\r\n\r\n\t@Override\r\n\tpublic getLiteralName(tokenType: number): string | undefined {\r\n\t\tif (tokenType >= 0 && tokenType < this.literalNames.length) {\r\n\t\t\treturn this.literalNames[tokenType];\r\n\t\t}\r\n\r\n\t\treturn undefined;\r\n\t}\r\n\r\n\t@Override\r\n\tpublic getSymbolicName(tokenType: number): string | undefined {\r\n\t\tif (tokenType >= 0 && tokenType < this.symbolicNames.length) {\r\n\t\t\treturn this.symbolicNames[tokenType];\r\n\t\t}\r\n\r\n\t\tif (tokenType === Token.EOF) {\r\n\t\t\treturn \"EOF\";\r\n\t\t}\r\n\r\n\t\treturn undefined;\r\n\t}\r\n\r\n\t@Override\r\n\t@NotNull\r\n\tpublic getDisplayName(tokenType: number): string {\r\n\t\tif (tokenType >= 0 && tokenType < this.displayNames.length) {\r\n\t\t\tlet displayName = this.displayNames[tokenType];\r\n\t\t\tif (displayName) {\r\n\t\t\t\treturn displayName;\r\n\t\t\t}\r\n\t\t}\r\n\r\n\t\tlet literalName = this.getLiteralName(tokenType);\r\n\t\tif (literalName) {\r\n\t\t\treturn 
literalName;\r\n\t\t}\r\n\r\n\t\tlet symbolicName = this.getSymbolicName(tokenType);\r\n\t\tif (symbolicName) {\r\n\t\t\treturn symbolicName;\r\n\t\t}\r\n\r\n\t\treturn String(tokenType);\r\n\t}\r\n}\r\n", "/*!\r\n * Copyright 2016 The ANTLR Project. All rights reserved.\r\n * Licensed under the BSD-3-Clause license. See LICENSE file in the project root for license information.\r\n */\r\n\r\n// ConvertTo-TS run at 2016-10-04T11:26:38.5097925-07:00\r\n\r\nimport { ATN } from \"../atn/ATN\";\r\nimport { ATNSimulator } from \"../atn/ATNSimulator\";\r\nimport { ATNState } from \"../atn/ATNState\";\r\nimport { DFA } from \"./DFA\";\r\nimport { DFAState } from \"./DFAState\";\r\nimport { NotNull, Override } from \"../Decorators\";\r\nimport { PredictionContext } from \"../atn/PredictionContext\";\r\nimport { Recognizer } from \"../Recognizer\";\r\nimport { Vocabulary } from \"../Vocabulary\";\r\nimport { VocabularyImpl } from \"../VocabularyImpl\";\r\n\r\n/** A DFA walker that knows how to dump them to serialized strings. 
*/\r\nexport class DFASerializer {\r\n\t@NotNull\r\n\tprivate dfa: DFA;\r\n\t@NotNull\r\n\tprivate vocabulary: Vocabulary;\r\n\r\n\tpublic ruleNames?: string[];\r\n\r\n\tpublic atn?: ATN;\r\n\r\n\tconstructor(/*@NotNull*/ dfa: DFA, /*@NotNull*/ vocabulary: Vocabulary);\r\n\tconstructor(/*@NotNull*/ dfa: DFA, /*@Nullable*/ parser: Recognizer | undefined);\r\n\tconstructor(/*@NotNull*/ dfa: DFA, /*@NotNull*/ vocabulary: Vocabulary, /*@Nullable*/ ruleNames: string[] | undefined, /*@Nullable*/ atn: ATN | undefined);\r\n\tconstructor(dfa: DFA, vocabulary: Vocabulary | Recognizer | undefined, ruleNames?: string[], atn?: ATN) {\r\n\t\tif (vocabulary instanceof Recognizer) {\r\n\t\t\truleNames = vocabulary.ruleNames;\r\n\t\t\tatn = vocabulary.atn;\r\n\t\t\tvocabulary = vocabulary.vocabulary;\r\n\t\t} else if (!vocabulary) {\r\n\t\t\tvocabulary = VocabularyImpl.EMPTY_VOCABULARY;\r\n\t\t}\r\n\r\n\t\tthis.dfa = dfa;\r\n\t\tthis.vocabulary = vocabulary;\r\n\t\tthis.ruleNames = ruleNames;\r\n\t\tthis.atn = atn;\r\n\t}\r\n\r\n\t@Override\r\n\tpublic toString(): string {\r\n\t\tif (!this.dfa.s0) {\r\n\t\t\treturn \"\";\r\n\t\t}\r\n\r\n\t\tlet buf = \"\";\r\n\r\n\t\tif (this.dfa.states) {\r\n\t\t\tlet states: DFAState[] = new Array(...this.dfa.states.toArray());\r\n\t\t\tstates.sort((o1, o2) => o1.stateNumber - o2.stateNumber);\r\n\r\n\t\t\tfor (let s of states) {\r\n\t\t\t\tlet edges: Map = s.getEdgeMap();\r\n\t\t\t\tlet edgeKeys = [...edges.keys()].sort((a, b) => a - b);\r\n\t\t\t\tlet contextEdges: Map = s.getContextEdgeMap();\r\n\t\t\t\tlet contextEdgeKeys = [...contextEdges.keys()].sort((a, b) => a - b);\r\n\t\t\t\tfor (let entry of edgeKeys) {\r\n\t\t\t\t\tlet value = edges.get(entry);\r\n\t\t\t\t\tif ((value == null || value === ATNSimulator.ERROR) && !s.isContextSymbol(entry)) {\r\n\t\t\t\t\t\tcontinue;\r\n\t\t\t\t\t}\r\n\r\n\t\t\t\t\tlet contextSymbol: boolean = false;\r\n\t\t\t\t\tbuf += (this.getStateString(s)) + (\"-\") + (this.getEdgeLabel(entry)) + 
(\"->\");\r\n\t\t\t\t\tif (s.isContextSymbol(entry)) {\r\n\t\t\t\t\t\tbuf += (\"!\");\r\n\t\t\t\t\t\tcontextSymbol = true;\r\n\t\t\t\t\t}\r\n\r\n\t\t\t\t\tlet t: DFAState | undefined = value;\r\n\t\t\t\t\tif (t && t.stateNumber !== ATNSimulator.ERROR.stateNumber) {\r\n\t\t\t\t\t\tbuf += (this.getStateString(t)) + (\"\\n\");\r\n\t\t\t\t\t}\r\n\t\t\t\t\telse if (contextSymbol) {\r\n\t\t\t\t\t\tbuf += (\"ctx\\n\");\r\n\t\t\t\t\t}\r\n\t\t\t\t}\r\n\r\n\t\t\t\tif (s.isContextSensitive) {\r\n\t\t\t\t\tfor (let entry of contextEdgeKeys) {\r\n\t\t\t\t\t\tbuf += (this.getStateString(s))\r\n\t\t\t\t\t\t\t+ (\"-\")\r\n\t\t\t\t\t\t\t+ (this.getContextLabel(entry))\r\n\t\t\t\t\t\t\t+ (\"->\")\r\n\t\t\t\t\t\t\t+ (this.getStateString(contextEdges.get(entry)!))\r\n\t\t\t\t\t\t\t+ (\"\\n\");\r\n\t\t\t\t\t}\r\n\t\t\t\t}\r\n\t\t\t}\r\n\t\t}\r\n\t\tlet output: string = buf;\r\n\t\tif (output.length === 0) {\r\n\t\t\treturn \"\";\r\n\t\t}\r\n\t\t//return Utils.sortLinesInString(output);\r\n\t\treturn output;\r\n\t}\r\n\r\n\tprotected getContextLabel(i: number): string {\r\n\t\tif (i === PredictionContext.EMPTY_FULL_STATE_KEY) {\r\n\t\t\treturn \"ctx:EMPTY_FULL\";\r\n\t\t}\r\n\t\telse if (i === PredictionContext.EMPTY_LOCAL_STATE_KEY) {\r\n\t\t\treturn \"ctx:EMPTY_LOCAL\";\r\n\t\t}\r\n\r\n\t\tif (this.atn && i > 0 && i <= this.atn.states.length) {\r\n\t\t\tlet state: ATNState = this.atn.states[i];\r\n\t\t\tlet ruleIndex: number = state.ruleIndex;\r\n\t\t\tif (this.ruleNames && ruleIndex >= 0 && ruleIndex < this.ruleNames.length) {\r\n\t\t\t\treturn \"ctx:\" + String(i) + \"(\" + this.ruleNames[ruleIndex] + \")\";\r\n\t\t\t}\r\n\t\t}\r\n\r\n\t\treturn \"ctx:\" + String(i);\r\n\t}\r\n\r\n\tprotected getEdgeLabel(i: number): string {\r\n\t\treturn this.vocabulary.getDisplayName(i);\r\n\t}\r\n\r\n\tpublic getStateString(s: DFAState): string {\r\n\t\tif (s === ATNSimulator.ERROR) {\r\n\t\t\treturn \"ERROR\";\r\n\t\t}\r\n\r\n\t\tlet n: number = s.stateNumber;\r\n\t\tlet stateStr: string = 
\"s\" + n;\r\n\t\tif (s.isAcceptState) {\r\n\t\t\tif (s.predicates) {\r\n\t\t\t\tstateStr = \":s\" + n + \"=>\" + s.predicates;\r\n\t\t\t}\r\n\t\t\telse {\r\n\t\t\t\tstateStr = \":s\" + n + \"=>\" + s.prediction;\r\n\t\t\t}\r\n\t\t}\r\n\r\n\t\tif (s.isContextSensitive) {\r\n\t\t\tstateStr += \"*\";\r\n\t\t\tfor (let config of s.configs) {\r\n\t\t\t\tif (config.reachesIntoOuterContext) {\r\n\t\t\t\t\tstateStr += \"*\";\r\n\t\t\t\t\tbreak;\r\n\t\t\t\t}\r\n\t\t\t}\r\n\t\t}\r\n\t\treturn stateStr;\r\n\t}\r\n}\r\n", "/*!\r\n * Copyright 2016 The ANTLR Project. All rights reserved.\r\n * Licensed under the BSD-3-Clause license. See LICENSE file in the project root for license information.\r\n */\r\n\r\n// ConvertTo-TS run at 2016-10-04T11:26:39.2167238-07:00\r\n\r\nimport { DFA } from \"./DFA\";\r\nimport { DFASerializer } from \"./DFASerializer\";\r\nimport { NotNull, Override } from \"../Decorators\";\r\nimport { VocabularyImpl } from \"../VocabularyImpl\";\r\n\r\nexport class LexerDFASerializer extends DFASerializer {\r\n\tconstructor( @NotNull dfa: DFA) {\r\n\t\tsuper(dfa, VocabularyImpl.EMPTY_VOCABULARY);\r\n\t}\r\n\r\n\t@Override\r\n\t@NotNull\r\n\tprotected getEdgeLabel(i: number): string {\r\n\t\treturn \"'\" + String.fromCodePoint(i) + \"'\";\r\n\t}\r\n}\r\n", "/*!\r\n * Copyright 2016 The ANTLR Project. All rights reserved.\r\n * Licensed under the BSD-3-Clause license. 
See LICENSE file in the project root for license information.\r\n */\r\n\r\n// ConvertTo-TS run at 2016-10-04T11:26:37.7099201-07:00\r\n\r\nimport { ATNStateType } from \"./ATNStateType\";\r\nimport { BitSet } from \"../misc/BitSet\";\r\nimport { DecisionState } from \"./DecisionState\";\r\nimport { Override } from \"../Decorators\";\r\nimport { StarLoopbackState } from \"./StarLoopbackState\";\r\n\r\nexport class StarLoopEntryState extends DecisionState {\r\n\t// This is always set during ATN deserialization\r\n\tpublic loopBackState!: StarLoopbackState;\r\n\r\n\t/**\r\n\t * Indicates whether this state can benefit from a precedence DFA during SLL\r\n\t * decision making.\r\n\t *\r\n\t * This is a computed property that is calculated during ATN deserialization\r\n\t * and stored for use in {@link ParserATNSimulator} and\r\n\t * {@link ParserInterpreter}.\r\n\t *\r\n\t * @see `DFA.isPrecedenceDfa`\r\n\t */\r\n\tpublic precedenceRuleDecision: boolean = false;\r\n\r\n\t/**\r\n\t * For precedence decisions, this set marks states *S* which have all\r\n\t * of the following characteristics:\r\n\t *\r\n\t * * One or more invocation sites of the current rule returns to\r\n\t * *S*.\r\n\t * * The closure from *S* includes the current decision without\r\n\t * passing through any rule invocations or stepping out of the current\r\n\t * rule.\r\n\t *\r\n\t * This field is not used when {@link #precedenceRuleDecision} is\r\n\t * `false`.\r\n\t */\r\n\tpublic precedenceLoopbackStates: BitSet = new BitSet();\r\n\r\n\t@Override\r\n\tget stateType(): ATNStateType {\r\n\t\treturn ATNStateType.STAR_LOOP_ENTRY;\r\n\t}\r\n}\r\n", "/*!\r\n * Copyright 2016 The ANTLR Project. All rights reserved.\r\n * Licensed under the BSD-3-Clause license. 
See LICENSE file in the project root for license information.\r\n */\r\n\r\n// ConvertTo-TS run at 2016-10-04T11:26:38.3567094-07:00\r\n\r\nimport { Array2DHashSet } from \"../misc/Array2DHashSet\";\r\nimport { ATN } from \"../atn/ATN\";\r\nimport { ATNConfigSet } from \"../atn/ATNConfigSet\";\r\nimport { ATNState } from \"../atn/ATNState\";\r\nimport { ATNType } from \"../atn/ATNType\";\r\nimport { DecisionState } from \"../atn/DecisionState\";\r\nimport { DFASerializer } from \"./DFASerializer\";\r\nimport { DFAState } from \"./DFAState\";\r\nimport { LexerATNSimulator } from \"../atn/LexerATNSimulator\";\r\nimport { LexerDFASerializer } from \"./LexerDFASerializer\";\r\nimport { NotNull } from \"../Decorators\";\r\nimport { ObjectEqualityComparator } from \"../misc/ObjectEqualityComparator\";\r\nimport { StarLoopEntryState } from \"../atn/StarLoopEntryState\";\r\nimport { Token } from \"../Token\";\r\nimport { TokensStartState } from \"../atn/TokensStartState\";\r\nimport { Vocabulary } from \"../Vocabulary\";\r\nimport { VocabularyImpl } from \"../VocabularyImpl\";\r\n\r\nexport class DFA {\r\n\t/**\r\n\t * A set of all states in the `DFA`.\r\n\t *\r\n\t * Note that this collection of states holds the DFA states for both SLL and LL prediction. Only the start state\r\n\t * needs to be differentiated for these cases, which is tracked by the `s0` and `s0full` fields.\r\n\t */\r\n\t@NotNull\r\n\tpublic readonly states: Array2DHashSet = new Array2DHashSet(ObjectEqualityComparator.INSTANCE);\r\n\r\n\tpublic s0: DFAState | undefined;\r\n\r\n\tpublic s0full: DFAState | undefined;\r\n\r\n\tpublic readonly decision: number;\r\n\r\n\t/** From which ATN state did we create this DFA? */\r\n\t@NotNull\r\n\tpublic atnStartState: ATNState;\r\n\t/**\r\n\t * Note: this field is accessed as `atnStartState.atn` in other targets. 
The TypeScript target keeps a separate copy\r\n\t * to avoid a number of additional null/undefined checks each time the ATN is accessed.\r\n\t */\r\n\t@NotNull\r\n\tpublic atn: ATN;\r\n\r\n\tprivate nextStateNumber: number = 0;\r\n\r\n\t/**\r\n\t * `true` if this DFA is for a precedence decision; otherwise,\r\n\t * `false`. This is the backing field for {@link #isPrecedenceDfa}.\r\n\t */\r\n\tprivate precedenceDfa: boolean;\r\n\r\n\t/**\r\n\t * Constructs a `DFA` instance associated with a lexer mode.\r\n\t *\r\n\t * The start state for a `DFA` constructed with this constructor should be a `TokensStartState`, which is the start\r\n\t * state for a lexer mode. The prediction made by this DFA determines the lexer rule which matches the current\r\n\t * input.\r\n\t *\r\n\t * @param atnStartState The start state for the mode.\r\n\t */\r\n\tconstructor(atnStartState: TokensStartState);\r\n\t/**\r\n\t * Constructs a `DFA` instance associated with a decision.\r\n\t *\r\n\t * @param atnStartState The decision associated with this DFA.\r\n\t * @param decision The decision number.\r\n\t */\r\n\tconstructor(atnStartState: DecisionState, decision: number);\r\n\tconstructor(@NotNull atnStartState: ATNState, decision: number = 0) {\r\n\t\tif (!atnStartState.atn) {\r\n\t\t\tthrow new Error(\"The ATNState must be associated with an ATN\");\r\n\t\t}\r\n\r\n\t\tthis.atnStartState = atnStartState;\r\n\t\tthis.atn = atnStartState.atn;\r\n\t\tthis.decision = decision;\r\n\r\n\t\t// Precedence DFAs are associated with the special precedence decision created for left-recursive rules which\r\n\t\t// evaluate their alternatives using a precedence hierarchy. 
When such a decision is encountered, we mark this\r\n\t\t// DFA instance as a precedence DFA and initialize the initial states s0 and s0full to special DFAState\r\n\t\t// instances which use outgoing edges to link to the actual start state used for each precedence level.\r\n\t\tlet isPrecedenceDfa: boolean = false;\r\n\t\tif (atnStartState instanceof StarLoopEntryState) {\r\n\t\t\tif (atnStartState.precedenceRuleDecision) {\r\n\t\t\t\tisPrecedenceDfa = true;\r\n\t\t\t\tthis.s0 = new DFAState(new ATNConfigSet());\r\n\t\t\t\tthis.s0full = new DFAState(new ATNConfigSet());\r\n\t\t\t}\r\n\t\t}\r\n\r\n\t\tthis.precedenceDfa = isPrecedenceDfa;\r\n\t}\r\n\r\n\t/**\r\n\t * Gets whether this DFA is a precedence DFA. Precedence DFAs use a special\r\n\t * start state {@link #s0} which is not stored in {@link #states}. The\r\n\t * {@link DFAState#edges} array for this start state contains outgoing edges\r\n\t * supplying individual start states corresponding to specific precedence\r\n\t * values.\r\n\t *\r\n\t * @returns `true` if this is a precedence DFA; otherwise,\r\n\t * `false`.\r\n\t * @see Parser.precedence\r\n\t */\r\n\tget isPrecedenceDfa(): boolean {\r\n\t\treturn this.precedenceDfa;\r\n\t}\r\n\r\n\t/**\r\n\t * Get the start state for a specific precedence value.\r\n\t *\r\n\t * @param precedence The current precedence.\r\n\t * @returns The start state corresponding to the specified precedence, or\r\n\t * `undefined` if no start state exists for the specified precedence.\r\n\t *\r\n\t * @ if this is not a precedence DFA.\r\n\t * @see `isPrecedenceDfa`\r\n\t */\r\n\tpublic getPrecedenceStartState(precedence: number, fullContext: boolean): DFAState | undefined {\r\n\t\tif (!this.isPrecedenceDfa) {\r\n\t\t\tthrow new Error(\"Only precedence DFAs may contain a precedence start state.\");\r\n\t\t}\r\n\r\n\t\t// s0 and s0full are never null for a precedence DFA\r\n\t\tif (fullContext) {\r\n\t\t\treturn (this.s0full as DFAState).getTarget(precedence);\r\n\t\t}\r\n\t\telse 
{\r\n\t\t\treturn (this.s0 as DFAState).getTarget(precedence);\r\n\t\t}\r\n\t}\r\n\r\n\t/**\r\n\t * Set the start state for a specific precedence value.\r\n\t *\r\n\t * @param precedence The current precedence.\r\n\t * @param startState The start state corresponding to the specified\r\n\t * precedence.\r\n\t *\r\n\t * @ if this is not a precedence DFA.\r\n\t * @see `isPrecedenceDfa`\r\n\t */\r\n\tpublic setPrecedenceStartState(precedence: number, fullContext: boolean, startState: DFAState): void {\r\n\t\tif (!this.isPrecedenceDfa) {\r\n\t\t\tthrow new Error(\"Only precedence DFAs may contain a precedence start state.\");\r\n\t\t}\r\n\r\n\t\tif (precedence < 0) {\r\n\t\t\treturn;\r\n\t\t}\r\n\r\n\t\tif (fullContext) {\r\n\t\t\t// s0full is never null for a precedence DFA\r\n\t\t\t(this.s0full as DFAState).setTarget(precedence, startState);\r\n\t\t}\r\n\t\telse {\r\n\t\t\t// s0 is never null for a precedence DFA\r\n\t\t\t(this.s0 as DFAState).setTarget(precedence, startState);\r\n\t\t}\r\n\t}\r\n\r\n\tget isEmpty(): boolean {\r\n\t\tif (this.isPrecedenceDfa) {\r\n\t\t\t// s0 and s0full are never null for a precedence DFA\r\n\t\t\treturn this.s0!.getEdgeMap().size === 0 && this.s0full!.getEdgeMap().size === 0;\r\n\t\t}\r\n\r\n\t\treturn this.s0 == null && this.s0full == null;\r\n\t}\r\n\r\n\tget isContextSensitive(): boolean {\r\n\t\tif (this.isPrecedenceDfa) {\r\n\t\t\t// s0full is never null for a precedence DFA\r\n\t\t\treturn (this.s0full as DFAState).getEdgeMap().size > 0;\r\n\t\t}\r\n\r\n\t\treturn this.s0full != null;\r\n\t}\r\n\r\n\tpublic addState(state: DFAState): DFAState {\r\n\t\tstate.stateNumber = this.nextStateNumber++;\r\n\t\treturn this.states.getOrAdd(state);\r\n\t}\r\n\r\n\tpublic toString(): string;\r\n\tpublic toString(/*@NotNull*/ vocabulary: Vocabulary): string;\r\n\tpublic toString(/*@NotNull*/ vocabulary: Vocabulary, ruleNames: string[] | undefined): string;\r\n\tpublic toString(vocabulary?: Vocabulary, ruleNames?: string[]): string 
{\r\n\t\tif (!vocabulary) {\r\n\t\t\tvocabulary = VocabularyImpl.EMPTY_VOCABULARY;\r\n\t\t}\r\n\r\n\t\tif (!this.s0) {\r\n\t\t\treturn \"\";\r\n\t\t}\r\n\r\n\t\tlet serializer: DFASerializer;\r\n\t\tif (ruleNames) {\r\n\t\t\tserializer = new DFASerializer(this, vocabulary, ruleNames, this.atnStartState.atn);\r\n\t\t} else {\r\n\t\t\tserializer = new DFASerializer(this, vocabulary);\r\n\t\t}\r\n\r\n\t\treturn serializer.toString();\r\n\t}\r\n\r\n\tpublic toLexerString(): string {\r\n\t\tif (!this.s0) {\r\n\t\t\treturn \"\";\r\n\t\t}\r\n\r\n\t\tlet serializer: DFASerializer = new LexerDFASerializer(this);\r\n\t\treturn serializer.toString();\r\n\t}\r\n}\r\n", "/*!\r\n * Copyright 2016 The ANTLR Project. All rights reserved.\r\n * Licensed under the BSD-3-Clause license. See LICENSE file in the project root for license information.\r\n */\r\n\r\n// ConvertTo-TS run at 2016-10-04T11:26:27.8389930-07:00\r\n\r\nimport { ATNState } from \"./ATNState\";\r\nimport { ATNStateType } from \"./ATNStateType\";\r\nimport { Override } from \"../Decorators\";\r\n\r\n/**\r\n *\r\n * @author Sam Harwell\r\n */\r\nexport class BasicState extends ATNState {\r\n\r\n\t@Override\r\n\tget stateType(): ATNStateType {\r\n\t\treturn ATNStateType.BASIC;\r\n\t}\r\n\r\n}\r\n", "/*!\r\n * Copyright 2016 The ANTLR Project. All rights reserved.\r\n * Licensed under the BSD-3-Clause license. See LICENSE file in the project root for license information.\r\n */\r\n\r\nimport { ATNState } from \"./ATNState\";\r\nimport { ATNStateType } from \"./ATNStateType\";\r\nimport { BasicState } from \"./BasicState\";\r\nimport { Override } from \"../Decorators\";\r\n\r\n/**\r\n *\r\n * @author Sam Harwell\r\n */\r\nexport class InvalidState extends BasicState {\r\n\r\n\t@Override\r\n\tget stateType(): ATNStateType {\r\n\t\treturn ATNStateType.INVALID_TYPE;\r\n\t}\r\n\r\n}\r\n", "/*!\r\n * Copyright 2016 The ANTLR Project. All rights reserved.\r\n * Licensed under the BSD-3-Clause license. 
See LICENSE file in the project root for license information.\r\n */\r\n\r\n// ConvertTo-TS run at 2016-10-04T11:26:37.3060135-07:00\r\n\r\nimport { ATNState } from \"./ATNState\";\r\nimport { IntervalSet } from \"../misc/IntervalSet\";\r\nimport { Override, NotNull, Nullable } from \"../Decorators\";\r\nimport { Token } from \"../Token\";\r\nimport { Transition } from \"./Transition\";\r\nimport { TransitionType } from \"./TransitionType\";\r\n\r\n/** A transition containing a set of values. */\r\nexport class SetTransition extends Transition {\r\n\t@NotNull\r\n\tpublic set: IntervalSet;\r\n\r\n\t// TODO (sam): should we really allow undefined here?\r\n\tconstructor(@NotNull target: ATNState, @Nullable set: IntervalSet) {\r\n\t\tsuper(target);\r\n\t\tif (set == null) {\r\n\t\t\tset = IntervalSet.of(Token.INVALID_TYPE);\r\n\t\t}\r\n\r\n\t\tthis.set = set;\r\n\t}\r\n\r\n\t@Override\r\n\tget serializationType(): TransitionType {\r\n\t\treturn TransitionType.SET;\r\n\t}\r\n\r\n\t@Override\r\n\t@NotNull\r\n\tget label(): IntervalSet {\r\n\t\treturn this.set;\r\n\t}\r\n\r\n\t@Override\r\n\tpublic matches(symbol: number, minVocabSymbol: number, maxVocabSymbol: number): boolean {\r\n\t\treturn this.set.contains(symbol);\r\n\t}\r\n\r\n\t@Override\r\n\t@NotNull\r\n\tpublic toString(): string {\r\n\t\treturn this.set.toString();\r\n\t}\r\n}\r\n", "/*!\r\n * Copyright 2016 The ANTLR Project. All rights reserved.\r\n * Licensed under the BSD-3-Clause license. 
See LICENSE file in the project root for license information.\r\n */\r\n\r\n// ConvertTo-TS run at 2016-10-04T11:26:30.8483617-07:00\r\n\r\nimport { ATNState } from \"./ATNState\";\r\nimport { IntervalSet } from \"../misc/IntervalSet\";\r\nimport { Override, NotNull, Nullable } from \"../Decorators\";\r\nimport { SetTransition } from \"./SetTransition\";\r\nimport { Transition } from \"./Transition\";\r\nimport { TransitionType } from \"./TransitionType\";\r\n\r\nexport class NotSetTransition extends SetTransition {\r\n\tconstructor(@NotNull target: ATNState, @Nullable set: IntervalSet) {\r\n\t\tsuper(target, set);\r\n\t}\r\n\r\n\t@Override\r\n\tget serializationType(): TransitionType {\r\n\t\treturn TransitionType.NOT_SET;\r\n\t}\r\n\r\n\t@Override\r\n\tpublic matches(symbol: number, minVocabSymbol: number, maxVocabSymbol: number): boolean {\r\n\t\treturn symbol >= minVocabSymbol\r\n\t\t\t&& symbol <= maxVocabSymbol\r\n\t\t\t&& !super.matches(symbol, minVocabSymbol, maxVocabSymbol);\r\n\t}\r\n\r\n\t@Override\r\n\tpublic toString(): string {\r\n\t\treturn \"~\" + super.toString();\r\n\t}\r\n}\r\n", "/*!\r\n * Copyright 2016 The ANTLR Project. All rights reserved.\r\n * Licensed under the BSD-3-Clause license. See LICENSE file in the project root for license information.\r\n */\r\n\r\n// ConvertTo-TS run at 2016-10-04T11:26:36.7513856-07:00\r\n\r\nimport { ATNState } from \"./ATNState\";\r\nimport { ATNStateType } from \"./ATNStateType\";\r\nimport { Override } from \"../Decorators\";\r\n\r\n/** The last node in the ATN for a rule, unless that rule is the start symbol.\r\n * In that case, there is one transition to EOF. 
Later, we might encode\r\n * references to all calls to this rule to compute FOLLOW sets for\r\n * error handling.\r\n */\r\nexport class RuleStopState extends ATNState {\r\n\r\n\t@Override\r\n\tget nonStopStateNumber(): number {\r\n\t\treturn -1;\r\n\t}\r\n\r\n\t@Override\r\n\tget stateType(): ATNStateType {\r\n\t\treturn ATNStateType.RULE_STOP;\r\n\t}\r\n\r\n}\r\n", "/*!\r\n * Copyright 2016 The ANTLR Project. All rights reserved.\r\n * Licensed under the BSD-3-Clause license. See LICENSE file in the project root for license information.\r\n */\r\n\r\n// ConvertTo-TS run at 2016-10-04T11:26:36.8294453-07:00\r\n\r\nimport { ATNState } from \"./ATNState\";\r\nimport { Override, NotNull } from \"../Decorators\";\r\nimport { RuleStartState } from \"./RuleStartState\";\r\nimport { Transition } from \"./Transition\";\r\nimport { TransitionType } from \"./TransitionType\";\r\n\r\n/** */\r\nexport class RuleTransition extends Transition {\r\n\t/** Ptr to the rule definition object for this rule ref */\r\n\tpublic ruleIndex: number; // no Rule object at runtime\r\n\r\n\tpublic precedence: number;\r\n\r\n\t/** What node to begin computations following ref to rule */\r\n\t@NotNull\r\n\tpublic followState: ATNState;\r\n\r\n\tpublic tailCall: boolean = false;\r\n\tpublic optimizedTailCall: boolean = false;\r\n\r\n\tconstructor(@NotNull ruleStart: RuleStartState, ruleIndex: number, precedence: number, @NotNull followState: ATNState) {\r\n\t\tsuper(ruleStart);\r\n\t\tthis.ruleIndex = ruleIndex;\r\n\t\tthis.precedence = precedence;\r\n\t\tthis.followState = followState;\r\n\t}\r\n\r\n\t@Override\r\n\tget serializationType(): TransitionType {\r\n\t\treturn TransitionType.RULE;\r\n\t}\r\n\r\n\t@Override\r\n\tget isEpsilon(): boolean {\r\n\t\treturn true;\r\n\t}\r\n\r\n\t@Override\r\n\tpublic matches(symbol: number, minVocabSymbol: number, maxVocabSymbol: number): boolean {\r\n\t\treturn false;\r\n\t}\r\n}\r\n", "/*!\r\n * Copyright 2016 The ANTLR Project. 
All rights reserved.\r\n * Licensed under the BSD-3-Clause license. See LICENSE file in the project root for license information.\r\n */\r\n\r\n// ConvertTo-TS run at 2016-10-04T11:26:37.9456839-07:00\r\n\r\nimport { ATNState } from \"./ATNState\";\r\nimport { Override, NotNull } from \"../Decorators\";\r\nimport { Transition } from \"./Transition\";\r\nimport { TransitionType } from \"./TransitionType\";\r\n\r\nexport class WildcardTransition extends Transition {\r\n\tconstructor(@NotNull target: ATNState) {\r\n\t\tsuper(target);\r\n\t}\r\n\r\n\t@Override\r\n\tget serializationType(): TransitionType {\r\n\t\treturn TransitionType.WILDCARD;\r\n\t}\r\n\r\n\t@Override\r\n\tpublic matches(symbol: number, minVocabSymbol: number, maxVocabSymbol: number): boolean {\r\n\t\treturn symbol >= minVocabSymbol && symbol <= maxVocabSymbol;\r\n\t}\r\n\r\n\t@Override\r\n\t@NotNull\r\n\tpublic toString(): string {\r\n\t\treturn \".\";\r\n\t}\r\n}\r\n", "/*!\r\n * Copyright 2016 The ANTLR Project. All rights reserved.\r\n * Licensed under the BSD-3-Clause license. 
See LICENSE file in the project root for license information.\r\n */\r\n\r\n// ConvertTo-TS run at 2016-10-04T11:26:30.4445360-07:00\r\n\r\nimport { AbstractPredicateTransition } from \"./AbstractPredicateTransition\";\r\nimport { Array2DHashSet } from \"../misc/Array2DHashSet\";\r\nimport { ATN } from \"./ATN\";\r\nimport { ATNConfig } from \"./ATNConfig\";\r\nimport { ATNState } from \"./ATNState\";\r\nimport { BitSet } from \"../misc/BitSet\";\r\nimport { IntervalSet } from \"../misc/IntervalSet\";\r\nimport { NotNull } from \"../Decorators\";\r\nimport { NotSetTransition } from \"./NotSetTransition\";\r\nimport { ObjectEqualityComparator } from \"../misc/ObjectEqualityComparator\";\r\nimport { PredictionContext } from \"./PredictionContext\";\r\nimport { RuleStopState } from \"./RuleStopState\";\r\nimport { RuleTransition } from \"./RuleTransition\";\r\nimport { SetTransition } from \"./SetTransition\";\r\nimport { Token } from \"../Token\";\r\nimport { Transition } from \"./Transition\";\r\nimport { WildcardTransition } from \"./WildcardTransition\";\r\n\r\nexport class LL1Analyzer {\r\n\t/** Special value added to the lookahead sets to indicate that we hit\r\n\t * a predicate during analysis if `seeThruPreds==false`.\r\n\t */\r\n\tpublic static readonly HIT_PRED: number = Token.INVALID_TYPE;\r\n\r\n\t@NotNull\r\n\tpublic atn: ATN;\r\n\r\n\tconstructor(@NotNull atn: ATN) { this.atn = atn; }\r\n\r\n\t/**\r\n\t * Calculates the SLL(1) expected lookahead set for each outgoing transition\r\n\t * of an {@link ATNState}. The returned array has one element for each\r\n\t * outgoing transition in `s`. 
If the closure from transition\r\n\t * *i* leads to a semantic predicate before matching a symbol, the\r\n\t * element at index *i* of the result will be `undefined`.\r\n\t *\r\n\t * @param s the ATN state\r\n\t * @returns the expected symbols for each outgoing transition of `s`.\r\n\t */\r\n\tpublic getDecisionLookahead(s: ATNState | undefined): Array | undefined {\r\n//\t\tSystem.out.println(\"LOOK(\"+s.stateNumber+\")\");\r\n\t\tif (s == null) {\r\n\t\t\treturn undefined;\r\n\t\t}\r\n\r\n\t\tlet look: Array = new Array(s.numberOfTransitions);\r\n\t\tfor (let alt = 0; alt < s.numberOfTransitions; alt++) {\r\n\t\t\tlet current: IntervalSet | undefined = new IntervalSet();\r\n\t\t\tlook[alt] = current;\r\n\t\t\tlet lookBusy: Array2DHashSet = new Array2DHashSet(ObjectEqualityComparator.INSTANCE);\r\n\t\t\tlet seeThruPreds: boolean = false; // fail to get lookahead upon pred\r\n\t\t\tthis._LOOK(s.transition(alt).target, undefined, PredictionContext.EMPTY_LOCAL,\r\n\t\t\t\tcurrent, lookBusy, new BitSet(), seeThruPreds, false);\r\n\t\t\t// Wipe out lookahead for this alternative if we found nothing\r\n\t\t\t// or we had a predicate when we !seeThruPreds\r\n\t\t\tif (current.size === 0 || current.contains(LL1Analyzer.HIT_PRED)) {\r\n\t\t\t\tcurrent = undefined;\r\n\t\t\t\tlook[alt] = current;\r\n\t\t\t}\r\n\t\t}\r\n\t\treturn look;\r\n\t}\r\n\r\n\t/**\r\n\t * Compute set of tokens that can follow `s` in the ATN in the\r\n\t * specified `ctx`.\r\n\t *\r\n\t * If `ctx` is `undefined` and the end of the rule containing\r\n\t * `s` is reached, {@link Token#EPSILON} is added to the result set.\r\n\t * If `ctx` is not `undefined` and the end of the outermost rule is\r\n\t * reached, {@link Token#EOF} is added to the result set.\r\n\t *\r\n\t * @param s the ATN state\r\n\t * @param ctx the complete parser context, or `undefined` if the context\r\n\t * should be ignored\r\n\t *\r\n\t * @returns The set of tokens that can follow `s` in the ATN in the\r\n\t * specified 
`ctx`.\r\n\t */\r\n\t// @NotNull\r\n\tpublic LOOK(/*@NotNull*/ s: ATNState, /*@NotNull*/ ctx: PredictionContext): IntervalSet;\r\n\r\n\t/**\r\n\t * Compute set of tokens that can follow `s` in the ATN in the\r\n\t * specified `ctx`.\r\n\t *\r\n\t * If `ctx` is `undefined` and the end of the rule containing\r\n\t * `s` is reached, {@link Token#EPSILON} is added to the result set.\r\n\t * If `ctx` is not `PredictionContext#EMPTY_LOCAL` and the end of the outermost rule is\r\n\t * reached, {@link Token#EOF} is added to the result set.\r\n\t *\r\n\t * @param s the ATN state\r\n\t * @param stopState the ATN state to stop at. This can be a\r\n\t * {@link BlockEndState} to detect epsilon paths through a closure.\r\n\t * @param ctx the complete parser context, or `undefined` if the context\r\n\t * should be ignored\r\n\t *\r\n\t * @returns The set of tokens that can follow `s` in the ATN in the\r\n\t * specified `ctx`.\r\n\t */\r\n\t// @NotNull\r\n\tpublic LOOK(/*@NotNull*/ s: ATNState, /*@NotNull*/ ctx: PredictionContext, stopState: ATNState | null): IntervalSet;\r\n\r\n\t@NotNull\r\n\tpublic LOOK(@NotNull s: ATNState, @NotNull ctx: PredictionContext, stopState?: ATNState | null): IntervalSet {\r\n\t\tif (stopState === undefined) {\r\n\t\t\tif (s.atn == null) {\r\n\t\t\t\tthrow new Error(\"Illegal state\");\r\n\t\t\t}\r\n\r\n\t\t\tstopState = s.atn.ruleToStopState[s.ruleIndex];\r\n\t\t} else if (stopState === null) {\r\n\t\t\t// This is an explicit request to pass undefined as the stopState to _LOOK. 
Used to distinguish an overload\r\n\t\t\t// from the method which simply omits the stopState parameter.\r\n\t\t\tstopState = undefined;\r\n\t\t}\r\n\r\n\t\tlet r: IntervalSet = new IntervalSet();\r\n\t\tlet seeThruPreds: boolean = true; // ignore preds; get all lookahead\r\n\t\tlet addEOF: boolean = true;\r\n\t\tthis._LOOK(s, stopState, ctx, r, new Array2DHashSet(), new BitSet(), seeThruPreds, addEOF);\r\n\t\treturn r;\r\n\t}\r\n\r\n\t/**\r\n\t * Compute set of tokens that can follow `s` in the ATN in the\r\n\t * specified `ctx`.\r\n\t *

\r\n\t * If `ctx` is {@link PredictionContext#EMPTY_LOCAL} and\r\n\t * `stopState` or the end of the rule containing `s` is reached,\r\n\t * {@link Token#EPSILON} is added to the result set. If `ctx` is not\r\n\t * {@link PredictionContext#EMPTY_LOCAL} and `addEOF` is `true`\r\n\t * and `stopState` or the end of the outermost rule is reached,\r\n\t * {@link Token#EOF} is added to the result set.\r\n\t *\r\n\t * @param s the ATN state.\r\n\t * @param stopState the ATN state to stop at. This can be a\r\n\t * {@link BlockEndState} to detect epsilon paths through a closure.\r\n\t * @param ctx The outer context, or {@link PredictionContext#EMPTY_LOCAL} if\r\n\t * the outer context should not be used.\r\n\t * @param look The result lookahead set.\r\n\t * @param lookBusy A set used for preventing epsilon closures in the ATN\r\n\t * from causing a stack overflow. Outside code should pass\r\n\t * `new HashSet` for this argument.\r\n\t * @param calledRuleStack A set used for preventing left recursion in the\r\n\t * ATN from causing a stack overflow. Outside code should pass\r\n\t * `new BitSet()` for this argument.\r\n\t * @param seeThruPreds `true` to true semantic predicates as\r\n\t * implicitly `true` and \"see through them\", otherwise `false`\r\n\t * to treat semantic predicates as opaque and add {@link #HIT_PRED} to the\r\n\t * result if one is encountered.\r\n\t * @param addEOF Add {@link Token#EOF} to the result if the end of the\r\n\t * outermost context is reached. 
This parameter has no effect if `ctx`\r\n\t * is {@link PredictionContext#EMPTY_LOCAL}.\r\n\t */\r\n\tprotected _LOOK(\r\n\t\t@NotNull s: ATNState,\r\n\t\tstopState: ATNState | undefined,\r\n\t\t@NotNull ctx: PredictionContext,\r\n\t\t@NotNull look: IntervalSet,\r\n\t\t@NotNull lookBusy: Array2DHashSet,\r\n\t\t@NotNull calledRuleStack: BitSet,\r\n\t\tseeThruPreds: boolean,\r\n\t\taddEOF: boolean): void {\r\n//\t\tSystem.out.println(\"_LOOK(\"+s.stateNumber+\", ctx=\"+ctx);\r\n\t\tlet c: ATNConfig = ATNConfig.create(s, 0, ctx);\r\n\t\tif (!lookBusy.add(c)) {\r\n\t\t\treturn;\r\n\t\t}\r\n\r\n\t\tif (s === stopState) {\r\n\t\t\tif (PredictionContext.isEmptyLocal(ctx)) {\r\n\t\t\t\tlook.add(Token.EPSILON);\r\n\t\t\t\treturn;\r\n\t\t\t} else if (ctx.isEmpty) {\r\n\t\t\t\tif (addEOF) {\r\n\t\t\t\t\tlook.add(Token.EOF);\r\n\t\t\t\t}\r\n\r\n\t\t\t\treturn;\r\n\t\t\t}\r\n\t\t}\r\n\r\n\t\tif (s instanceof RuleStopState) {\r\n\t\t\tif (ctx.isEmpty && !PredictionContext.isEmptyLocal(ctx)) {\r\n\t\t\t\tif (addEOF) {\r\n\t\t\t\t\tlook.add(Token.EOF);\r\n\t\t\t\t}\r\n\r\n\t\t\t\treturn;\r\n\t\t\t}\r\n\r\n\t\t\tlet removed: boolean = calledRuleStack.get(s.ruleIndex);\r\n\t\t\ttry {\r\n\t\t\t\tcalledRuleStack.clear(s.ruleIndex);\r\n\t\t\t\tfor (let i = 0; i < ctx.size; i++) {\r\n\t\t\t\t\tif (ctx.getReturnState(i) === PredictionContext.EMPTY_FULL_STATE_KEY) {\r\n\t\t\t\t\t\tcontinue;\r\n\t\t\t\t\t}\r\n\r\n\t\t\t\t\tlet returnState: ATNState = this.atn.states[ctx.getReturnState(i)];\r\n//\t\t\t\t\tSystem.out.println(\"popping back to \"+retState);\r\n\t\t\t\t\tthis._LOOK(returnState, stopState, ctx.getParent(i), look, lookBusy, calledRuleStack, seeThruPreds, addEOF);\r\n\t\t\t\t}\r\n\t\t\t}\r\n\t\t\tfinally {\r\n\t\t\t\tif (removed) {\r\n\t\t\t\t\tcalledRuleStack.set(s.ruleIndex);\r\n\t\t\t\t}\r\n\t\t\t}\r\n\t\t}\r\n\r\n\t\tlet n: number = s.numberOfTransitions;\r\n\t\tfor (let i = 0; i < n; i++) {\r\n\t\t\tlet t: Transition = s.transition(i);\r\n\t\t\tif (t instanceof 
RuleTransition) {\r\n\t\t\t\tif (calledRuleStack.get(t.ruleIndex)) {\r\n\t\t\t\t\tcontinue;\r\n\t\t\t\t}\r\n\r\n\t\t\t\tlet newContext: PredictionContext = ctx.getChild(t.followState.stateNumber);\r\n\r\n\t\t\t\ttry {\r\n\t\t\t\t\tcalledRuleStack.set(t.ruleIndex);\r\n\t\t\t\t\tthis._LOOK(t.target, stopState, newContext, look, lookBusy, calledRuleStack, seeThruPreds, addEOF);\r\n\t\t\t\t}\r\n\t\t\t\tfinally {\r\n\t\t\t\t\tcalledRuleStack.clear(t.ruleIndex);\r\n\t\t\t\t}\r\n\t\t\t}\r\n\t\t\telse if (t instanceof AbstractPredicateTransition) {\r\n\t\t\t\tif (seeThruPreds) {\r\n\t\t\t\t\tthis._LOOK(t.target, stopState, ctx, look, lookBusy, calledRuleStack, seeThruPreds, addEOF);\r\n\t\t\t\t}\r\n\t\t\t\telse {\r\n\t\t\t\t\tlook.add(LL1Analyzer.HIT_PRED);\r\n\t\t\t\t}\r\n\t\t\t}\r\n\t\t\telse if (t.isEpsilon) {\r\n\t\t\t\tthis._LOOK(t.target, stopState, ctx, look, lookBusy, calledRuleStack, seeThruPreds, addEOF);\r\n\t\t\t}\r\n\t\t\telse if (t instanceof WildcardTransition) {\r\n\t\t\t\tlook.addAll(IntervalSet.of(Token.MIN_USER_TOKEN_TYPE, this.atn.maxTokenType));\r\n\t\t\t}\r\n\t\t\telse {\r\n//\t\t\t\tSystem.out.println(\"adding \"+ t);\r\n\t\t\t\tlet set: IntervalSet | undefined = t.label;\r\n\t\t\t\tif (set != null) {\r\n\t\t\t\t\tif (t instanceof NotSetTransition) {\r\n\t\t\t\t\t\tset = set.complement(IntervalSet.of(Token.MIN_USER_TOKEN_TYPE, this.atn.maxTokenType));\r\n\t\t\t\t\t}\r\n\t\t\t\t\tlook.addAll(set);\r\n\t\t\t\t}\r\n\t\t\t}\r\n\t\t}\r\n\t}\r\n}\r\n", "/*!\r\n * Copyright 2016 The ANTLR Project. All rights reserved.\r\n * Licensed under the BSD-3-Clause license. 
See LICENSE file in the project root for license information.\r\n */\r\n\r\n// ConvertTo-TS run at 2016-10-04T11:26:25.1063510-07:00\r\n\r\nimport { Array2DHashMap } from \"../misc/Array2DHashMap\";\r\nimport { ATNState } from \"./ATNState\";\r\nimport { ATNType } from \"./ATNType\";\r\nimport { DecisionState } from \"./DecisionState\";\r\nimport { DFA } from \"../dfa/DFA\";\r\nimport { IntervalSet } from \"../misc/IntervalSet\";\r\nimport { InvalidState } from \"./InvalidState\";\r\nimport { LexerAction } from \"./LexerAction\";\r\nimport { LL1Analyzer } from \"./LL1Analyzer\";\r\nimport { NotNull } from \"../Decorators\";\r\nimport { ObjectEqualityComparator } from \"../misc/ObjectEqualityComparator\";\r\nimport { PredictionContext } from \"./PredictionContext\";\r\nimport { RuleContext } from \"../RuleContext\";\r\nimport { RuleStartState } from \"./RuleStartState\";\r\nimport { RuleStopState } from \"./RuleStopState\";\r\nimport { RuleTransition } from \"./RuleTransition\";\r\nimport { Token } from \"../Token\";\r\nimport { TokensStartState } from \"./TokensStartState\";\r\n\r\nimport * as assert from \"assert\";\r\n\r\n/** */\r\nexport class ATN {\r\n\t@NotNull\r\n\tpublic readonly states: ATNState[] = [];\r\n\r\n\t/** Each subrule/rule is a decision point and we must track them so we\r\n\t * can go back later and build DFA predictors for them. 
This includes\r\n\t * all the rules, subrules, optional blocks, ()+, ()* etc...\r\n\t */\r\n\t@NotNull\r\n\tpublic decisionToState: DecisionState[] = [];\r\n\r\n\t/**\r\n\t * Maps from rule index to starting state number.\r\n\t */\r\n\tpublic ruleToStartState!: RuleStartState[];\r\n\r\n\t/**\r\n\t * Maps from rule index to stop state number.\r\n\t */\r\n\tpublic ruleToStopState!: RuleStopState[];\r\n\r\n\t@NotNull\r\n\tpublic modeNameToStartState: Map =\r\n\t\tnew Map();\r\n\r\n\t/**\r\n\t * The type of the ATN.\r\n\t */\r\n\tpublic grammarType: ATNType;\r\n\r\n\t/**\r\n\t * The maximum value for any symbol recognized by a transition in the ATN.\r\n\t */\r\n\tpublic maxTokenType: number;\r\n\r\n\t/**\r\n\t * For lexer ATNs, this maps the rule index to the resulting token type.\r\n\t * For parser ATNs, this maps the rule index to the generated bypass token\r\n\t * type if the\r\n\t * {@link ATNDeserializationOptions#isGenerateRuleBypassTransitions}\r\n\t * deserialization option was specified; otherwise, this is `undefined`.\r\n\t */\r\n\tpublic ruleToTokenType!: Int32Array;\r\n\r\n\t/**\r\n\t * For lexer ATNs, this is an array of {@link LexerAction} objects which may\r\n\t * be referenced by action transitions in the ATN.\r\n\t */\r\n\tpublic lexerActions!: LexerAction[];\r\n\r\n\t@NotNull\r\n\tpublic modeToStartState: TokensStartState[] = [];\r\n\r\n\tprivate contextCache: Array2DHashMap =\r\n\t\tnew Array2DHashMap(ObjectEqualityComparator.INSTANCE);\r\n\r\n\t@NotNull\r\n\tpublic decisionToDFA: DFA[] = [];\r\n\t@NotNull\r\n\tpublic modeToDFA: DFA[] = [];\r\n\r\n\tpublic LL1Table: Map = new Map();\r\n\r\n\t/** Used for runtime deserialization of ATNs from strings */\r\n\tconstructor(@NotNull grammarType: ATNType, maxTokenType: number) {\r\n\t\tthis.grammarType = grammarType;\r\n\t\tthis.maxTokenType = maxTokenType;\r\n\t}\r\n\r\n\tpublic clearDFA(): void {\r\n\t\tthis.decisionToDFA = new Array(this.decisionToState.length);\r\n\t\tfor (let i = 0; i < 
this.decisionToDFA.length; i++) {\r\n\t\t\tthis.decisionToDFA[i] = new DFA(this.decisionToState[i], i);\r\n\t\t}\r\n\r\n\t\tthis.modeToDFA = new Array(this.modeToStartState.length);\r\n\t\tfor (let i = 0; i < this.modeToDFA.length; i++) {\r\n\t\t\tthis.modeToDFA[i] = new DFA(this.modeToStartState[i]);\r\n\t\t}\r\n\r\n\t\tthis.contextCache.clear();\r\n\t\tthis.LL1Table.clear();\r\n\t}\r\n\r\n\tget contextCacheSize(): number {\r\n\t\treturn this.contextCache.size;\r\n\t}\r\n\r\n\tpublic getCachedContext(context: PredictionContext): PredictionContext {\r\n\t\treturn PredictionContext.getCachedContext(context, this.contextCache, new PredictionContext.IdentityHashMap());\r\n\t}\r\n\r\n\tpublic getDecisionToDFA(): DFA[] {\r\n\t\tassert(this.decisionToDFA != null && this.decisionToDFA.length === this.decisionToState.length);\r\n\t\treturn this.decisionToDFA;\r\n\t}\r\n\r\n\t/** Compute the set of valid tokens that can occur starting in state `s`.\r\n\t * If `ctx` is {@link PredictionContext#EMPTY_LOCAL}, the set of tokens will not include what can follow\r\n\t * the rule surrounding `s`. In other words, the set will be\r\n\t * restricted to tokens reachable staying within `s`'s rule.\r\n\t */\r\n\t// @NotNull\r\n\tpublic nextTokens(s: ATNState, /*@NotNull*/ ctx: PredictionContext): IntervalSet;\r\n\r\n\t/**\r\n\t * Compute the set of valid tokens that can occur starting in `s` and\r\n\t * staying in same rule. 
{@link Token#EPSILON} is in set if we reach end of\r\n\t * rule.\r\n\t */\r\n\t// @NotNull\r\n\tpublic nextTokens(/*@NotNull*/ s: ATNState): IntervalSet;\r\n\r\n\t@NotNull\r\n\tpublic nextTokens(s: ATNState, ctx?: PredictionContext): IntervalSet {\r\n\t\tif (ctx) {\r\n\t\t\tlet anal: LL1Analyzer = new LL1Analyzer(this);\r\n\t\t\tlet next: IntervalSet = anal.LOOK(s, ctx);\r\n\t\t\treturn next;\r\n\t\t} else {\r\n\t\t\tif (s.nextTokenWithinRule) {\r\n\t\t\t\treturn s.nextTokenWithinRule;\r\n\t\t\t}\r\n\r\n\t\t\ts.nextTokenWithinRule = this.nextTokens(s, PredictionContext.EMPTY_LOCAL);\r\n\t\t\ts.nextTokenWithinRule.setReadonly(true);\r\n\t\t\treturn s.nextTokenWithinRule;\r\n\t\t}\r\n\t}\r\n\r\n\tpublic addState(state: ATNState): void {\r\n\t\tstate.atn = this;\r\n\t\tstate.stateNumber = this.states.length;\r\n\t\tthis.states.push(state);\r\n\t}\r\n\r\n\tpublic removeState(@NotNull state: ATNState): void {\r\n\t\t// just replace the state, don't shift states in list\r\n\t\tlet invalidState = new InvalidState();\r\n\t\tinvalidState.atn = this;\r\n\t\tinvalidState.stateNumber = state.stateNumber;\r\n\t\tthis.states[state.stateNumber] = invalidState;\r\n\t}\r\n\r\n\tpublic defineMode(@NotNull name: string, @NotNull s: TokensStartState): void {\r\n\t\tthis.modeNameToStartState.set(name, s);\r\n\t\tthis.modeToStartState.push(s);\r\n\t\tthis.modeToDFA.push(new DFA(s));\r\n\t\tthis.defineDecisionState(s);\r\n\t}\r\n\r\n\tpublic defineDecisionState(@NotNull s: DecisionState): number {\r\n\t\tthis.decisionToState.push(s);\r\n\t\ts.decision = this.decisionToState.length - 1;\r\n\t\tthis.decisionToDFA.push(new DFA(s, s.decision));\r\n\t\treturn s.decision;\r\n\t}\r\n\r\n\tpublic getDecisionState(decision: number): DecisionState | undefined {\r\n\t\tif (this.decisionToState.length > 0) {\r\n\t\t\treturn this.decisionToState[decision];\r\n\t\t}\r\n\t\treturn undefined;\r\n\t}\r\n\r\n\tget numberOfDecisions(): number {\r\n\t\treturn 
this.decisionToState.length;\r\n\t}\r\n\r\n\t/**\r\n\t * Computes the set of input symbols which could follow ATN state number\r\n\t * `stateNumber` in the specified full `context`. This method\r\n\t * considers the complete parser context, but does not evaluate semantic\r\n\t * predicates (i.e. all predicates encountered during the calculation are\r\n\t * assumed true). If a path in the ATN exists from the starting state to the\r\n\t * {@link RuleStopState} of the outermost context without matching any\r\n\t * symbols, {@link Token#EOF} is added to the returned set.\r\n\t *\r\n\t * If `context` is `undefined`, it is treated as\r\n\t * {@link ParserRuleContext#EMPTY}.\r\n\t *\r\n\t * Note that this does NOT give you the set of all tokens that could\r\n\t * appear at a given token position in the input phrase. In other words, it\r\n\t * does not answer:\r\n\t *\r\n\t * > Given a specific partial input phrase, return the set of all\r\n\t * > tokens that can follow the last token in the input phrase.\r\n\t *\r\n\t * The big difference is that with just the input, the parser could land\r\n\t * right in the middle of a lookahead decision. Getting all\r\n\t * *possible* tokens given a partial input stream is a separate\r\n\t * computation. See https://github.com/antlr/antlr4/issues/1428\r\n\t *\r\n\t * For this function, we are specifying an ATN state and call stack to\r\n\t * compute what token(s) can come next and specifically: outside of a\r\n\t * lookahead decision. 
That is what you want for error reporting and\r\n\t * recovery upon parse error.\r\n\t *\r\n\t * @param stateNumber the ATN state number\r\n\t * @param context the full parse context\r\n\t * @returns The set of potentially valid input symbols which could follow the\r\n\t * specified state in the specified context.\r\n\t * @ if the ATN does not contain a state with\r\n\t * number `stateNumber`\r\n\t */\r\n\t@NotNull\r\n\tpublic getExpectedTokens(stateNumber: number, context: RuleContext | undefined): IntervalSet {\r\n\t\tif (stateNumber < 0 || stateNumber >= this.states.length) {\r\n\t\t\tthrow new RangeError(\"Invalid state number.\");\r\n\t\t}\r\n\r\n\t\tlet ctx: RuleContext | undefined = context;\r\n\t\tlet s: ATNState = this.states[stateNumber];\r\n\t\tlet following: IntervalSet = this.nextTokens(s);\r\n\t\tif (!following.contains(Token.EPSILON)) {\r\n\t\t\treturn following;\r\n\t\t}\r\n\r\n\t\tlet expected: IntervalSet = new IntervalSet();\r\n\t\texpected.addAll(following);\r\n\t\texpected.remove(Token.EPSILON);\r\n\t\twhile (ctx != null && ctx.invokingState >= 0 && following.contains(Token.EPSILON)) {\r\n\t\t\tlet invokingState: ATNState = this.states[ctx.invokingState];\r\n\t\t\tlet rt: RuleTransition = invokingState.transition(0) as RuleTransition;\r\n\t\t\tfollowing = this.nextTokens(rt.followState);\r\n\t\t\texpected.addAll(following);\r\n\t\t\texpected.remove(Token.EPSILON);\r\n\t\t\tctx = ctx._parent;\r\n\t\t}\r\n\r\n\t\tif (following.contains(Token.EPSILON)) {\r\n\t\t\texpected.add(Token.EOF);\r\n\t\t}\r\n\r\n\t\treturn expected;\r\n\t}\r\n}\r\n\r\nexport namespace ATN {\r\n\texport const INVALID_ALT_NUMBER: number = 0;\r\n}\r\n", "/*!\r\n * Copyright 2016 The ANTLR Project. All rights reserved.\r\n * Licensed under the BSD-3-Clause license. 
See LICENSE file in the project root for license information.\r\n */\r\n\r\n// ConvertTo-TS run at 2016-10-04T11:26:29.7613038-07:00\r\n\r\nimport { Lexer } from \"../Lexer\";\r\nimport { LexerAction } from \"./LexerAction\";\r\nimport { LexerActionType } from \"./LexerActionType\";\r\nimport { MurmurHash } from \"../misc/MurmurHash\";\r\nimport { NotNull, Override } from \"../Decorators\";\r\n\r\n/**\r\n * This implementation of {@link LexerAction} is used for tracking input offsets\r\n * for position-dependent actions within a {@link LexerActionExecutor}.\r\n *\r\n * This action is not serialized as part of the ATN, and is only required for\r\n * position-dependent lexer actions which appear at a location other than the\r\n * end of a rule. For more information about DFA optimizations employed for\r\n * lexer actions, see {@link LexerActionExecutor#append} and\r\n * {@link LexerActionExecutor#fixOffsetBeforeMatch}.\r\n *\r\n * @author Sam Harwell\r\n * @since 4.2\r\n */\r\nexport class LexerIndexedCustomAction implements LexerAction {\r\n\tprivate readonly _offset: number;\r\n\tprivate readonly _action: LexerAction;\r\n\r\n\t/**\r\n\t * Constructs a new indexed custom action by associating a character offset\r\n\t * with a {@link LexerAction}.\r\n\t *\r\n\t * Note: This class is only required for lexer actions for which\r\n\t * {@link LexerAction#isPositionDependent} returns `true`.\r\n\t *\r\n\t * @param offset The offset into the input {@link CharStream}, relative to\r\n\t * the token start index, at which the specified lexer action should be\r\n\t * executed.\r\n\t * @param action The lexer action to execute at a particular offset in the\r\n\t * input {@link CharStream}.\r\n\t */\r\n\tconstructor(offset: number, @NotNull action: LexerAction) {\r\n\t\tthis._offset = offset;\r\n\t\tthis._action = action;\r\n\t}\r\n\r\n\t/**\r\n\t * Gets the location in the input {@link CharStream} at which the lexer\r\n\t * action should be executed. 
The value is interpreted as an offset relative\r\n\t * to the token start index.\r\n\t *\r\n\t * @returns The location in the input {@link CharStream} at which the lexer\r\n\t * action should be executed.\r\n\t */\r\n\tget offset(): number {\r\n\t\treturn this._offset;\r\n\t}\r\n\r\n\t/**\r\n\t * Gets the lexer action to execute.\r\n\t *\r\n\t * @returns A {@link LexerAction} object which executes the lexer action.\r\n\t */\r\n\t@NotNull\r\n\tget action(): LexerAction {\r\n\t\treturn this._action;\r\n\t}\r\n\r\n\t/**\r\n\t * {@inheritDoc}\r\n\t *\r\n\t * @returns This method returns the result of calling {@link #getActionType}\r\n\t * on the {@link LexerAction} returned by {@link #getAction}.\r\n\t */\r\n\t@Override\r\n\tget actionType(): LexerActionType {\r\n\t\treturn this._action.actionType;\r\n\t}\r\n\r\n\t/**\r\n\t * {@inheritDoc}\r\n\t * @returns This method returns `true`.\r\n\t */\r\n\t@Override\r\n\tget isPositionDependent(): boolean {\r\n\t\treturn true;\r\n\t}\r\n\r\n\t/**\r\n\t * {@inheritDoc}\r\n\t *\r\n\t * This method calls {@link #execute} on the result of {@link #getAction}\r\n\t * using the provided `lexer`.\r\n\t */\r\n\t@Override\r\n\tpublic execute(lexer: Lexer): void {\r\n\t\t// assume the input stream position was properly set by the calling code\r\n\t\tthis._action.execute(lexer);\r\n\t}\r\n\r\n\t@Override\r\n\tpublic hashCode(): number {\r\n\t\tlet hash: number = MurmurHash.initialize();\r\n\t\thash = MurmurHash.update(hash, this._offset);\r\n\t\thash = MurmurHash.update(hash, this._action);\r\n\t\treturn MurmurHash.finish(hash, 2);\r\n\t}\r\n\r\n\t@Override\r\n\tpublic equals(obj: any): boolean {\r\n\t\tif (obj === this) {\r\n\t\t\treturn true;\r\n\t\t} else if (!(obj instanceof LexerIndexedCustomAction)) {\r\n\t\t\treturn false;\r\n\t\t}\r\n\r\n\t\treturn this._offset === obj._offset\r\n\t\t\t&& this._action.equals(obj._action);\r\n\t}\r\n}\r\n", "/*!\r\n * Copyright 2016 The ANTLR Project. 
All rights reserved.\r\n * Licensed under the BSD-3-Clause license. See LICENSE file in the project root for license information.\r\n */\r\n\r\n// ConvertTo-TS run at 2016-10-04T11:26:28.8810453-07:00\r\n\r\nimport { ArrayEqualityComparator } from \"../misc/ArrayEqualityComparator\";\r\nimport { CharStream } from \"../CharStream\";\r\nimport { Lexer } from \"../Lexer\";\r\nimport { LexerAction } from \"./LexerAction\";\r\nimport { LexerIndexedCustomAction } from \"./LexerIndexedCustomAction\";\r\nimport { MurmurHash } from \"../misc/MurmurHash\";\r\nimport { NotNull, Override } from \"../Decorators\";\r\n\r\n/**\r\n * Represents an executor for a sequence of lexer actions which traversed during\r\n * the matching operation of a lexer rule (token).\r\n *\r\n * The executor tracks position information for position-dependent lexer actions\r\n * efficiently, ensuring that actions appearing only at the end of the rule do\r\n * not cause bloating of the {@link DFA} created for the lexer.\r\n *\r\n * @author Sam Harwell\r\n * @since 4.2\r\n */\r\nexport class LexerActionExecutor {\r\n\t@NotNull\r\n\tprivate _lexerActions: LexerAction[];\r\n\r\n\t/**\r\n\t * Caches the result of {@link #hashCode} since the hash code is an element\r\n\t * of the performance-critical {@link LexerATNConfig#hashCode} operation.\r\n\t */\r\n\tprivate cachedHashCode: number;\r\n\r\n\t/**\r\n\t * Constructs an executor for a sequence of {@link LexerAction} actions.\r\n\t * @param lexerActions The lexer actions to execute.\r\n\t */\r\n\tconstructor(@NotNull lexerActions: LexerAction[]) {\r\n\t\tthis._lexerActions = lexerActions;\r\n\r\n\t\tlet hash: number = MurmurHash.initialize();\r\n\t\tfor (let lexerAction of lexerActions) {\r\n\t\t\thash = MurmurHash.update(hash, lexerAction);\r\n\t\t}\r\n\r\n\t\tthis.cachedHashCode = MurmurHash.finish(hash, lexerActions.length);\r\n\t}\r\n\r\n\t/**\r\n\t * Creates a {@link LexerActionExecutor} which executes the actions for\r\n\t * the input 
`lexerActionExecutor` followed by a specified\r\n\t * `lexerAction`.\r\n\t *\r\n\t * @param lexerActionExecutor The executor for actions already traversed by\r\n\t * the lexer while matching a token within a particular\r\n\t * {@link ATNConfig}. If this is `undefined`, the method behaves as though\r\n\t * it were an empty executor.\r\n\t * @param lexerAction The lexer action to execute after the actions\r\n\t * specified in `lexerActionExecutor`.\r\n\t *\r\n\t * @returns A {@link LexerActionExecutor} for executing the combine actions\r\n\t * of `lexerActionExecutor` and `lexerAction`.\r\n\t */\r\n\t@NotNull\r\n\tpublic static append(lexerActionExecutor: LexerActionExecutor | undefined, @NotNull lexerAction: LexerAction): LexerActionExecutor {\r\n\t\tif (!lexerActionExecutor) {\r\n\t\t\treturn new LexerActionExecutor([lexerAction]);\r\n\t\t}\r\n\r\n\t\tlet lexerActions = lexerActionExecutor._lexerActions.slice(0);\r\n\t\tlexerActions.push(lexerAction);\r\n\t\treturn new LexerActionExecutor(lexerActions);\r\n\t}\r\n\r\n\t/**\r\n\t * Creates a {@link LexerActionExecutor} which encodes the current offset\r\n\t * for position-dependent lexer actions.\r\n\t *\r\n\t * Normally, when the executor encounters lexer actions where\r\n\t * {@link LexerAction#isPositionDependent} returns `true`, it calls\r\n\t * {@link IntStream#seek} on the input {@link CharStream} to set the input\r\n\t * position to the *end* of the current token. This behavior provides\r\n\t * for efficient DFA representation of lexer actions which appear at the end\r\n\t * of a lexer rule, even when the lexer rule matches a variable number of\r\n\t * characters.\r\n\t *\r\n\t * Prior to traversing a match transition in the ATN, the current offset\r\n\t * from the token start index is assigned to all position-dependent lexer\r\n\t * actions which have not already been assigned a fixed offset. 
By storing\r\n\t * the offsets relative to the token start index, the DFA representation of\r\n\t * lexer actions which appear in the middle of tokens remains efficient due\r\n\t * to sharing among tokens of the same length, regardless of their absolute\r\n\t * position in the input stream.\r\n\t *\r\n\t * If the current executor already has offsets assigned to all\r\n\t * position-dependent lexer actions, the method returns `this`.\r\n\t *\r\n\t * @param offset The current offset to assign to all position-dependent\r\n\t * lexer actions which do not already have offsets assigned.\r\n\t *\r\n\t * @returns A {@link LexerActionExecutor} which stores input stream offsets\r\n\t * for all position-dependent lexer actions.\r\n\t */\r\n\tpublic fixOffsetBeforeMatch(offset: number): LexerActionExecutor {\r\n\t\tlet updatedLexerActions: LexerAction[] | undefined;\r\n\t\tfor (let i = 0; i < this._lexerActions.length; i++) {\r\n\t\t\tif (this._lexerActions[i].isPositionDependent && !(this._lexerActions[i] instanceof LexerIndexedCustomAction)) {\r\n\t\t\t\tif (!updatedLexerActions) {\r\n\t\t\t\t\tupdatedLexerActions = this._lexerActions.slice(0);\r\n\t\t\t\t}\r\n\r\n\t\t\t\tupdatedLexerActions[i] = new LexerIndexedCustomAction(offset, this._lexerActions[i]);\r\n\t\t\t}\r\n\t\t}\r\n\r\n\t\tif (!updatedLexerActions) {\r\n\t\t\treturn this;\r\n\t\t}\r\n\r\n\t\treturn new LexerActionExecutor(updatedLexerActions);\r\n\t}\r\n\r\n\t/**\r\n\t * Gets the lexer actions to be executed by this executor.\r\n\t * @returns The lexer actions to be executed by this executor.\r\n\t */\r\n\t@NotNull\r\n\tget lexerActions(): LexerAction[] {\r\n\t\treturn this._lexerActions;\r\n\t}\r\n\r\n\t/**\r\n\t * Execute the actions encapsulated by this executor within the context of a\r\n\t * particular {@link Lexer}.\r\n\t *\r\n\t * This method calls {@link IntStream#seek} to set the position of the\r\n\t * `input` {@link CharStream} prior to calling\r\n\t * {@link LexerAction#execute} on a 
position-dependent action. Before the\r\n\t * method returns, the input position will be restored to the same position\r\n\t * it was in when the method was invoked.\r\n\t *\r\n\t * @param lexer The lexer instance.\r\n\t * @param input The input stream which is the source for the current token.\r\n\t * When this method is called, the current {@link IntStream#index} for\r\n\t * `input` should be the start of the following token, i.e. 1\r\n\t * character past the end of the current token.\r\n\t * @param startIndex The token start index. This value may be passed to\r\n\t * {@link IntStream#seek} to set the `input` position to the beginning\r\n\t * of the token.\r\n\t */\r\n\tpublic execute(@NotNull lexer: Lexer, input: CharStream, startIndex: number): void {\r\n\t\tlet requiresSeek: boolean = false;\r\n\t\tlet stopIndex: number = input.index;\r\n\t\ttry {\r\n\t\t\tfor (let lexerAction of this._lexerActions) {\r\n\t\t\t\tif (lexerAction instanceof LexerIndexedCustomAction) {\r\n\t\t\t\t\tlet offset: number = lexerAction.offset;\r\n\t\t\t\t\tinput.seek(startIndex + offset);\r\n\t\t\t\t\tlexerAction = lexerAction.action;\r\n\t\t\t\t\trequiresSeek = (startIndex + offset) !== stopIndex;\r\n\t\t\t\t} else if (lexerAction.isPositionDependent) {\r\n\t\t\t\t\tinput.seek(stopIndex);\r\n\t\t\t\t\trequiresSeek = false;\r\n\t\t\t\t}\r\n\r\n\t\t\t\tlexerAction.execute(lexer);\r\n\t\t\t}\r\n\t\t} finally {\r\n\t\t\tif (requiresSeek) {\r\n\t\t\t\tinput.seek(stopIndex);\r\n\t\t\t}\r\n\t\t}\r\n\t}\r\n\r\n\t@Override\r\n\tpublic hashCode(): number {\r\n\t\treturn this.cachedHashCode;\r\n\t}\r\n\r\n\t@Override\r\n\tpublic equals(obj: any): boolean {\r\n\t\tif (obj === this) {\r\n\t\t\treturn true;\r\n\t\t} else if (!(obj instanceof LexerActionExecutor)) {\r\n\t\t\treturn false;\r\n\t\t}\r\n\r\n\t\treturn this.cachedHashCode === obj.cachedHashCode\r\n\t\t\t&& ArrayEqualityComparator.INSTANCE.equals(this._lexerActions, obj._lexerActions);\r\n\t}\r\n}\r\n", "/*!\r\n * Copyright 2016 The 
ANTLR Project. All rights reserved.\r\n * Licensed under the BSD-3-Clause license. See LICENSE file in the project root for license information.\r\n */\r\n\r\n// ConvertTo-TS run at 2016-10-04T11:26:52.0961136-07:00\r\n\r\nimport { ATNConfigSet } from \"./atn/ATNConfigSet\";\r\nimport { RecognitionException } from \"./RecognitionException\";\r\nimport { NotNull, Override } from \"./Decorators\";\r\nimport { Lexer } from \"./Lexer\";\r\nimport { CharStream } from \"./CharStream\";\r\nimport { Interval } from \"./misc/Interval\";\r\nimport * as Utils from \"./misc/Utils\";\r\n\r\nexport class LexerNoViableAltException extends RecognitionException {\r\n\t//private static serialVersionUID: number = -730999203913001726L;\r\n\r\n\t/** Matching attempted at what input index? */\r\n\tprivate _startIndex: number;\r\n\r\n\t/** Which configurations did we try at input.index that couldn't match input.LA(1)? */\r\n\tprivate _deadEndConfigs?: ATNConfigSet;\r\n\r\n\tconstructor(\r\n\t\tlexer: Lexer | undefined,\r\n\t\t@NotNull input: CharStream,\r\n\t\tstartIndex: number,\r\n\t\tdeadEndConfigs: ATNConfigSet | undefined) {\r\n\t\tsuper(lexer, input);\r\n\t\tthis._startIndex = startIndex;\r\n\t\tthis._deadEndConfigs = deadEndConfigs;\r\n\t}\r\n\r\n\tget startIndex(): number {\r\n\t\treturn this._startIndex;\r\n\t}\r\n\r\n\tget deadEndConfigs(): ATNConfigSet | undefined {\r\n\t\treturn this._deadEndConfigs;\r\n\t}\r\n\r\n\t@Override\r\n\tget inputStream(): CharStream {\r\n\t\treturn super.inputStream as CharStream;\r\n\t}\r\n\r\n\t@Override\r\n\tpublic toString(): string {\r\n\t\tlet symbol = \"\";\r\n\t\tif (this._startIndex >= 0 && this._startIndex < this.inputStream.size) {\r\n\t\t\tsymbol = this.inputStream.getText(Interval.of(this._startIndex, this._startIndex));\r\n\t\t\tsymbol = Utils.escapeWhitespace(symbol, false);\r\n\t\t}\r\n\r\n\t\t// return String.format(Locale.getDefault(), \"%s('%s')\", LexerNoViableAltException.class.getSimpleName(), symbol);\r\n\t\treturn 
`LexerNoViableAltException('${symbol}')`;\r\n\t}\r\n}\r\n", "/*!\r\n * Copyright 2016 The ANTLR Project. All rights reserved.\r\n * Licensed under the BSD-3-Clause license. See LICENSE file in the project root for license information.\r\n */\r\n\r\n// ConvertTo-TS run at 2016-10-04T11:26:30.9444556-07:00\r\n\r\nimport { ATNConfig } from \"./ATNConfig\";\r\nimport { ATNConfigSet } from \"./ATNConfigSet\";\r\nimport { Override } from \"../Decorators\";\r\n\r\n/**\r\n *\r\n * @author Sam Harwell\r\n */\r\nexport class OrderedATNConfigSet extends ATNConfigSet {\r\n\r\n\tconstructor();\r\n\tconstructor(set: ATNConfigSet, readonly: boolean);\r\n\tconstructor(set?: ATNConfigSet, readonly?: boolean) {\r\n\t\tif (set != null && readonly != null) {\r\n\t\t\tsuper(set, readonly);\r\n\t\t} else {\r\n\t\t\tsuper();\r\n\t\t}\r\n\t}\r\n\r\n\t@Override\r\n\tpublic clone(readonly: boolean): ATNConfigSet {\r\n\t\tlet copy: OrderedATNConfigSet = new OrderedATNConfigSet(this, readonly);\r\n\t\tif (!readonly && this.isReadOnly) {\r\n\t\t\tcopy.addAll(this);\r\n\t\t}\r\n\r\n\t\treturn copy;\r\n\t}\r\n\r\n\t@Override\r\n\tprotected getKey(e: ATNConfig): { state: number, alt: number } {\r\n\t\t// This is a specially crafted key to ensure configurations are only merged if they are equal\r\n\t\treturn { state: 0, alt: e.hashCode() };\r\n\t}\r\n\r\n\t@Override\r\n\tprotected canMerge(left: ATNConfig, leftKey: { state: number, alt: number }, right: ATNConfig): boolean {\r\n\t\treturn left.equals(right);\r\n\t}\r\n\r\n}\r\n", "/*!\r\n * Copyright 2016 The ANTLR Project. All rights reserved.\r\n * Licensed under the BSD-3-Clause license. 
See LICENSE file in the project root for license information.\r\n */\r\n\r\n// ConvertTo-TS run at 2016-10-04T11:26:29.1083066-07:00\r\n\r\nimport { AcceptStateInfo } from \"../dfa/AcceptStateInfo\";\r\nimport { ActionTransition } from \"./ActionTransition\";\r\nimport { ATN } from \"./ATN\";\r\nimport { ATNConfig } from \"./ATNConfig\";\r\nimport { ATNConfigSet } from \"./ATNConfigSet\";\r\nimport { ATNSimulator } from \"./ATNSimulator\";\r\nimport { ATNState } from \"./ATNState\";\r\nimport { CharStream } from \"../CharStream\";\r\nimport { DFA } from \"../dfa/DFA\";\r\nimport { DFAState } from \"../dfa/DFAState\";\r\nimport { Interval } from \"../misc/Interval\";\r\nimport { IntStream } from \"../IntStream\";\r\nimport { Lexer } from \"../Lexer\";\r\nimport { LexerActionExecutor } from \"./LexerActionExecutor\";\r\nimport { LexerNoViableAltException } from \"../LexerNoViableAltException\";\r\nimport { NotNull, Override } from \"../Decorators\";\r\nimport { OrderedATNConfigSet } from \"./OrderedATNConfigSet\";\r\nimport { PredictionContext } from \"./PredictionContext\";\r\nimport { PredicateTransition } from \"./PredicateTransition\";\r\nimport { RuleStopState } from \"./RuleStopState\";\r\nimport { RuleTransition } from \"./RuleTransition\";\r\nimport { Token } from \"../Token\";\r\nimport { Transition } from \"./Transition\";\r\nimport { TransitionType } from \"./TransitionType\";\r\nimport * as assert from \"assert\";\r\n\r\n/** \"dup\" of ParserInterpreter */\r\nexport class LexerATNSimulator extends ATNSimulator {\r\n\tpublic optimize_tail_calls: boolean = true;\r\n\r\n\tprotected recog: Lexer | undefined;\r\n\r\n\t/** The current token's starting index into the character stream.\r\n\t * Shared across DFA to ATN simulation in case the ATN fails and the\r\n\t * DFA did not have a previous accept state. 
In this case, we use the\r\n\t * ATN-generated exception object.\r\n\t */\r\n\tprotected startIndex: number = -1;\r\n\r\n\t/** line number 1..n within the input */\r\n\tprivate _line: number = 1;\r\n\r\n\t/** The index of the character relative to the beginning of the line 0..n-1 */\r\n\tprivate _charPositionInLine: number = 0;\r\n\r\n\tprotected mode: number = Lexer.DEFAULT_MODE;\r\n\r\n\t/** Used during DFA/ATN exec to record the most recent accept configuration info */\r\n\t@NotNull\r\n\tprotected prevAccept: LexerATNSimulator.SimState = new LexerATNSimulator.SimState();\r\n\r\n\tconstructor(/*@NotNull*/ atn: ATN);\r\n\tconstructor(/*@NotNull*/ atn: ATN, recog: Lexer | undefined);\r\n\tconstructor(@NotNull atn: ATN, recog?: Lexer) {\r\n\t\tsuper(atn);\r\n\t\tthis.recog = recog;\r\n\t}\r\n\r\n\tpublic copyState(@NotNull simulator: LexerATNSimulator): void {\r\n\t\tthis._charPositionInLine = simulator.charPositionInLine;\r\n\t\tthis._line = simulator._line;\r\n\t\tthis.mode = simulator.mode;\r\n\t\tthis.startIndex = simulator.startIndex;\r\n\t}\r\n\r\n\tpublic match(@NotNull input: CharStream, mode: number): number {\r\n\t\tthis.mode = mode;\r\n\t\tlet mark: number = input.mark();\r\n\t\ttry {\r\n\t\t\tthis.startIndex = input.index;\r\n\t\t\tthis.prevAccept.reset();\r\n\t\t\tlet s0: DFAState | undefined = this.atn.modeToDFA[mode].s0;\r\n\t\t\tif (s0 == null) {\r\n\t\t\t\treturn this.matchATN(input);\r\n\t\t\t}\r\n\t\t\telse {\r\n\t\t\t\treturn this.execATN(input, s0);\r\n\t\t\t}\r\n\t\t}\r\n\t\tfinally {\r\n\t\t\tinput.release(mark);\r\n\t\t}\r\n\t}\r\n\r\n\t@Override\r\n\tpublic reset(): void {\r\n\t\tthis.prevAccept.reset();\r\n\t\tthis.startIndex = -1;\r\n\t\tthis._line = 1;\r\n\t\tthis._charPositionInLine = 0;\r\n\t\tthis.mode = Lexer.DEFAULT_MODE;\r\n\t}\r\n\r\n\tprotected matchATN(@NotNull input: CharStream): number {\r\n\t\tlet startState: ATNState = this.atn.modeToStartState[this.mode];\r\n\r\n\t\tif (LexerATNSimulator.debug) 
{\r\n\t\t\tconsole.log(`matchATN mode ${this.mode} start: ${startState}`);\r\n\t\t}\r\n\r\n\t\tlet old_mode: number = this.mode;\r\n\r\n\t\tlet s0_closure: ATNConfigSet = this.computeStartState(input, startState);\r\n\t\tlet suppressEdge: boolean = s0_closure.hasSemanticContext;\r\n\t\tif (suppressEdge) {\r\n\t\t\ts0_closure.hasSemanticContext = false;\r\n\t\t}\r\n\r\n\t\tlet next: DFAState = this.addDFAState(s0_closure);\r\n\t\tif (!suppressEdge) {\r\n\t\t\tlet dfa = this.atn.modeToDFA[this.mode];\r\n\t\t\tif (!dfa.s0) {\r\n\t\t\t\tdfa.s0 = next;\r\n\t\t\t} else {\r\n\t\t\t\tnext = dfa.s0;\r\n\t\t\t}\r\n\t\t}\r\n\r\n\t\tlet predict: number = this.execATN(input, next);\r\n\r\n\t\tif (LexerATNSimulator.debug) {\r\n\t\t\tconsole.log(`DFA after matchATN: ${this.atn.modeToDFA[old_mode].toLexerString()}`);\r\n\t\t}\r\n\r\n\t\treturn predict;\r\n\t}\r\n\r\n\tprotected execATN(@NotNull input: CharStream, @NotNull ds0: DFAState): number {\r\n\t\t// console.log(\"enter exec index \"+input.index+\" from \"+ds0.configs);\r\n\t\tif (LexerATNSimulator.debug) {\r\n\t\t\tconsole.log(`start state closure=${ds0.configs}`);\r\n\t\t}\r\n\r\n\t\tif (ds0.isAcceptState) {\r\n\t\t\t// allow zero-length tokens\r\n\t\t\tthis.captureSimState(this.prevAccept, input, ds0);\r\n\t\t}\r\n\r\n\t\tlet t: number = input.LA(1);\r\n\t\t// @NotNull\r\n\t\tlet s: DFAState = ds0; // s is current/from DFA state\r\n\r\n\t\twhile (true) { // while more work\r\n\t\t\tif (LexerATNSimulator.debug) {\r\n\t\t\t\tconsole.log(`execATN loop starting closure: ${s.configs}`);\r\n\t\t\t}\r\n\r\n\t\t\t// As we move src->trg, src->trg, we keep track of the previous trg to\r\n\t\t\t// avoid looking up the DFA state again, which is expensive.\r\n\t\t\t// If the previous target was already part of the DFA, we might\r\n\t\t\t// be able to avoid doing a reach operation upon t. If s!=null,\r\n\t\t\t// it means that semantic predicates didn't prevent us from\r\n\t\t\t// creating a DFA state. 
Once we know s!=null, we check to see if\r\n\t\t\t// the DFA state has an edge already for t. If so, we can just reuse\r\n\t\t\t// it's configuration set; there's no point in re-computing it.\r\n\t\t\t// This is kind of like doing DFA simulation within the ATN\r\n\t\t\t// simulation because DFA simulation is really just a way to avoid\r\n\t\t\t// computing reach/closure sets. Technically, once we know that\r\n\t\t\t// we have a previously added DFA state, we could jump over to\r\n\t\t\t// the DFA simulator. But, that would mean popping back and forth\r\n\t\t\t// a lot and making things more complicated algorithmically.\r\n\t\t\t// This optimization makes a lot of sense for loops within DFA.\r\n\t\t\t// A character will take us back to an existing DFA state\r\n\t\t\t// that already has lots of edges out of it. e.g., .* in comments.\r\n\t\t\tlet target: DFAState | undefined = this.getExistingTargetState(s, t);\r\n\t\t\tif (target == null) {\r\n\t\t\t\ttarget = this.computeTargetState(input, s, t);\r\n\t\t\t}\r\n\r\n\t\t\tif (target === ATNSimulator.ERROR) {\r\n\t\t\t\tbreak;\r\n\t\t\t}\r\n\r\n\t\t\t// If this is a consumable input element, make sure to consume before\r\n\t\t\t// capturing the accept state so the input index, line, and char\r\n\t\t\t// position accurately reflect the state of the interpreter at the\r\n\t\t\t// end of the token.\r\n\t\t\tif (t !== IntStream.EOF) {\r\n\t\t\t\tthis.consume(input);\r\n\t\t\t}\r\n\r\n\t\t\tif (target.isAcceptState) {\r\n\t\t\t\tthis.captureSimState(this.prevAccept, input, target);\r\n\t\t\t\tif (t === IntStream.EOF) {\r\n\t\t\t\t\tbreak;\r\n\t\t\t\t}\r\n\t\t\t}\r\n\r\n\t\t\tt = input.LA(1);\r\n\t\t\ts = target; // flip; current DFA target becomes new src/from state\r\n\t\t}\r\n\r\n\t\treturn this.failOrAccept(this.prevAccept, input, s.configs, t);\r\n\t}\r\n\r\n\t/**\r\n\t * Get an existing target state for an edge in the DFA. 
If the target state\r\n\t * for the edge has not yet been computed or is otherwise not available,\r\n\t * this method returns `undefined`.\r\n\t *\r\n\t * @param s The current DFA state\r\n\t * @param t The next input symbol\r\n\t * @returns The existing target DFA state for the given input symbol\r\n\t * `t`, or `undefined` if the target state for this edge is not\r\n\t * already cached\r\n\t */\r\n\tprotected getExistingTargetState(@NotNull s: DFAState, t: number): DFAState | undefined {\r\n\t\tlet target: DFAState | undefined = s.getTarget(t);\r\n\t\tif (LexerATNSimulator.debug && target != null) {\r\n\t\t\tconsole.log(\"reuse state \" + s.stateNumber +\r\n\t\t\t\t\" edge to \" + target.stateNumber);\r\n\t\t}\r\n\r\n\t\treturn target;\r\n\t}\r\n\r\n\t/**\r\n\t * Compute a target state for an edge in the DFA, and attempt to add the\r\n\t * computed state and corresponding edge to the DFA.\r\n\t *\r\n\t * @param input The input stream\r\n\t * @param s The current DFA state\r\n\t * @param t The next input symbol\r\n\t *\r\n\t * @returns The computed target DFA state for the given input symbol\r\n\t * `t`. 
If `t` does not lead to a valid DFA state, this method\r\n\t * returns {@link #ERROR}.\r\n\t */\r\n\t@NotNull\r\n\tprotected computeTargetState(@NotNull input: CharStream, @NotNull s: DFAState, t: number): DFAState {\r\n\t\tlet reach: ATNConfigSet = new OrderedATNConfigSet();\r\n\r\n\t\t// if we don't find an existing DFA state\r\n\t\t// Fill reach starting from closure, following t transitions\r\n\t\tthis.getReachableConfigSet(input, s.configs, reach, t);\r\n\r\n\t\tif (reach.isEmpty) { // we got nowhere on t from s\r\n\t\t\tif (!reach.hasSemanticContext) {\r\n\t\t\t\t// we got nowhere on t, don't throw out this knowledge; it'd\r\n\t\t\t\t// cause a failover from DFA later.\r\n\t\t\t\tthis.addDFAEdge(s, t, ATNSimulator.ERROR);\r\n\t\t\t}\r\n\r\n\t\t\t// stop when we can't match any more char\r\n\t\t\treturn ATNSimulator.ERROR;\r\n\t\t}\r\n\r\n\t\t// Add an edge from s to target DFA found/created for reach\r\n\t\treturn this.addDFAEdge(s, t, reach);\r\n\t}\r\n\r\n\tprotected failOrAccept(\r\n\t\tprevAccept: LexerATNSimulator.SimState, input: CharStream,\r\n\t\treach: ATNConfigSet, t: number): number {\r\n\t\tif (prevAccept.dfaState != null) {\r\n\t\t\tlet lexerActionExecutor: LexerActionExecutor | undefined = prevAccept.dfaState.lexerActionExecutor;\r\n\t\t\tthis.accept(input, lexerActionExecutor, this.startIndex,\r\n\t\t\t\tprevAccept.index, prevAccept.line, prevAccept.charPos);\r\n\t\t\treturn prevAccept.dfaState.prediction;\r\n\t\t}\r\n\t\telse {\r\n\t\t\t// if no accept and EOF is first char, return EOF\r\n\t\t\tif (t === IntStream.EOF && input.index === this.startIndex) {\r\n\t\t\t\treturn Token.EOF;\r\n\t\t\t}\r\n\r\n\t\t\tthrow new LexerNoViableAltException(this.recog, input, this.startIndex, reach);\r\n\t\t}\r\n\t}\r\n\r\n\t/** Given a starting configuration set, figure out all ATN configurations\r\n\t * we can reach upon input `t`. 
Parameter `reach` is a return\r\n\t * parameter.\r\n\t */\r\n\tprotected getReachableConfigSet(@NotNull input: CharStream, @NotNull closure: ATNConfigSet, @NotNull reach: ATNConfigSet, t: number): void {\r\n\t\t// this is used to skip processing for configs which have a lower priority\r\n\t\t// than a config that already reached an accept state for the same rule\r\n\t\tlet skipAlt: number = ATN.INVALID_ALT_NUMBER;\r\n\t\tfor (let c of closure) {\r\n\t\t\tlet currentAltReachedAcceptState: boolean = c.alt === skipAlt;\r\n\t\t\tif (currentAltReachedAcceptState && c.hasPassedThroughNonGreedyDecision) {\r\n\t\t\t\tcontinue;\r\n\t\t\t}\r\n\r\n\t\t\tif (LexerATNSimulator.debug) {\r\n\t\t\t\tconsole.log(`testing ${this.getTokenName(t)} at ${c.toString(this.recog, true)}`);\r\n\t\t\t}\r\n\r\n\t\t\tlet n: number = c.state.numberOfOptimizedTransitions;\r\n\t\t\tfor (let ti = 0; ti < n; ti++) { // for each optimized transition\r\n\t\t\t\tlet trans: Transition = c.state.getOptimizedTransition(ti);\r\n\t\t\t\tlet target: ATNState | undefined = this.getReachableTarget(trans, t);\r\n\t\t\t\tif (target != null) {\r\n\t\t\t\t\tlet lexerActionExecutor: LexerActionExecutor | undefined = c.lexerActionExecutor;\r\n\t\t\t\t\tlet config: ATNConfig;\r\n\t\t\t\t\tif (lexerActionExecutor != null) {\r\n\t\t\t\t\t\tlexerActionExecutor = lexerActionExecutor.fixOffsetBeforeMatch(input.index - this.startIndex);\r\n\t\t\t\t\t\tconfig = c.transform(target, true, lexerActionExecutor);\r\n\t\t\t\t\t} else {\r\n\t\t\t\t\t\tassert(c.lexerActionExecutor == null);\r\n\t\t\t\t\t\tconfig = c.transform(target, true);\r\n\t\t\t\t\t}\r\n\r\n\t\t\t\t\tlet treatEofAsEpsilon: boolean = t === IntStream.EOF;\r\n\t\t\t\t\tif (this.closure(input, config, reach, currentAltReachedAcceptState, true, treatEofAsEpsilon)) {\r\n\t\t\t\t\t\t// any remaining configs for this alt have a lower priority than\r\n\t\t\t\t\t\t// the one that just reached an accept state.\r\n\t\t\t\t\t\tskipAlt = 
c.alt;\r\n\t\t\t\t\t\tbreak;\r\n\t\t\t\t\t}\r\n\t\t\t\t}\r\n\t\t\t}\r\n\t\t}\r\n\t}\r\n\r\n\tprotected accept(\r\n\t\t@NotNull input: CharStream, lexerActionExecutor: LexerActionExecutor | undefined,\r\n\t\tstartIndex: number, index: number, line: number, charPos: number): void {\r\n\t\tif (LexerATNSimulator.debug) {\r\n\t\t\tconsole.log(`ACTION ${lexerActionExecutor}`);\r\n\t\t}\r\n\r\n\t\t// seek to after last char in token\r\n\t\tinput.seek(index);\r\n\t\tthis._line = line;\r\n\t\tthis._charPositionInLine = charPos;\r\n\r\n\t\tif (lexerActionExecutor != null && this.recog != null) {\r\n\t\t\tlexerActionExecutor.execute(this.recog, input, startIndex);\r\n\t\t}\r\n\t}\r\n\r\n\tprotected getReachableTarget(trans: Transition, t: number): ATNState | undefined {\r\n\t\tif (trans.matches(t, Lexer.MIN_CHAR_VALUE, Lexer.MAX_CHAR_VALUE)) {\r\n\t\t\treturn trans.target;\r\n\t\t}\r\n\r\n\t\treturn undefined;\r\n\t}\r\n\r\n\t@NotNull\r\n\tprotected computeStartState(\r\n\t\t@NotNull input: CharStream,\r\n\t\t@NotNull p: ATNState): ATNConfigSet {\r\n\t\tlet initialContext: PredictionContext = PredictionContext.EMPTY_FULL;\r\n\t\tlet configs: ATNConfigSet = new OrderedATNConfigSet();\r\n\t\tfor (let i = 0; i < p.numberOfTransitions; i++) {\r\n\t\t\tlet target: ATNState = p.transition(i).target;\r\n\t\t\tlet c: ATNConfig = ATNConfig.create(target, i + 1, initialContext);\r\n\t\t\tthis.closure(input, c, configs, false, false, false);\r\n\t\t}\r\n\t\treturn configs;\r\n\t}\r\n\r\n\t/**\r\n\t * Since the alternatives within any lexer decision are ordered by\r\n\t * preference, this method stops pursuing the closure as soon as an accept\r\n\t * state is reached. 
After the first accept state is reached by depth-first\r\n\t * search from `config`, all other (potentially reachable) states for\r\n\t * this rule would have a lower priority.\r\n\t *\r\n\t * @returns `true` if an accept state is reached, otherwise\r\n\t * `false`.\r\n\t */\r\n\tprotected closure(@NotNull input: CharStream, @NotNull config: ATNConfig, @NotNull configs: ATNConfigSet, currentAltReachedAcceptState: boolean, speculative: boolean, treatEofAsEpsilon: boolean): boolean {\r\n\t\tif (LexerATNSimulator.debug) {\r\n\t\t\tconsole.log(\"closure(\" + config.toString(this.recog, true) + \")\");\r\n\t\t}\r\n\r\n\t\tif (config.state instanceof RuleStopState) {\r\n\t\t\tif (LexerATNSimulator.debug) {\r\n\t\t\t\tif (this.recog != null) {\r\n\t\t\t\t\tconsole.log(`closure at ${this.recog.ruleNames[config.state.ruleIndex]} rule stop ${config}`);\r\n\t\t\t\t}\r\n\t\t\t\telse {\r\n\t\t\t\t\tconsole.log(`closure at rule stop ${config}`);\r\n\t\t\t\t}\r\n\t\t\t}\r\n\r\n\t\t\tlet context: PredictionContext = config.context;\r\n\t\t\tif (context.isEmpty) {\r\n\t\t\t\tconfigs.add(config);\r\n\t\t\t\treturn true;\r\n\t\t\t}\r\n\t\t\telse if (context.hasEmpty) {\r\n\t\t\t\tconfigs.add(config.transform(config.state, true, PredictionContext.EMPTY_FULL));\r\n\t\t\t\tcurrentAltReachedAcceptState = true;\r\n\t\t\t}\r\n\r\n\t\t\tfor (let i = 0; i < context.size; i++) {\r\n\t\t\t\tlet returnStateNumber: number = context.getReturnState(i);\r\n\t\t\t\tif (returnStateNumber === PredictionContext.EMPTY_FULL_STATE_KEY) {\r\n\t\t\t\t\tcontinue;\r\n\t\t\t\t}\r\n\r\n\t\t\t\tlet newContext: PredictionContext = context.getParent(i); // \"pop\" return state\r\n\t\t\t\tlet returnState: ATNState = this.atn.states[returnStateNumber];\r\n\t\t\t\tlet c: ATNConfig = config.transform(returnState, false, newContext);\r\n\t\t\t\tcurrentAltReachedAcceptState = this.closure(input, c, configs, currentAltReachedAcceptState, speculative, treatEofAsEpsilon);\r\n\t\t\t}\r\n\r\n\t\t\treturn 
currentAltReachedAcceptState;\r\n\t\t}\r\n\r\n\t\t// optimization\r\n\t\tif (!config.state.onlyHasEpsilonTransitions) {\r\n\t\t\tif (!currentAltReachedAcceptState || !config.hasPassedThroughNonGreedyDecision) {\r\n\t\t\t\tconfigs.add(config);\r\n\t\t\t}\r\n\t\t}\r\n\r\n\t\tlet p: ATNState = config.state;\r\n\t\tfor (let i = 0; i < p.numberOfOptimizedTransitions; i++) {\r\n\t\t\tlet t: Transition = p.getOptimizedTransition(i);\r\n\t\t\tlet c: ATNConfig | undefined = this.getEpsilonTarget(input, config, t, configs, speculative, treatEofAsEpsilon);\r\n\t\t\tif (c != null) {\r\n\t\t\t\tcurrentAltReachedAcceptState = this.closure(input, c, configs, currentAltReachedAcceptState, speculative, treatEofAsEpsilon);\r\n\t\t\t}\r\n\t\t}\r\n\r\n\t\treturn currentAltReachedAcceptState;\r\n\t}\r\n\r\n\t// side-effect: can alter configs.hasSemanticContext\r\n\tprotected getEpsilonTarget(\r\n\t\t@NotNull input: CharStream,\r\n\t\t@NotNull config: ATNConfig,\r\n\t\t@NotNull t: Transition,\r\n\t\t@NotNull configs: ATNConfigSet,\r\n\t\tspeculative: boolean,\r\n\t\ttreatEofAsEpsilon: boolean): ATNConfig | undefined {\r\n\t\tlet c: ATNConfig | undefined;\r\n\r\n\t\tswitch (t.serializationType) {\r\n\t\tcase TransitionType.RULE:\r\n\t\t\tlet ruleTransition: RuleTransition = t as RuleTransition;\r\n\t\t\tif (this.optimize_tail_calls && ruleTransition.optimizedTailCall && !config.context.hasEmpty) {\r\n\t\t\t\tc = config.transform(t.target, true);\r\n\t\t\t}\r\n\t\t\telse {\r\n\t\t\t\tlet newContext: PredictionContext = config.context.getChild(ruleTransition.followState.stateNumber);\r\n\t\t\t\tc = config.transform(t.target, true, newContext);\r\n\t\t\t}\r\n\r\n\t\t\tbreak;\r\n\r\n\t\tcase TransitionType.PRECEDENCE:\r\n\t\t\tthrow new Error(\"Precedence predicates are not supported in lexers.\");\r\n\r\n\t\tcase TransitionType.PREDICATE:\r\n\t\t\t/* Track traversing semantic predicates. 
If we traverse,\r\n\t\t\t\twe cannot add a DFA state for this \"reach\" computation\r\n\t\t\t\tbecause the DFA would not test the predicate again in the\r\n\t\t\t\tfuture. Rather than creating collections of semantic predicates\r\n\t\t\t\tlike v3 and testing them on prediction, v4 will test them on the\r\n\t\t\t\tfly all the time using the ATN not the DFA. This is slower but\r\n\t\t\t\tsemantically it's not used that often. One of the key elements to\r\n\t\t\t\tthis predicate mechanism is not adding DFA states that see\r\n\t\t\t\tpredicates immediately afterwards in the ATN. For example,\r\n\r\n\t\t\t\ta : ID {p1}? | ID {p2}? ;\r\n\r\n\t\t\t\tshould create the start state for rule 'a' (to save start state\r\n\t\t\t\tcompetition), but should not create target of ID state. The\r\n\t\t\t\tcollection of ATN states the following ID references includes\r\n\t\t\t\tstates reached by traversing predicates. Since this is when we\r\n\t\t\t\ttest them, we cannot cash the DFA state target of ID.\r\n\t\t\t*/\r\n\t\t\tlet pt: PredicateTransition = t as PredicateTransition;\r\n\t\t\tif (LexerATNSimulator.debug) {\r\n\t\t\t\tconsole.log(\"EVAL rule \" + pt.ruleIndex + \":\" + pt.predIndex);\r\n\t\t\t}\r\n\t\t\tconfigs.hasSemanticContext = true;\r\n\t\t\tif (this.evaluatePredicate(input, pt.ruleIndex, pt.predIndex, speculative)) {\r\n\t\t\t\tc = config.transform(t.target, true);\r\n\t\t\t}\r\n\t\t\telse {\r\n\t\t\t\tc = undefined;\r\n\t\t\t}\r\n\r\n\t\t\tbreak;\r\n\r\n\t\tcase TransitionType.ACTION:\r\n\t\t\tif (config.context.hasEmpty) {\r\n\t\t\t\t// execute actions anywhere in the start rule for a token.\r\n\t\t\t\t//\r\n\t\t\t\t// TODO: if the entry rule is invoked recursively, some\r\n\t\t\t\t// actions may be executed during the recursive call. The\r\n\t\t\t\t// problem can appear when hasEmpty is true but\r\n\t\t\t\t// isEmpty is false. 
In this case, the config needs to be\r\n\t\t\t\t// split into two contexts - one with just the empty path\r\n\t\t\t\t// and another with everything but the empty path.\r\n\t\t\t\t// Unfortunately, the current algorithm does not allow\r\n\t\t\t\t// getEpsilonTarget to return two configurations, so\r\n\t\t\t\t// additional modifications are needed before we can support\r\n\t\t\t\t// the split operation.\r\n\t\t\t\tlet lexerActionExecutor: LexerActionExecutor = LexerActionExecutor.append(config.lexerActionExecutor, this.atn.lexerActions[(t as ActionTransition).actionIndex]);\r\n\t\t\t\tc = config.transform(t.target, true, lexerActionExecutor);\r\n\t\t\t\tbreak;\r\n\t\t\t}\r\n\t\t\telse {\r\n\t\t\t\t// ignore actions in referenced rules\r\n\t\t\t\tc = config.transform(t.target, true);\r\n\t\t\t\tbreak;\r\n\t\t\t}\r\n\r\n\t\tcase TransitionType.EPSILON:\r\n\t\t\tc = config.transform(t.target, true);\r\n\t\t\tbreak;\r\n\r\n\t\tcase TransitionType.ATOM:\r\n\t\tcase TransitionType.RANGE:\r\n\t\tcase TransitionType.SET:\r\n\t\t\tif (treatEofAsEpsilon) {\r\n\t\t\t\tif (t.matches(IntStream.EOF, Lexer.MIN_CHAR_VALUE, Lexer.MAX_CHAR_VALUE)) {\r\n\t\t\t\t\tc = config.transform(t.target, false);\r\n\t\t\t\t\tbreak;\r\n\t\t\t\t}\r\n\t\t\t}\r\n\r\n\t\t\tc = undefined;\r\n\t\t\tbreak;\r\n\r\n\t\tdefault:\r\n\t\t\tc = undefined;\r\n\t\t\tbreak;\r\n\t\t}\r\n\r\n\t\treturn c;\r\n\t}\r\n\r\n\t/**\r\n\t * Evaluate a predicate specified in the lexer.\r\n\t *\r\n\t * If `speculative` is `true`, this method was called before\r\n\t * {@link #consume} for the matched character. This method should call\r\n\t * {@link #consume} before evaluating the predicate to ensure position\r\n\t * sensitive values, including {@link Lexer#getText}, {@link Lexer#getLine},\r\n\t * and {@link Lexer#getCharPositionInLine}, properly reflect the current\r\n\t * lexer state. This method should restore `input` and the simulator\r\n\t * to the original state before returning (i.e. 
undo the actions made by the\r\n\t * call to {@link #consume}.\r\n\t *\r\n\t * @param input The input stream.\r\n\t * @param ruleIndex The rule containing the predicate.\r\n\t * @param predIndex The index of the predicate within the rule.\r\n\t * @param speculative `true` if the current index in `input` is\r\n\t * one character before the predicate's location.\r\n\t *\r\n\t * @returns `true` if the specified predicate evaluates to\r\n\t * `true`.\r\n\t */\r\n\tprotected evaluatePredicate(@NotNull input: CharStream, ruleIndex: number, predIndex: number, speculative: boolean): boolean {\r\n\t\t// assume true if no recognizer was provided\r\n\t\tif (this.recog == null) {\r\n\t\t\treturn true;\r\n\t\t}\r\n\r\n\t\tif (!speculative) {\r\n\t\t\treturn this.recog.sempred(undefined, ruleIndex, predIndex);\r\n\t\t}\r\n\r\n\t\tlet savedCharPositionInLine: number = this._charPositionInLine;\r\n\t\tlet savedLine: number = this._line;\r\n\t\tlet index: number = input.index;\r\n\t\tlet marker: number = input.mark();\r\n\t\ttry {\r\n\t\t\tthis.consume(input);\r\n\t\t\treturn this.recog.sempred(undefined, ruleIndex, predIndex);\r\n\t\t}\r\n\t\tfinally {\r\n\t\t\tthis._charPositionInLine = savedCharPositionInLine;\r\n\t\t\tthis._line = savedLine;\r\n\t\t\tinput.seek(index);\r\n\t\t\tinput.release(marker);\r\n\t\t}\r\n\t}\r\n\r\n\tprotected captureSimState(\r\n\t\t@NotNull settings: LexerATNSimulator.SimState,\r\n\t\t@NotNull input: CharStream,\r\n\t\t@NotNull dfaState: DFAState): void {\r\n\t\tsettings.index = input.index;\r\n\t\tsettings.line = this._line;\r\n\t\tsettings.charPos = this._charPositionInLine;\r\n\t\tsettings.dfaState = dfaState;\r\n\t}\r\n\r\n\t// @NotNull\r\n\tprotected addDFAEdge(/*@NotNull*/ p: DFAState, t: number, /*@NotNull*/ q: ATNConfigSet): DFAState;\r\n\tprotected addDFAEdge(/*@NotNull*/ p: DFAState, t: number, /*@NotNull*/ q: DFAState): void;\r\n\tprotected addDFAEdge(p: DFAState, t: number, q: ATNConfigSet | DFAState): DFAState | void {\r\n\t\tif (q 
instanceof ATNConfigSet) {\r\n\t\t\t/* leading to this call, ATNConfigSet.hasSemanticContext is used as a\r\n\t\t\t* marker indicating dynamic predicate evaluation makes this edge\r\n\t\t\t* dependent on the specific input sequence, so the static edge in the\r\n\t\t\t* DFA should be omitted. The target DFAState is still created since\r\n\t\t\t* execATN has the ability to resynchronize with the DFA state cache\r\n\t\t\t* following the predicate evaluation step.\r\n\t\t\t*\r\n\t\t\t* TJP notes: next time through the DFA, we see a pred again and eval.\r\n\t\t\t* If that gets us to a previously created (but dangling) DFA\r\n\t\t\t* state, we can continue in pure DFA mode from there.\r\n\t\t\t*/\r\n\t\t\tlet suppressEdge: boolean = q.hasSemanticContext;\r\n\t\t\tif (suppressEdge) {\r\n\t\t\t\tq.hasSemanticContext = false;\r\n\t\t\t}\r\n\r\n\t\t\t// @NotNull\r\n\t\t\tlet to: DFAState = this.addDFAState(q);\r\n\r\n\t\t\tif (suppressEdge) {\r\n\t\t\t\treturn to;\r\n\t\t\t}\r\n\r\n\t\t\tthis.addDFAEdge(p, t, to);\r\n\t\t\treturn to;\r\n\t\t} else {\r\n\t\t\tif (LexerATNSimulator.debug) {\r\n\t\t\t\tconsole.log(\"EDGE \" + p + \" -> \" + q + \" upon \" + String.fromCharCode(t));\r\n\t\t\t}\r\n\r\n\t\t\tif (p != null) {\r\n\t\t\t\tp.setTarget(t, q);\r\n\t\t\t}\r\n\t\t}\r\n\t}\r\n\r\n\t/** Add a new DFA state if there isn't one with this set of\r\n\t * \tconfigurations already. This method also detects the first\r\n\t * \tconfiguration containing an ATN rule stop state. 
Later, when\r\n\t * \ttraversing the DFA, we will know which rule to accept.\r\n\t */\r\n\t@NotNull\r\n\tprotected addDFAState(@NotNull configs: ATNConfigSet): DFAState {\r\n\t\t/* the lexer evaluates predicates on-the-fly; by this point configs\r\n\t\t * should not contain any configurations with unevaluated predicates.\r\n\t\t */\r\n\t\tassert(!configs.hasSemanticContext);\r\n\r\n\t\tlet proposed: DFAState = new DFAState(configs);\r\n\t\tlet existing: DFAState | undefined = this.atn.modeToDFA[this.mode].states.get(proposed);\r\n\t\tif (existing != null) {\r\n\t\t\treturn existing;\r\n\t\t}\r\n\r\n\t\tconfigs.optimizeConfigs(this);\r\n\t\tlet newState: DFAState = new DFAState(configs.clone(true));\r\n\r\n\t\tlet firstConfigWithRuleStopState: ATNConfig | undefined;\r\n\t\tfor (let c of configs) {\r\n\t\t\tif (c.state instanceof RuleStopState) {\r\n\t\t\t\tfirstConfigWithRuleStopState = c;\r\n\t\t\t\tbreak;\r\n\t\t\t}\r\n\t\t}\r\n\r\n\t\tif (firstConfigWithRuleStopState != null) {\r\n\t\t\tlet prediction: number = this.atn.ruleToTokenType[firstConfigWithRuleStopState.state.ruleIndex];\r\n\t\t\tlet lexerActionExecutor: LexerActionExecutor | undefined = firstConfigWithRuleStopState.lexerActionExecutor;\r\n\t\t\tnewState.acceptStateInfo = new AcceptStateInfo(prediction, lexerActionExecutor);\r\n\t\t}\r\n\r\n\t\treturn this.atn.modeToDFA[this.mode].addState(newState);\r\n\t}\r\n\r\n\t@NotNull\r\n\tpublic getDFA(mode: number): DFA {\r\n\t\treturn this.atn.modeToDFA[mode];\r\n\t}\r\n\r\n\t/** Get the text matched so far for the current token.\r\n\t */\r\n\t@NotNull\r\n\tpublic getText(@NotNull input: CharStream): string {\r\n\t\t// index is first lookahead char, don't include.\r\n\t\treturn input.getText(Interval.of(this.startIndex, input.index - 1));\r\n\t}\r\n\r\n\tget line(): number {\r\n\t\treturn this._line;\r\n\t}\r\n\r\n\tset line(line: number) {\r\n\t\tthis._line = line;\r\n\t}\r\n\r\n\tget charPositionInLine(): number {\r\n\t\treturn 
this._charPositionInLine;\r\n\t}\r\n\r\n\tset charPositionInLine(charPositionInLine: number) {\r\n\t\tthis._charPositionInLine = charPositionInLine;\r\n\t}\r\n\r\n\tpublic consume(@NotNull input: CharStream): void {\r\n\t\tlet curChar: number = input.LA(1);\r\n\t\tif (curChar === \"\\n\".charCodeAt(0)) {\r\n\t\t\tthis._line++;\r\n\t\t\tthis._charPositionInLine = 0;\r\n\t\t} else {\r\n\t\t\tthis._charPositionInLine++;\r\n\t\t}\r\n\t\tinput.consume();\r\n\t}\r\n\r\n\t@NotNull\r\n\tpublic getTokenName(t: number): string {\r\n\t\tif (t === -1) {\r\n\t\t\treturn \"EOF\";\r\n\t\t}\r\n\t\t//if ( atn.g!=null ) return atn.g.getTokenDisplayName(t);\r\n\t\treturn \"'\" + String.fromCharCode(t) + \"'\";\r\n\t}\r\n}\r\n\r\nexport namespace LexerATNSimulator {\r\n\texport const debug: boolean = false;\r\n\texport const dfa_debug: boolean = false;\r\n\r\n\t/** When we hit an accept state in either the DFA or the ATN, we\r\n\t * have to notify the character stream to start buffering characters\r\n\t * via {@link IntStream#mark} and record the current state. The current sim state\r\n\t * includes the current index into the input, the current line,\r\n\t * and current character position in that line. Note that the Lexer is\r\n\t * tracking the starting line and characterization of the token. These\r\n\t * variables track the \"state\" of the simulator when it hits an accept state.\r\n\t *\r\n\t * We track these variables separately for the DFA and ATN simulation\r\n\t * because the DFA simulation often has to fail over to the ATN\r\n\t * simulation. If the ATN simulation fails, we need the DFA to fall\r\n\t * back to its previously accepted state, if any. 
If the ATN succeeds,\r\n\t * then the ATN does the accept and the DFA simulator that invoked it\r\n\t * can simply return the predicted token type.\r\n\t */\r\n\texport class SimState {\r\n\t\tpublic index: number = -1;\r\n\t\tpublic line: number = 0;\r\n\t\tpublic charPos: number = -1;\r\n\t\tpublic dfaState?: DFAState;\r\n\r\n\t\tpublic reset(): void {\r\n\t\t\tthis.index = -1;\r\n\t\t\tthis.line = 0;\r\n\t\t\tthis.charPos = -1;\r\n\t\t\tthis.dfaState = undefined;\r\n\t\t}\r\n\t}\r\n}\r\n", "/*!\r\n * Copyright 2016 The ANTLR Project. All rights reserved.\r\n * Licensed under the BSD-3-Clause license. See LICENSE file in the project root for license information.\r\n */\r\n\r\n// ConvertTo-TS run at 2016-10-04T11:26:51.7913318-07:00\r\n\r\nimport { ANTLRErrorListener } from \"./ANTLRErrorListener\";\r\nimport { CharStream } from \"./CharStream\";\r\nimport { CommonTokenFactory } from \"./CommonTokenFactory\";\r\nimport { IntegerStack } from \"./misc/IntegerStack\";\r\nimport { Interval } from \"./misc/Interval\";\r\nimport { IntStream } from \"./IntStream\";\r\nimport { LexerATNSimulator } from \"./atn/LexerATNSimulator\";\r\nimport { LexerNoViableAltException } from \"./LexerNoViableAltException\";\r\nimport { Override } from \"./Decorators\";\r\nimport { RecognitionException } from \"./RecognitionException\";\r\nimport { Recognizer } from \"./Recognizer\";\r\nimport { Token } from \"./Token\";\r\nimport { TokenFactory } from \"./TokenFactory\";\r\nimport { TokenSource } from \"./TokenSource\";\r\n\r\n/** A lexer is recognizer that draws input symbols from a character stream.\r\n * lexer grammars result in a subclass of this object. 
A Lexer object\r\n * uses simplified match() and error recovery mechanisms in the interest\r\n * of speed.\r\n */\r\nexport abstract class Lexer extends Recognizer\r\n\timplements TokenSource {\r\n\tpublic static readonly DEFAULT_MODE: number = 0;\r\n\tpublic static readonly MORE: number = -2;\r\n\tpublic static readonly SKIP: number = -3;\r\n\r\n\tstatic get DEFAULT_TOKEN_CHANNEL(): number {\r\n\t\treturn Token.DEFAULT_CHANNEL;\r\n\t}\r\n\r\n\tstatic get HIDDEN(): number {\r\n\t\treturn Token.HIDDEN_CHANNEL;\r\n\t}\r\n\r\n\tpublic static readonly MIN_CHAR_VALUE: number = 0x0000;\r\n\tpublic static readonly MAX_CHAR_VALUE: number = 0x10FFFF;\r\n\r\n\tpublic _input: CharStream;\r\n\r\n\tprotected _tokenFactorySourcePair: { source: TokenSource, stream: CharStream };\r\n\r\n\t/** How to create token objects */\r\n\tprotected _factory: TokenFactory = CommonTokenFactory.DEFAULT;\r\n\r\n\t/** The goal of all lexer rules/methods is to create a token object.\r\n\t * This is an instance variable as multiple rules may collaborate to\r\n\t * create a single token. nextToken will return this object after\r\n\t * matching lexer rule(s). If you subclass to allow multiple token\r\n\t * emissions, then set this to the last token to be matched or\r\n\t * something non-undefined so that the auto token emit mechanism will not\r\n\t * emit another token.\r\n\t */\r\n\tpublic _token: Token | undefined;\r\n\r\n\t/** What character index in the stream did the current token start at?\r\n\t * Needed, for example, to get the text for current token. 
Set at\r\n\t * the start of nextToken.\r\n\t */\r\n\tpublic _tokenStartCharIndex: number = -1;\r\n\r\n\t/** The line on which the first character of the token resides */\r\n\tpublic _tokenStartLine: number = 0;\r\n\r\n\t/** The character position of first character within the line */\r\n\tpublic _tokenStartCharPositionInLine: number = 0;\r\n\r\n\t/** Once we see EOF on char stream, next token will be EOF.\r\n\t * If you have DONE : EOF ; then you see DONE EOF.\r\n\t */\r\n\tpublic _hitEOF: boolean = false;\r\n\r\n\t/** The channel number for the current token */\r\n\tpublic _channel: number = 0;\r\n\r\n\t/** The token type for the current token */\r\n\tpublic _type: number = 0;\r\n\r\n\tpublic readonly _modeStack: IntegerStack = new IntegerStack();\r\n\tpublic _mode: number = Lexer.DEFAULT_MODE;\r\n\r\n\t/** You can set the text for the current token to override what is in\r\n\t * the input char buffer. Set `text` or can set this instance var.\r\n\t */\r\n\tpublic _text: string | undefined;\r\n\r\n\tconstructor(input: CharStream) {\r\n\t\tsuper();\r\n\t\tthis._input = input;\r\n\t\tthis._tokenFactorySourcePair = { source: this, stream: input };\r\n\t}\r\n\r\n\tpublic reset(): void;\r\n\tpublic reset(resetInput: boolean): void;\r\n\tpublic reset(resetInput?: boolean): void {\r\n\t\t// wack Lexer state variables\r\n\t\tif (resetInput === undefined || resetInput) {\r\n\t\t\tthis._input.seek(0); // rewind the input\r\n\t\t}\r\n\r\n\t\tthis._token = undefined;\r\n\t\tthis._type = Token.INVALID_TYPE;\r\n\t\tthis._channel = Token.DEFAULT_CHANNEL;\r\n\t\tthis._tokenStartCharIndex = -1;\r\n\t\tthis._tokenStartCharPositionInLine = -1;\r\n\t\tthis._tokenStartLine = -1;\r\n\t\tthis._text = undefined;\r\n\r\n\t\tthis._hitEOF = false;\r\n\t\tthis._mode = Lexer.DEFAULT_MODE;\r\n\t\tthis._modeStack.clear();\r\n\r\n\t\tthis.interpreter.reset();\r\n\t}\r\n\r\n\t/** Return a token from this source; i.e., match a token on the char\r\n\t * stream.\r\n\t */\r\n\t@Override\r\n\tpublic 
nextToken(): Token {\r\n\t\tif (this._input == null) {\r\n\t\t\tthrow new Error(\"nextToken requires a non-null input stream.\");\r\n\t\t}\r\n\r\n\t\t// Mark start location in char stream so unbuffered streams are\r\n\t\t// guaranteed at least have text of current token\r\n\t\tlet tokenStartMarker: number = this._input.mark();\r\n\t\ttry {\r\n\t\t\touter:\r\n\t\t\twhile (true) {\r\n\t\t\t\tif (this._hitEOF) {\r\n\t\t\t\t\treturn this.emitEOF();\r\n\t\t\t\t}\r\n\r\n\t\t\t\tthis._token = undefined;\r\n\t\t\t\tthis._channel = Token.DEFAULT_CHANNEL;\r\n\t\t\t\tthis._tokenStartCharIndex = this._input.index;\r\n\t\t\t\tthis._tokenStartCharPositionInLine = this.interpreter.charPositionInLine;\r\n\t\t\t\tthis._tokenStartLine = this.interpreter.line;\r\n\t\t\t\tthis._text = undefined;\r\n\t\t\t\tdo {\r\n\t\t\t\t\tthis._type = Token.INVALID_TYPE;\r\n//\t\t\t\tSystem.out.println(\"nextToken line \"+tokenStartLine+\" at \"+((char)input.LA(1))+\r\n//\t\t\t\t\t\t\t\t \" in mode \"+mode+\r\n//\t\t\t\t\t\t\t\t \" at index \"+input.index);\r\n\t\t\t\t\tlet ttype: number;\r\n\t\t\t\t\ttry {\r\n\t\t\t\t\t\tttype = this.interpreter.match(this._input, this._mode);\r\n\t\t\t\t\t}\r\n\t\t\t\t\tcatch (e) {\r\n\t\t\t\t\t\tif (e instanceof LexerNoViableAltException) {\r\n\t\t\t\t\t\t\tthis.notifyListeners(e);\t\t// report error\r\n\t\t\t\t\t\t\tthis.recover(e);\r\n\t\t\t\t\t\t\tttype = Lexer.SKIP;\r\n\t\t\t\t\t\t} else {\r\n\t\t\t\t\t\t\tthrow e;\r\n\t\t\t\t\t\t}\r\n\t\t\t\t\t}\r\n\t\t\t\t\tif (this._input.LA(1) === IntStream.EOF) {\r\n\t\t\t\t\t\tthis._hitEOF = true;\r\n\t\t\t\t\t}\r\n\t\t\t\t\tif (this._type === Token.INVALID_TYPE) {\r\n\t\t\t\t\t\tthis._type = ttype;\r\n\t\t\t\t\t}\r\n\t\t\t\t\tif (this._type === Lexer.SKIP) {\r\n\t\t\t\t\t\tcontinue outer;\r\n\t\t\t\t\t}\r\n\t\t\t\t} while (this._type === Lexer.MORE);\r\n\t\t\t\tif (this._token == null) {\r\n\t\t\t\t\treturn this.emit();\r\n\t\t\t\t}\r\n\t\t\t\treturn this._token;\r\n\t\t\t}\r\n\t\t}\r\n\t\tfinally {\r\n\t\t\t// make 
sure we release marker after match or\r\n\t\t\t// unbuffered char stream will keep buffering\r\n\t\t\tthis._input.release(tokenStartMarker);\r\n\t\t}\r\n\t}\r\n\r\n\t/** Instruct the lexer to skip creating a token for current lexer rule\r\n\t * and look for another token. nextToken() knows to keep looking when\r\n\t * a lexer rule finishes with token set to SKIP_TOKEN. Recall that\r\n\t * if token==undefined at end of any token rule, it creates one for you\r\n\t * and emits it.\r\n\t */\r\n\tpublic skip(): void {\r\n\t\tthis._type = Lexer.SKIP;\r\n\t}\r\n\r\n\tpublic more(): void {\r\n\t\tthis._type = Lexer.MORE;\r\n\t}\r\n\r\n\tpublic mode(m: number): void {\r\n\t\tthis._mode = m;\r\n\t}\r\n\r\n\tpublic pushMode(m: number): void {\r\n\t\tif (LexerATNSimulator.debug) {\r\n\t\t\tconsole.log(\"pushMode \" + m);\r\n\t\t}\r\n\t\tthis._modeStack.push(this._mode);\r\n\t\tthis.mode(m);\r\n\t}\r\n\r\n\tpublic popMode(): number {\r\n\t\tif (this._modeStack.isEmpty) {\r\n\t\t\tthrow new Error(\"EmptyStackException\");\r\n\t\t}\r\n\t\tif (LexerATNSimulator.debug) {\r\n\t\t\tconsole.log(\"popMode back to \" + this._modeStack.peek());\r\n\t\t}\r\n\t\tthis.mode(this._modeStack.pop());\r\n\t\treturn this._mode;\r\n\t}\r\n\r\n\t@Override\r\n\tget tokenFactory(): TokenFactory {\r\n\t\treturn this._factory;\r\n\t}\r\n\r\n\t// @Override\r\n\tset tokenFactory(factory: TokenFactory) {\r\n\t\tthis._factory = factory;\r\n\t}\r\n\r\n\t@Override\r\n\tget inputStream(): CharStream {\r\n\t\treturn this._input;\r\n\t}\r\n\r\n\t/** Set the char stream and reset the lexer */\r\n\tset inputStream(input: CharStream) {\r\n\t\tthis.reset(false);\r\n\t\tthis._input = input;\r\n\t\tthis._tokenFactorySourcePair = { source: this, stream: this._input };\r\n\t}\r\n\r\n\t@Override\r\n\tget sourceName(): string {\r\n\t\treturn this._input.sourceName;\r\n\t}\r\n\r\n\r\n\t/** The standard method called to automatically emit a token at the\r\n\t * outermost lexical rule. 
The token object should point into the\r\n\t * char buffer start..stop. If there is a text override in 'text',\r\n\t * use that to set the token's text. Override this method to emit\r\n\t * custom Token objects or provide a new factory.\r\n\t */\r\n\tpublic emit(token: Token): Token;\r\n\r\n\t/** By default does not support multiple emits per nextToken invocation\r\n\t * for efficiency reasons. Subclass and override this method, nextToken,\r\n\t * and getToken (to push tokens into a list and pull from that list\r\n\t * rather than a single variable as this implementation does).\r\n\t */\r\n\tpublic emit(): Token;\r\n\r\n\tpublic emit(token?: Token): Token {\r\n\t\tif (!token) {\r\n\t\t\ttoken = this._factory.create(\r\n\t\t\t\tthis._tokenFactorySourcePair, this._type, this._text, this._channel,\r\n\t\t\t\tthis._tokenStartCharIndex, this.charIndex - 1, this._tokenStartLine,\r\n\t\t\t\tthis._tokenStartCharPositionInLine);\r\n\t\t}\r\n\t\tthis._token = token;\r\n\t\treturn token;\r\n\t}\r\n\r\n\tpublic emitEOF(): Token {\r\n\t\tlet cpos: number = this.charPositionInLine;\r\n\t\tlet line: number = this.line;\r\n\t\tlet eof: Token = this._factory.create(\r\n\t\t\tthis._tokenFactorySourcePair, Token.EOF, undefined,\r\n\t\t\tToken.DEFAULT_CHANNEL, this._input.index, this._input.index - 1,\r\n\t\t\tline, cpos);\r\n\t\tthis.emit(eof);\r\n\t\treturn eof;\r\n\t}\r\n\r\n\t@Override\r\n\tget line(): number {\r\n\t\treturn this.interpreter.line;\r\n\t}\r\n\r\n\tset line(line: number) {\r\n\t\tthis.interpreter.line = line;\r\n\t}\r\n\r\n\t@Override\r\n\tget charPositionInLine(): number {\r\n\t\treturn this.interpreter.charPositionInLine;\r\n\t}\r\n\r\n\tset charPositionInLine(charPositionInLine: number) {\r\n\t\tthis.interpreter.charPositionInLine = charPositionInLine;\r\n\t}\r\n\r\n\t/** What is the index of the current character of lookahead? 
*/\r\n\tget charIndex(): number {\r\n\t\treturn this._input.index;\r\n\t}\r\n\r\n\t/** Return the text matched so far for the current token or any\r\n\t * text override.\r\n\t */\r\n\tget text(): string {\r\n\t\tif (this._text != null) {\r\n\t\t\treturn this._text;\r\n\t\t}\r\n\t\treturn this.interpreter.getText(this._input);\r\n\t}\r\n\r\n\t/** Set the complete text of this token; it wipes any previous\r\n\t * changes to the text.\r\n\t */\r\n\tset text(text: string) {\r\n\t\tthis._text = text;\r\n\t}\r\n\r\n\t/** Override if emitting multiple tokens. */\r\n\tget token(): Token | undefined { return this._token; }\r\n\r\n\tset token(_token: Token | undefined) {\r\n\t\tthis._token = _token;\r\n\t}\r\n\r\n\tset type(ttype: number) {\r\n\t\tthis._type = ttype;\r\n\t}\r\n\r\n\tget type(): number {\r\n\t\treturn this._type;\r\n\t}\r\n\r\n\tset channel(channel: number) {\r\n\t\tthis._channel = channel;\r\n\t}\r\n\r\n\tget channel(): number {\r\n\t\treturn this._channel;\r\n\t}\r\n\r\n\tpublic abstract readonly channelNames: string[];\r\n\r\n\tpublic abstract readonly modeNames: string[];\r\n\r\n\t/** Return a list of all Token objects in input char stream.\r\n\t * Forces load of all tokens. 
Does not include EOF token.\r\n\t */\r\n\tpublic getAllTokens(): Token[] {\r\n\t\tlet tokens: Token[] = [];\r\n\t\tlet t: Token = this.nextToken();\r\n\t\twhile (t.type !== Token.EOF) {\r\n\t\t\ttokens.push(t);\r\n\t\t\tt = this.nextToken();\r\n\t\t}\r\n\t\treturn tokens;\r\n\t}\r\n\r\n\tpublic notifyListeners(e: LexerNoViableAltException): void {\r\n\t\tlet text: string = this._input.getText(\r\n\t\t\tInterval.of(this._tokenStartCharIndex, this._input.index));\r\n\t\tlet msg: string = \"token recognition error at: '\" +\r\n\t\t\tthis.getErrorDisplay(text) + \"'\";\r\n\r\n\t\tlet listener: ANTLRErrorListener = this.getErrorListenerDispatch();\r\n\t\tif (listener.syntaxError) {\r\n\t\t\tlistener.syntaxError(this, undefined, this._tokenStartLine, this._tokenStartCharPositionInLine, msg, e);\r\n\t\t}\r\n\t}\r\n\r\n\tpublic getErrorDisplay(s: string | number): string {\r\n\t\tif (typeof s === \"number\") {\r\n\t\t\tswitch (s) {\r\n\t\t\tcase Token.EOF:\r\n\t\t\t\treturn \"\";\r\n\t\t\tcase 0x0a:\r\n\t\t\t\treturn \"\\\\n\";\r\n\t\t\tcase 0x09:\r\n\t\t\t\treturn \"\\\\t\";\r\n\t\t\tcase 0x0d:\r\n\t\t\t\treturn \"\\\\r\";\r\n\t\t\t}\r\n\t\t\treturn String.fromCharCode(s);\r\n\t\t}\r\n\t\treturn s.replace(/\\n/g, \"\\\\n\")\r\n\t\t\t.replace(/\\t/g, \"\\\\t\")\r\n\t\t\t.replace(/\\r/g, \"\\\\r\");\r\n\t}\r\n\r\n\tpublic getCharErrorDisplay(c: number): string {\r\n\t\tlet s: string = this.getErrorDisplay(c);\r\n\t\treturn \"'\" + s + \"'\";\r\n\t}\r\n\r\n\t/** Lexers can normally match any char in it's vocabulary after matching\r\n\t * a token, so do the easy thing and just kill a character and hope\r\n\t * it all works out. 
You can instead use the rule invocation stack\r\n\t * to do sophisticated error recovery if you are in a fragment rule.\r\n\t */\r\n\tpublic recover(re: RecognitionException): void;\r\n\tpublic recover(re: LexerNoViableAltException): void;\r\n\tpublic recover(re: RecognitionException): void {\r\n\t\tif (re instanceof LexerNoViableAltException) {\r\n\t\t\tif (this._input.LA(1) !== IntStream.EOF) {\r\n\t\t\t\t// skip a char and try again\r\n\t\t\t\tthis.interpreter.consume(this._input);\r\n\t\t\t}\r\n\t\t} else {\r\n\t\t\t//System.out.println(\"consuming char \"+(char)input.LA(1)+\" during recovery\");\r\n\t\t\t//re.printStackTrace();\r\n\t\t\t// TODO: Do we lose character or line position information?\r\n\t\t\tthis._input.consume();\r\n\t\t}\r\n\t}\r\n}\r\n", "/*!\r\n * Copyright 2016 The ANTLR Project. All rights reserved.\r\n * Licensed under the BSD-3-Clause license. See LICENSE file in the project root for license information.\r\n */\r\n\r\n// ConvertTo-TS run at 2016-10-04T11:26:40.8683480-07:00\r\n\r\nimport { ArrayEqualityComparator } from \"./ArrayEqualityComparator\";\r\nimport { IntegerList } from \"./IntegerList\";\r\nimport { Interval } from \"./Interval\";\r\nimport { IntSet } from \"./IntSet\";\r\nimport { Lexer } from \"../Lexer\";\r\nimport { MurmurHash } from \"./MurmurHash\";\r\nimport { Override, NotNull } from \"../Decorators\";\r\nimport { Token } from \"../Token\";\r\nimport { Vocabulary } from \"../Vocabulary\";\r\n\r\n/**\r\n * This class implements the {@link IntSet} backed by a sorted array of\r\n * non-overlapping intervals. It is particularly efficient for representing\r\n * large collections of numbers, where the majority of elements appear as part\r\n * of a sequential range of numbers that are all part of the set. 
For example,\r\n * the set { 1, 2, 3, 4, 7, 8 } may be represented as { [1, 4], [7, 8] }.\r\n *\r\n * This class is able to represent sets containing any combination of values in\r\n * the range {@link Integer#MIN_VALUE} to {@link Integer#MAX_VALUE}\r\n * (inclusive).\r\n */\r\nexport class IntervalSet implements IntSet {\r\n\tprivate static _COMPLETE_CHAR_SET: IntervalSet;\r\n\tstatic get COMPLETE_CHAR_SET(): IntervalSet {\r\n\t\tif (IntervalSet._COMPLETE_CHAR_SET === undefined) {\r\n\t\t\tIntervalSet._COMPLETE_CHAR_SET = IntervalSet.of(Lexer.MIN_CHAR_VALUE, Lexer.MAX_CHAR_VALUE);\r\n\t\t\tIntervalSet._COMPLETE_CHAR_SET.setReadonly(true);\r\n\t\t}\r\n\r\n\t\treturn IntervalSet._COMPLETE_CHAR_SET;\r\n\t}\r\n\r\n\tprivate static _EMPTY_SET: IntervalSet;\r\n\tstatic get EMPTY_SET(): IntervalSet {\r\n\t\tif (IntervalSet._EMPTY_SET == null) {\r\n\t\t\tIntervalSet._EMPTY_SET = new IntervalSet();\r\n\t\t\tIntervalSet._EMPTY_SET.setReadonly(true);\r\n\t\t}\r\n\r\n\t\treturn IntervalSet._EMPTY_SET;\r\n\t}\r\n\r\n\t/** The list of sorted, disjoint intervals. */\r\n\tprivate _intervals: Interval[];\r\n\r\n\tprivate readonly: boolean = false;\r\n\r\n\tconstructor(intervals?: Interval[]) {\r\n\t\tif (intervals != null) {\r\n\t\t\tthis._intervals = intervals.slice(0);\r\n\t\t} else {\r\n\t\t\tthis._intervals = [];\r\n\t\t}\r\n\t}\r\n\r\n\t/**\r\n\t * Create a set with all ints within range [a..b] (inclusive). 
If b is omitted, the set contains the single element\r\n\t * a.\r\n\t */\r\n\t@NotNull\r\n\tpublic static of(a: number, b: number = a): IntervalSet {\r\n\t\tlet s: IntervalSet = new IntervalSet();\r\n\t\ts.add(a, b);\r\n\t\treturn s;\r\n\t}\r\n\r\n\tpublic clear(): void {\r\n\t\tif (this.readonly) {\r\n\t\t\tthrow new Error(\"can't alter readonly IntervalSet\");\r\n\t\t}\r\n\r\n\t\tthis._intervals.length = 0;\r\n\t}\r\n\r\n\t/** Add interval; i.e., add all integers from a to b to set.\r\n\t * If b<a, do nothing.\r\n\t * Keep list in sorted order (by left range value).\r\n\t * If overlap, combine ranges. For example,\r\n\t * If this is {1..5, 10..20}, adding 6..7 yields\r\n\t * {1..5, 6..7, 10..20}. Adding 4..8 yields {1..8, 10..20}.\r\n\t */\r\n\tpublic add(a: number, b: number = a): void {\r\n\t\tthis.addRange(Interval.of(a, b));\r\n\t}\r\n\r\n\t// copy on write so we can cache a..a intervals and sets of that\r\n\tprotected addRange(addition: Interval): void {\r\n\t\tif (this.readonly) {\r\n\t\t\tthrow new Error(\"can't alter readonly IntervalSet\");\r\n\t\t}\r\n\r\n\t\t//System.out.println(\"add \"+addition+\" to \"+intervals.toString());\r\n\t\tif (addition.b < addition.a) {\r\n\t\t\treturn;\r\n\t\t}\r\n\r\n\t\t// find position in list\r\n\t\t// Use iterators as we modify list in place\r\n\t\tfor (let i: number = 0; i < this._intervals.length; i++) {\r\n\t\t\tlet r: Interval = this._intervals[i];\r\n\t\t\tif (addition.equals(r)) {\r\n\t\t\t\treturn;\r\n\t\t\t}\r\n\r\n\t\t\tif (addition.adjacent(r) || !addition.disjoint(r)) {\r\n\t\t\t\t// next to each other, make a single larger interval\r\n\t\t\t\tlet bigger: Interval = addition.union(r);\r\n\t\t\t\tthis._intervals[i] = bigger;\r\n\t\t\t\t// make sure we didn't just create an interval that\r\n\t\t\t\t// should be merged with next interval in list\r\n\t\t\t\twhile (i < this._intervals.length - 1) {\r\n\t\t\t\t\ti++;\r\n\t\t\t\t\tlet next: Interval = this._intervals[i];\r\n\t\t\t\t\tif (!bigger.adjacent(next) && 
bigger.disjoint(next)) {\r\n\t\t\t\t\t\tbreak;\r\n\t\t\t\t\t}\r\n\r\n\t\t\t\t\t// if we bump up against or overlap next, merge\r\n\t\t\t\t\t// remove this one\r\n\t\t\t\t\tthis._intervals.splice(i, 1);\r\n\t\t\t\t\ti--;\r\n\t\t\t\t\t// move backwards to what we just set\r\n\t\t\t\t\tthis._intervals[i] = bigger.union(next);\r\n\t\t\t\t\t// set to 3 merged ones\r\n\t\t\t\t}\r\n\r\n\t\t\t\t// first call to next after previous duplicates the result\r\n\t\t\t\treturn;\r\n\t\t\t}\r\n\r\n\t\t\tif (addition.startsBeforeDisjoint(r)) {\r\n\t\t\t\t// insert before r\r\n\t\t\t\tthis._intervals.splice(i, 0, addition);\r\n\t\t\t\treturn;\r\n\t\t\t}\r\n\r\n\t\t\t// if disjoint and after r, a future iteration will handle it\r\n\t\t}\r\n\r\n\t\t// ok, must be after last interval (and disjoint from last interval)\r\n\t\t// just add it\r\n\t\tthis._intervals.push(addition);\r\n\t}\r\n\r\n\t/** combine all sets in the array returned the or'd value */\r\n\tpublic static or(sets: IntervalSet[]): IntervalSet {\r\n\t\tlet r: IntervalSet = new IntervalSet();\r\n\t\tfor (let s of sets) {\r\n\t\t\tr.addAll(s);\r\n\t\t}\r\n\r\n\t\treturn r;\r\n\t}\r\n\r\n\t@Override\r\n\tpublic addAll(set: IntSet): IntervalSet {\r\n\t\tif (set == null) {\r\n\t\t\treturn this;\r\n\t\t}\r\n\r\n\t\tif (set instanceof IntervalSet) {\r\n\t\t\tlet other: IntervalSet = set;\r\n\t\t\t// walk set and add each interval\r\n\t\t\tlet n: number = other._intervals.length;\r\n\t\t\tfor (let i = 0; i < n; i++) {\r\n\t\t\t\tlet I: Interval = other._intervals[i];\r\n\t\t\t\tthis.add(I.a, I.b);\r\n\t\t\t}\r\n\t\t}\r\n\t\telse {\r\n\t\t\tfor (let value of set.toArray()) {\r\n\t\t\t\tthis.add(value);\r\n\t\t\t}\r\n\t\t}\r\n\r\n\t\treturn this;\r\n\t}\r\n\r\n\tpublic complementRange(minElement: number, maxElement: number): IntervalSet {\r\n\t\treturn this.complement(IntervalSet.of(minElement, maxElement));\r\n\t}\r\n\r\n\t/** {@inheritDoc} */\r\n\t@Override\r\n\tpublic complement(vocabulary: IntSet): IntervalSet {\r\n\t\tif 
(vocabulary.isNil) {\r\n\t\t\t// nothing in common with null set\r\n\t\t\treturn IntervalSet.EMPTY_SET;\r\n\t\t}\r\n\r\n\t\tlet vocabularyIS: IntervalSet;\r\n\t\tif (vocabulary instanceof IntervalSet) {\r\n\t\t\tvocabularyIS = vocabulary;\r\n\t\t} else {\r\n\t\t\tvocabularyIS = new IntervalSet();\r\n\t\t\tvocabularyIS.addAll(vocabulary);\r\n\t\t}\r\n\r\n\t\treturn vocabularyIS.subtract(this);\r\n\t}\r\n\r\n\t@Override\r\n\tpublic subtract(a: IntSet): IntervalSet {\r\n\t\tif (a == null || a.isNil) {\r\n\t\t\treturn new IntervalSet(this._intervals);\r\n\t\t}\r\n\r\n\t\tif (a instanceof IntervalSet) {\r\n\t\t\treturn IntervalSet.subtract(this, a);\r\n\t\t}\r\n\r\n\t\tlet other: IntervalSet = new IntervalSet();\r\n\t\tother.addAll(a);\r\n\t\treturn IntervalSet.subtract(this, other);\r\n\t}\r\n\r\n\t/**\r\n\t * Compute the set difference between two interval sets. The specific\r\n\t * operation is `left - right`.\r\n\t */\r\n\t@NotNull\r\n\tpublic static subtract(left: IntervalSet, right: IntervalSet): IntervalSet {\r\n\t\tif (left.isNil) {\r\n\t\t\treturn new IntervalSet();\r\n\t\t}\r\n\r\n\t\tlet result: IntervalSet = new IntervalSet(left._intervals);\r\n\t\tif (right.isNil) {\r\n\t\t\t// right set has no elements; just return the copy of the current set\r\n\t\t\treturn result;\r\n\t\t}\r\n\r\n\t\tlet resultI: number = 0;\r\n\t\tlet rightI: number = 0;\r\n\t\twhile (resultI < result._intervals.length && rightI < right._intervals.length) {\r\n\t\t\tlet resultInterval: Interval = result._intervals[resultI];\r\n\t\t\tlet rightInterval: Interval = right._intervals[rightI];\r\n\r\n\t\t\t// operation: (resultInterval - rightInterval) and update indexes\r\n\r\n\t\t\tif (rightInterval.b < resultInterval.a) {\r\n\t\t\t\trightI++;\r\n\t\t\t\tcontinue;\r\n\t\t\t}\r\n\r\n\t\t\tif (rightInterval.a > resultInterval.b) {\r\n\t\t\t\tresultI++;\r\n\t\t\t\tcontinue;\r\n\t\t\t}\r\n\r\n\t\t\tlet beforeCurrent: Interval | undefined;\r\n\t\t\tlet afterCurrent: Interval | 
undefined;\r\n\t\t\tif (rightInterval.a > resultInterval.a) {\r\n\t\t\t\tbeforeCurrent = new Interval(resultInterval.a, rightInterval.a - 1);\r\n\t\t\t}\r\n\r\n\t\t\tif (rightInterval.b < resultInterval.b) {\r\n\t\t\t\tafterCurrent = new Interval(rightInterval.b + 1, resultInterval.b);\r\n\t\t\t}\r\n\r\n\t\t\tif (beforeCurrent) {\r\n\t\t\t\tif (afterCurrent) {\r\n\t\t\t\t\t// split the current interval into two\r\n\t\t\t\t\tresult._intervals[resultI] = beforeCurrent;\r\n\t\t\t\t\tresult._intervals.splice(resultI + 1, 0, afterCurrent);\r\n\t\t\t\t\tresultI++;\r\n\t\t\t\t\trightI++;\r\n\t\t\t\t\tcontinue;\r\n\t\t\t\t}\r\n\t\t\t\telse {\r\n\t\t\t\t\t// replace the current interval\r\n\t\t\t\t\tresult._intervals[resultI] = beforeCurrent;\r\n\t\t\t\t\tresultI++;\r\n\t\t\t\t\tcontinue;\r\n\t\t\t\t}\r\n\t\t\t}\r\n\t\t\telse {\r\n\t\t\t\tif (afterCurrent) {\r\n\t\t\t\t\t// replace the current interval\r\n\t\t\t\t\tresult._intervals[resultI] = afterCurrent;\r\n\t\t\t\t\trightI++;\r\n\t\t\t\t\tcontinue;\r\n\t\t\t\t}\r\n\t\t\t\telse {\r\n\t\t\t\t\t// remove the current interval (thus no need to increment resultI)\r\n\t\t\t\t\tresult._intervals.splice(resultI, 1);\r\n\t\t\t\t\tcontinue;\r\n\t\t\t\t}\r\n\t\t\t}\r\n\t\t}\r\n\r\n\t\t// If rightI reached right.intervals.size, no more intervals to subtract from result.\r\n\t\t// If resultI reached result.intervals.size, we would be subtracting from an empty set.\r\n\t\t// Either way, we are done.\r\n\t\treturn result;\r\n\t}\r\n\r\n\t@Override\r\n\tpublic or(a: IntSet): IntervalSet {\r\n\t\tlet o: IntervalSet = new IntervalSet();\r\n\t\to.addAll(this);\r\n\t\to.addAll(a);\r\n\t\treturn o;\r\n\t}\r\n\r\n\t/** {@inheritDoc} */\r\n\t@Override\r\n\tpublic and(other: IntSet): IntervalSet {\r\n\t\tif (other.isNil) { //|| !(other instanceof IntervalSet) ) {\r\n\t\t\t// nothing in common with null set\r\n\t\t\treturn new IntervalSet();\r\n\t\t}\r\n\r\n\t\tlet myIntervals: Interval[] = this._intervals;\r\n\t\tlet theirIntervals: Interval[] 
= (other as IntervalSet)._intervals;\r\n\t\tlet intersection: IntervalSet | undefined;\r\n\t\tlet mySize: number = myIntervals.length;\r\n\t\tlet theirSize: number = theirIntervals.length;\r\n\t\tlet i: number = 0;\r\n\t\tlet j: number = 0;\r\n\t\t// iterate down both interval lists looking for nondisjoint intervals\r\n\t\twhile (i < mySize && j < theirSize) {\r\n\t\t\tlet mine: Interval = myIntervals[i];\r\n\t\t\tlet theirs: Interval = theirIntervals[j];\r\n\t\t\t//System.out.println(\"mine=\"+mine+\" and theirs=\"+theirs);\r\n\t\t\tif (mine.startsBeforeDisjoint(theirs)) {\r\n\t\t\t\t// move this iterator looking for interval that might overlap\r\n\t\t\t\ti++;\r\n\t\t\t}\r\n\t\t\telse if (theirs.startsBeforeDisjoint(mine)) {\r\n\t\t\t\t// move other iterator looking for interval that might overlap\r\n\t\t\t\tj++;\r\n\t\t\t}\r\n\t\t\telse if (mine.properlyContains(theirs)) {\r\n\t\t\t\t// overlap, add intersection, get next theirs\r\n\t\t\t\tif (!intersection) {\r\n\t\t\t\t\tintersection = new IntervalSet();\r\n\t\t\t\t}\r\n\r\n\t\t\t\tintersection.addRange(mine.intersection(theirs));\r\n\t\t\t\tj++;\r\n\t\t\t}\r\n\t\t\telse if (theirs.properlyContains(mine)) {\r\n\t\t\t\t// overlap, add intersection, get next mine\r\n\t\t\t\tif (!intersection) {\r\n\t\t\t\t\tintersection = new IntervalSet();\r\n\t\t\t\t}\r\n\r\n\t\t\t\tintersection.addRange(mine.intersection(theirs));\r\n\t\t\t\ti++;\r\n\t\t\t}\r\n\t\t\telse if (!mine.disjoint(theirs)) {\r\n\t\t\t\t// overlap, add intersection\r\n\t\t\t\tif (!intersection) {\r\n\t\t\t\t\tintersection = new IntervalSet();\r\n\t\t\t\t}\r\n\r\n\t\t\t\tintersection.addRange(mine.intersection(theirs));\r\n\t\t\t\t// Move the iterator of lower range [a..b], but not\r\n\t\t\t\t// the upper range as it may contain elements that will collide\r\n\t\t\t\t// with the next iterator. 
So, if mine=[0..115] and\r\n\t\t\t\t// theirs=[115..200], then intersection is 115 and move mine\r\n\t\t\t\t// but not theirs as theirs may collide with the next range\r\n\t\t\t\t// in thisIter.\r\n\t\t\t\t// move both iterators to next ranges\r\n\t\t\t\tif (mine.startsAfterNonDisjoint(theirs)) {\r\n\t\t\t\t\tj++;\r\n\t\t\t\t}\r\n\t\t\t\telse if (theirs.startsAfterNonDisjoint(mine)) {\r\n\t\t\t\t\ti++;\r\n\t\t\t\t}\r\n\t\t\t}\r\n\t\t}\r\n\r\n\t\tif (!intersection) {\r\n\t\t\treturn new IntervalSet();\r\n\t\t}\r\n\r\n\t\treturn intersection;\r\n\t}\r\n\r\n\t/** {@inheritDoc} */\r\n\t@Override\r\n\tpublic contains(el: number): boolean {\r\n\t\tlet n: number = this._intervals.length;\r\n\t\tlet l: number = 0;\r\n\t\tlet r: number = n - 1;\r\n\t\t// Binary search for the element in the (sorted, disjoint) array of intervals.\r\n\t\twhile (l <= r) {\r\n\t\t\tlet m: number = (l + r) >> 1;\r\n\t\t\tlet I: Interval = this._intervals[m];\r\n\t\t\tlet a: number = I.a;\r\n\t\t\tlet b: number = I.b;\r\n\t\t\tif (b < el) {\r\n\t\t\t\tl = m + 1;\r\n\t\t\t} else if (a > el) {\r\n\t\t\t\tr = m - 1;\r\n\t\t\t} else {\r\n\t\t\t\t// el >= a && el <= b\r\n\t\t\t\treturn true;\r\n\t\t\t}\r\n\t\t}\r\n\r\n\t\treturn false;\r\n\t}\r\n\r\n\t/** {@inheritDoc} */\r\n\t@Override\r\n\tget isNil(): boolean {\r\n\t\treturn this._intervals == null || this._intervals.length === 0;\r\n\t}\r\n\r\n\t/**\r\n\t * Returns the maximum value contained in the set if not isNil.\r\n\t *\r\n\t * @return the maximum value contained in the set.\r\n\t * @throws RangeError if set is empty\r\n\t */\r\n\tget maxElement(): number {\r\n\t\tif (this.isNil) {\r\n\t\t\tthrow new RangeError(\"set is empty\");\r\n\t\t}\r\n\r\n\t\tlet last: Interval = this._intervals[this._intervals.length - 1];\r\n\t\treturn last.b;\r\n\t}\r\n\r\n\t/**\r\n\t * Returns the minimum value contained in the set if not isNil.\r\n\t *\r\n\t * @return the minimum value contained in the set.\r\n\t * @throws RangeError if set is empty\r\n\t 
*/\r\n\tget minElement(): number {\r\n\t\tif (this.isNil) {\r\n\t\t\tthrow new RangeError(\"set is empty\");\r\n\t\t}\r\n\r\n\t\treturn this._intervals[0].a;\r\n\t}\r\n\r\n\t/** Return a list of Interval objects. */\r\n\tget intervals(): Interval[] {\r\n\t\treturn this._intervals;\r\n\t}\r\n\r\n\t@Override\r\n\tpublic hashCode(): number {\r\n\t\tlet hash: number = MurmurHash.initialize();\r\n\t\tfor (let I of this._intervals) {\r\n\t\t\thash = MurmurHash.update(hash, I.a);\r\n\t\t\thash = MurmurHash.update(hash, I.b);\r\n\t\t}\r\n\r\n\t\thash = MurmurHash.finish(hash, this._intervals.length * 2);\r\n\t\treturn hash;\r\n\t}\r\n\r\n\t/** Are two IntervalSets equal? Because all intervals are sorted\r\n\t * and disjoint, equals is a simple linear walk over both lists\r\n\t * to make sure they are the same. Interval.equals() is used\r\n\t * by the List.equals() method to check the ranges.\r\n\t */\r\n\t@Override\r\n\tpublic equals(o: any): boolean {\r\n\t\tif (o == null || !(o instanceof IntervalSet)) {\r\n\t\t\treturn false;\r\n\t\t}\r\n\r\n\t\treturn ArrayEqualityComparator.INSTANCE.equals(this._intervals, o._intervals);\r\n\t}\r\n\r\n\tpublic toString(elemAreChar: boolean = false): string {\r\n\t\tlet buf: string = \"\";\r\n\t\tif (this._intervals == null || this._intervals.length === 0) {\r\n\t\t\treturn \"{}\";\r\n\t\t}\r\n\r\n\t\tif (this.size > 1) {\r\n\t\t\tbuf += \"{\";\r\n\t\t}\r\n\r\n\t\tlet first: boolean = true;\r\n\t\tfor (let I of this._intervals) {\r\n\t\t\tif (first) {\r\n\t\t\t\tfirst = false;\r\n\t\t\t} else {\r\n\t\t\t\tbuf += \", \";\r\n\t\t\t}\r\n\r\n\t\t\tlet a: number = I.a;\r\n\t\t\tlet b: number = I.b;\r\n\t\t\tif (a === b) {\r\n\t\t\t\tif (a === Token.EOF) {\r\n\t\t\t\t\tbuf += \"\";\r\n\t\t\t\t} else if (elemAreChar) {\r\n\t\t\t\t\tbuf += \"'\" + String.fromCodePoint(a) + \"'\";\r\n\t\t\t\t} else {\r\n\t\t\t\t\tbuf += a;\r\n\t\t\t\t}\r\n\t\t\t} else {\r\n\t\t\t\tif (elemAreChar) {\r\n\t\t\t\t\tbuf += \"'\" + String.fromCodePoint(a) + \"'..'\" 
+ String.fromCodePoint(b) + \"'\";\r\n\t\t\t\t} else {\r\n\t\t\t\t\tbuf += a + \"..\" + b;\r\n\t\t\t\t}\r\n\t\t\t}\r\n\t\t}\r\n\r\n\t\tif (this.size > 1) {\r\n\t\t\tbuf += \"}\";\r\n\t\t}\r\n\r\n\t\treturn buf;\r\n\t}\r\n\r\n\tpublic toStringVocabulary( @NotNull vocabulary: Vocabulary): string {\r\n\t\tif (this._intervals == null || this._intervals.length === 0) {\r\n\t\t\treturn \"{}\";\r\n\t\t}\r\n\r\n\t\tlet buf: string = \"\";\r\n\t\tif (this.size > 1) {\r\n\t\t\tbuf += \"{\";\r\n\t\t}\r\n\r\n\t\tlet first: boolean = true;\r\n\t\tfor (let I of this._intervals) {\r\n\t\t\tif (first) {\r\n\t\t\t\tfirst = false;\r\n\t\t\t} else {\r\n\t\t\t\tbuf += \", \";\r\n\t\t\t}\r\n\r\n\t\t\tlet a: number = I.a;\r\n\t\t\tlet b: number = I.b;\r\n\t\t\tif (a === b) {\r\n\t\t\t\tbuf += this.elementName(vocabulary, a);\r\n\t\t\t} else {\r\n\t\t\t\tfor (let i = a; i <= b; i++) {\r\n\t\t\t\t\tif (i > a) {\r\n\t\t\t\t\t\tbuf += \", \";\r\n\t\t\t\t\t}\r\n\r\n\t\t\t\t\tbuf += this.elementName(vocabulary, i);\r\n\t\t\t\t}\r\n\t\t\t}\r\n\t\t}\r\n\r\n\t\tif (this.size > 1) {\r\n\t\t\tbuf += \"}\";\r\n\t\t}\r\n\r\n\t\treturn buf;\r\n\t}\r\n\r\n\t@NotNull\r\n\tprotected elementName( @NotNull vocabulary: Vocabulary, a: number): string {\r\n\t\tif (a === Token.EOF) {\r\n\t\t\treturn \"\";\r\n\t\t} else if (a === Token.EPSILON) {\r\n\t\t\treturn \"\";\r\n\t\t} else {\r\n\t\t\treturn vocabulary.getDisplayName(a);\r\n\t\t}\r\n\t}\r\n\r\n\t@Override\r\n\tget size(): number {\r\n\t\tlet n: number = 0;\r\n\t\tlet numIntervals: number = this._intervals.length;\r\n\t\tif (numIntervals === 1) {\r\n\t\t\tlet firstInterval: Interval = this._intervals[0];\r\n\t\t\treturn firstInterval.b - firstInterval.a + 1;\r\n\t\t}\r\n\r\n\t\tfor (let i = 0; i < numIntervals; i++) {\r\n\t\t\tlet I: Interval = this._intervals[i];\r\n\t\t\tn += (I.b - I.a + 1);\r\n\t\t}\r\n\r\n\t\treturn n;\r\n\t}\r\n\r\n\tpublic toIntegerList(): IntegerList {\r\n\t\tlet values: IntegerList = new IntegerList(this.size);\r\n\t\tlet n: 
number = this._intervals.length;\r\n\t\tfor (let i = 0; i < n; i++) {\r\n\t\t\tlet I: Interval = this._intervals[i];\r\n\t\t\tlet a: number = I.a;\r\n\t\t\tlet b: number = I.b;\r\n\t\t\tfor (let v = a; v <= b; v++) {\r\n\t\t\t\tvalues.add(v);\r\n\t\t\t}\r\n\t\t}\r\n\r\n\t\treturn values;\r\n\t}\r\n\r\n\tpublic toSet(): Set {\r\n\t\tlet s: Set = new Set();\r\n\t\tfor (let I of this._intervals) {\r\n\t\t\tlet a: number = I.a;\r\n\t\t\tlet b: number = I.b;\r\n\t\t\tfor (let v = a; v <= b; v++) {\r\n\t\t\t\ts.add(v);\r\n\t\t\t}\r\n\t\t}\r\n\r\n\t\treturn s;\r\n\t}\r\n\r\n\tpublic toArray(): number[] {\r\n\t\tlet values: number[] = new Array();\r\n\t\tlet n: number = this._intervals.length;\r\n\t\tfor (let i = 0; i < n; i++) {\r\n\t\t\tlet I: Interval = this._intervals[i];\r\n\t\t\tlet a: number = I.a;\r\n\t\t\tlet b: number = I.b;\r\n\t\t\tfor (let v = a; v <= b; v++) {\r\n\t\t\t\tvalues.push(v);\r\n\t\t\t}\r\n\t\t}\r\n\r\n\t\treturn values;\r\n\t}\r\n\r\n\t@Override\r\n\tpublic remove(el: number): void {\r\n\t\tif (this.readonly) {\r\n\t\t\tthrow new Error(\"can't alter readonly IntervalSet\");\r\n\t\t}\r\n\r\n\t\tlet n: number = this._intervals.length;\r\n\t\tfor (let i = 0; i < n; i++) {\r\n\t\t\tlet I: Interval = this._intervals[i];\r\n\t\t\tlet a: number = I.a;\r\n\t\t\tlet b: number = I.b;\r\n\t\t\tif (el < a) {\r\n\t\t\t\tbreak; // list is sorted and el is before this interval; not here\r\n\t\t\t}\r\n\t\t\t// if whole interval x..x, rm\r\n\t\t\tif (el === a && el === b) {\r\n\t\t\t\tthis._intervals.splice(i, 1);\r\n\t\t\t\tbreak;\r\n\t\t\t}\r\n\t\t\t// if on left edge x..b, adjust left\r\n\t\t\tif (el === a) {\r\n\t\t\t\tthis._intervals[i] = Interval.of(I.a + 1, I.b);\r\n\t\t\t\tbreak;\r\n\t\t\t}\r\n\t\t\t// if on right edge a..x, adjust right\r\n\t\t\tif (el === b) {\r\n\t\t\t\tthis._intervals[i] = Interval.of(I.a, I.b - 1);\r\n\t\t\t\tbreak;\r\n\t\t\t}\r\n\t\t\t// if in middle a..x..b, split interval\r\n\t\t\tif (el > a && el < b) { // found in this 
interval\r\n\t\t\t\tlet oldb: number = I.b;\r\n\t\t\t\tthis._intervals[i] = Interval.of(I.a, el - 1); // [a..x-1]\r\n\t\t\t\tthis.add(el + 1, oldb); // add [x+1..b]\r\n\t\t\t}\r\n\t\t}\r\n\t}\r\n\r\n\tget isReadonly(): boolean {\r\n\t\treturn this.readonly;\r\n\t}\r\n\r\n\tpublic setReadonly(readonly: boolean): void {\r\n\t\tif (this.readonly && !readonly) {\r\n\t\t\tthrow new Error(\"can't alter readonly IntervalSet\");\r\n\t\t}\r\n\r\n\t\tthis.readonly = readonly;\r\n\t}\r\n}\r\n", "/*!\r\n * Copyright 2016 The ANTLR Project. All rights reserved.\r\n * Licensed under the BSD-3-Clause license. See LICENSE file in the project root for license information.\r\n */\r\n\r\n// ConvertTo-TS run at 2016-10-04T11:26:25.8187912-07:00\r\n\r\nimport { NotNull } from \"../Decorators\";\r\n\r\n/**\r\n *\r\n * @author Sam Harwell\r\n */\r\nexport class ATNDeserializationOptions {\r\n\tprivate static _defaultOptions?: ATNDeserializationOptions;\r\n\r\n\tprivate readOnly: boolean = false;\r\n\tprivate verifyATN: boolean;\r\n\tprivate generateRuleBypassTransitions: boolean;\r\n\tprivate optimize: boolean;\r\n\r\n\tconstructor(options?: ATNDeserializationOptions) {\r\n\t\tif (options) {\r\n\t\t\tthis.verifyATN = options.verifyATN;\r\n\t\t\tthis.generateRuleBypassTransitions = options.generateRuleBypassTransitions;\r\n\t\t\tthis.optimize = options.optimize;\r\n\t\t} else {\r\n\t\t\tthis.verifyATN = true;\r\n\t\t\tthis.generateRuleBypassTransitions = false;\r\n\t\t\tthis.optimize = true;\r\n\t\t}\r\n\t}\r\n\r\n\t@NotNull\r\n\tstatic get defaultOptions(): ATNDeserializationOptions {\r\n\t\tif (ATNDeserializationOptions._defaultOptions == null) {\r\n\t\t\tATNDeserializationOptions._defaultOptions = new ATNDeserializationOptions();\r\n\t\t\tATNDeserializationOptions._defaultOptions.makeReadOnly();\r\n\t\t}\r\n\r\n\t\treturn ATNDeserializationOptions._defaultOptions;\r\n\t}\r\n\r\n\tget isReadOnly(): boolean {\r\n\t\treturn this.readOnly;\r\n\t}\r\n\r\n\tpublic makeReadOnly(): void 
{\r\n\t\tthis.readOnly = true;\r\n\t}\r\n\r\n\tget isVerifyATN(): boolean {\r\n\t\treturn this.verifyATN;\r\n\t}\r\n\r\n\tset isVerifyATN(verifyATN: boolean) {\r\n\t\tthis.throwIfReadOnly();\r\n\t\tthis.verifyATN = verifyATN;\r\n\t}\r\n\r\n\tget isGenerateRuleBypassTransitions(): boolean {\r\n\t\treturn this.generateRuleBypassTransitions;\r\n\t}\r\n\r\n\tset isGenerateRuleBypassTransitions(generateRuleBypassTransitions: boolean) {\r\n\t\tthis.throwIfReadOnly();\r\n\t\tthis.generateRuleBypassTransitions = generateRuleBypassTransitions;\r\n\t}\r\n\r\n\tget isOptimize(): boolean {\r\n\t\treturn this.optimize;\r\n\t}\r\n\r\n\tset isOptimize(optimize: boolean) {\r\n\t\tthis.throwIfReadOnly();\r\n\t\tthis.optimize = optimize;\r\n\t}\r\n\r\n\tprotected throwIfReadOnly(): void {\r\n\t\tif (this.isReadOnly) {\r\n\t\t\tthrow new Error(\"The object is read only.\");\r\n\t\t}\r\n\t}\r\n}\r\n", "/*!\r\n * Copyright 2016 The ANTLR Project. All rights reserved.\r\n * Licensed under the BSD-3-Clause license. 
See LICENSE file in the project root for license information.\r\n */\r\n\r\n// ConvertTo-TS run at 2016-10-04T11:26:24.7363448-07:00\r\n\r\nimport { ATNState } from \"./ATNState\";\r\nimport { Override, NotNull } from \"../Decorators\";\r\nimport { Transition } from \"./Transition\";\r\nimport { TransitionType } from \"./TransitionType\";\r\n\r\nexport class ActionTransition extends Transition {\r\n\tpublic ruleIndex: number;\r\n\tpublic actionIndex: number;\r\n\tpublic isCtxDependent: boolean; // e.g., $i ref in action\r\n\r\n\tconstructor(@NotNull target: ATNState, ruleIndex: number, actionIndex: number = -1, isCtxDependent: boolean = false) {\r\n\t\tsuper(target);\r\n\t\tthis.ruleIndex = ruleIndex;\r\n\t\tthis.actionIndex = actionIndex;\r\n\t\tthis.isCtxDependent = isCtxDependent;\r\n\t}\r\n\r\n\t@Override\r\n\tget serializationType(): TransitionType {\r\n\t\treturn TransitionType.ACTION;\r\n\t}\r\n\r\n\t@Override\r\n\tget isEpsilon(): boolean {\r\n\t\treturn true; // we are to be ignored by analysis 'cept for predicates\r\n\t}\r\n\r\n\t@Override\r\n\tpublic matches(symbol: number, minVocabSymbol: number, maxVocabSymbol: number): boolean {\r\n\t\treturn false;\r\n\t}\r\n\r\n\t@Override\r\n\tpublic toString(): string {\r\n\t\treturn \"action_\" + this.ruleIndex + \":\" + this.actionIndex;\r\n\t}\r\n}\r\n", "/*!\r\n * Copyright 2016 The ANTLR Project. All rights reserved.\r\n * Licensed under the BSD-3-Clause license. See LICENSE file in the project root for license information.\r\n */\r\n\r\n// ConvertTo-TS run at 2016-10-04T11:26:27.6769122-07:00\r\n\r\nimport { ATNState } from \"./ATNState\";\r\nimport { IntervalSet } from \"../misc/IntervalSet\";\r\nimport { Override, NotNull } from \"../Decorators\";\r\nimport { Transition } from \"./Transition\";\r\nimport { TransitionType } from \"./TransitionType\";\r\n\r\n/** TODO: make all transitions sets? 
no, should remove set edges */\r\nexport class AtomTransition extends Transition {\r\n\t/** The token type or character value; or, signifies special label. */\r\n\tpublic _label: number;\r\n\r\n\tconstructor(@NotNull target: ATNState, label: number) {\r\n\t\tsuper(target);\r\n\t\tthis._label = label;\r\n\t}\r\n\r\n\t@Override\r\n\tget serializationType(): TransitionType {\r\n\t\treturn TransitionType.ATOM;\r\n\t}\r\n\r\n\t@Override\r\n\t@NotNull\r\n\tget label(): IntervalSet {\r\n\t\treturn IntervalSet.of(this._label);\r\n\t}\r\n\r\n\t@Override\r\n\tpublic matches(symbol: number, minVocabSymbol: number, maxVocabSymbol: number): boolean {\r\n\t\treturn this._label === symbol;\r\n\t}\r\n\r\n\t@Override\r\n\t@NotNull\r\n\tpublic toString(): string {\r\n\t\treturn String(this.label);\r\n\t}\r\n}\r\n", "/*!\r\n * Copyright 2016 The ANTLR Project. All rights reserved.\r\n * Licensed under the BSD-3-Clause license. See LICENSE file in the project root for license information.\r\n */\r\n\r\n// ConvertTo-TS run at 2016-10-04T11:26:27.9930394-07:00\r\n\r\nimport { BlockEndState } from \"./BlockEndState\";\r\nimport { DecisionState } from \"./DecisionState\";\r\nimport { Override } from \"../Decorators\";\r\n\r\n/** The start of a regular `(...)` block. */\r\nexport abstract class BlockStartState extends DecisionState {\r\n\t// This is always set during ATN deserialization\r\n\tpublic endState!: BlockEndState;\r\n}\r\n", "/*!\r\n * Copyright 2016 The ANTLR Project. All rights reserved.\r\n * Licensed under the BSD-3-Clause license. 
See LICENSE file in the project root for license information.\r\n */\r\n\r\n// ConvertTo-TS run at 2016-10-04T11:26:27.7669801-07:00\r\n\r\nimport { ATNStateType } from \"./ATNStateType\";\r\nimport { BlockStartState } from \"./BlockStartState\";\r\nimport { Override } from \"../Decorators\";\r\n\r\n/**\r\n *\r\n * @author Sam Harwell\r\n */\r\nexport class BasicBlockStartState extends BlockStartState {\r\n\r\n\t@Override\r\n\tget stateType(): ATNStateType {\r\n\t\treturn ATNStateType.BLOCK_START;\r\n\t}\r\n\r\n}\r\n", "/*!\r\n * Copyright 2016 The ANTLR Project. All rights reserved.\r\n * Licensed under the BSD-3-Clause license. See LICENSE file in the project root for license information.\r\n */\r\n\r\n// ConvertTo-TS run at 2016-10-04T11:26:27.9125304-07:00\r\n\r\nimport { ATNState } from \"./ATNState\";\r\nimport { ATNStateType } from \"./ATNStateType\";\r\nimport { BlockStartState } from \"./BlockStartState\";\r\nimport { Override } from \"../Decorators\";\r\n\r\n/** Terminal node of a simple `(a|b|c)` block. */\r\nexport class BlockEndState extends ATNState {\r\n\t// This is always set during ATN deserialization\r\n\tpublic startState!: BlockStartState;\r\n\r\n\t@Override\r\n\tget stateType(): ATNStateType {\r\n\t\treturn ATNStateType.BLOCK_END;\r\n\t}\r\n}\r\n", "/*!\r\n * Copyright 2016 The ANTLR Project. All rights reserved.\r\n * Licensed under the BSD-3-Clause license. 
See LICENSE file in the project root for license information.\r\n */\r\n\r\n// ConvertTo-TS run at 2016-10-04T11:26:28.6283213-07:00\r\n\r\nimport { ATNState } from \"./ATNState\";\r\nimport { Override, NotNull } from \"../Decorators\";\r\nimport { Transition } from \"./Transition\";\r\nimport { TransitionType } from \"./TransitionType\";\r\n\r\nexport class EpsilonTransition extends Transition {\r\n\r\n\tprivate _outermostPrecedenceReturn: number;\r\n\r\n\tconstructor(@NotNull target: ATNState, outermostPrecedenceReturn: number = -1) {\r\n\t\tsuper(target);\r\n\t\tthis._outermostPrecedenceReturn = outermostPrecedenceReturn;\r\n\t}\r\n\r\n\t/**\r\n\t * @returns the rule index of a precedence rule for which this transition is\r\n\t * returning from, where the precedence value is 0; otherwise, -1.\r\n\t *\r\n\t * @see ATNConfig.isPrecedenceFilterSuppressed\r\n\t * @see ParserATNSimulator#applyPrecedenceFilter(ATNConfigSet, ParserRuleContext, PredictionContextCache)\r\n\t * @since 4.4.1\r\n\t */\r\n\tget outermostPrecedenceReturn(): number {\r\n\t\treturn this._outermostPrecedenceReturn;\r\n\t}\r\n\r\n\t@Override\r\n\tget serializationType(): TransitionType {\r\n\t\treturn TransitionType.EPSILON;\r\n\t}\r\n\r\n\t@Override\r\n\tget isEpsilon(): boolean {\r\n\t\treturn true;\r\n\t}\r\n\r\n\t@Override\r\n\tpublic matches(symbol: number, minVocabSymbol: number, maxVocabSymbol: number): boolean {\r\n\t\treturn false;\r\n\t}\r\n\r\n\t@Override\r\n\t@NotNull\r\n\tpublic toString(): string {\r\n\t\treturn \"epsilon\";\r\n\t}\r\n}\r\n", "/*!\r\n * Copyright 2016 The ANTLR Project. All rights reserved.\r\n * Licensed under the BSD-3-Clause license. 
See LICENSE file in the project root for license information.\r\n */\r\n\r\n// ConvertTo-TS run at 2016-10-04T11:26:29.5634388-07:00\r\n\r\nimport { Lexer } from \"../Lexer\";\r\nimport { LexerAction } from \"./LexerAction\";\r\nimport { LexerActionType } from \"./LexerActionType\";\r\nimport { MurmurHash } from \"../misc/MurmurHash\";\r\nimport { NotNull, Override } from \"../Decorators\";\r\n\r\n/**\r\n * Implements the `channel` lexer action by calling\r\n * {@link Lexer#setChannel} with the assigned channel.\r\n *\r\n * @author Sam Harwell\r\n * @since 4.2\r\n */\r\nexport class LexerChannelAction implements LexerAction {\r\n\tprivate readonly _channel: number;\r\n\r\n\t/**\r\n\t * Constructs a new `channel` action with the specified channel value.\r\n\t * @param channel The channel value to pass to {@link Lexer#setChannel}.\r\n\t */\r\n\tconstructor(channel: number) {\r\n\t\tthis._channel = channel;\r\n\t}\r\n\r\n\t/**\r\n\t * Gets the channel to use for the {@link Token} created by the lexer.\r\n\t *\r\n\t * @returns The channel to use for the {@link Token} created by the lexer.\r\n\t */\r\n\tget channel(): number {\r\n\t\treturn this._channel;\r\n\t}\r\n\r\n\t/**\r\n\t * {@inheritDoc}\r\n\t * @returns This method returns {@link LexerActionType#CHANNEL}.\r\n\t */\r\n\t@Override\r\n\tget actionType(): LexerActionType {\r\n\t\treturn LexerActionType.CHANNEL;\r\n\t}\r\n\r\n\t/**\r\n\t * {@inheritDoc}\r\n\t * @returns This method returns `false`.\r\n\t */\r\n\t@Override\r\n\tget isPositionDependent(): boolean {\r\n\t\treturn false;\r\n\t}\r\n\r\n\t/**\r\n\t * {@inheritDoc}\r\n\t *\r\n\t * This action is implemented by calling {@link Lexer#setChannel} with the\r\n\t * value provided by {@link #getChannel}.\r\n\t */\r\n\t@Override\r\n\tpublic execute(@NotNull lexer: Lexer): void {\r\n\t\tlexer.channel = this._channel;\r\n\t}\r\n\r\n\t@Override\r\n\tpublic hashCode(): number {\r\n\t\tlet hash: number = MurmurHash.initialize();\r\n\t\thash = MurmurHash.update(hash, 
this.actionType);\r\n\t\thash = MurmurHash.update(hash, this._channel);\r\n\t\treturn MurmurHash.finish(hash, 2);\r\n\t}\r\n\r\n\t@Override\r\n\tpublic equals(obj: any): boolean {\r\n\t\tif (obj === this) {\r\n\t\t\treturn true;\r\n\t\t} else if (!(obj instanceof LexerChannelAction)) {\r\n\t\t\treturn false;\r\n\t\t}\r\n\r\n\t\treturn this._channel === obj._channel;\r\n\t}\r\n\r\n\t@Override\r\n\tpublic toString(): string {\r\n\t\treturn `channel(${this._channel})`;\r\n\t}\r\n}\r\n", "/*!\r\n * Copyright 2016 The ANTLR Project. All rights reserved.\r\n * Licensed under the BSD-3-Clause license. See LICENSE file in the project root for license information.\r\n */\r\n\r\n// ConvertTo-TS run at 2016-10-04T11:26:29.6567992-07:00\r\n\r\nimport { Lexer } from \"../Lexer\";\r\nimport { LexerAction } from \"./LexerAction\";\r\nimport { LexerActionType } from \"./LexerActionType\";\r\nimport { MurmurHash } from \"../misc/MurmurHash\";\r\nimport { NotNull, Override } from \"../Decorators\";\r\n\r\n/**\r\n * Executes a custom lexer action by calling {@link Recognizer#action} with the\r\n * rule and action indexes assigned to the custom action. 
The implementation of\r\n * a custom action is added to the generated code for the lexer in an override\r\n * of {@link Recognizer#action} when the grammar is compiled.\r\n *\r\n * This class may represent embedded actions created with the `{...}`\r\n * syntax in ANTLR 4, as well as actions created for lexer commands where the\r\n * command argument could not be evaluated when the grammar was compiled.\r\n *\r\n * @author Sam Harwell\r\n * @since 4.2\r\n */\r\nexport class LexerCustomAction implements LexerAction {\r\n\tprivate readonly _ruleIndex: number;\r\n\tprivate readonly _actionIndex: number;\r\n\r\n\t/**\r\n\t * Constructs a custom lexer action with the specified rule and action\r\n\t * indexes.\r\n\t *\r\n\t * @param ruleIndex The rule index to use for calls to\r\n\t * {@link Recognizer#action}.\r\n\t * @param actionIndex The action index to use for calls to\r\n\t * {@link Recognizer#action}.\r\n\t */\r\n\tconstructor(ruleIndex: number, actionIndex: number) {\r\n\t\tthis._ruleIndex = ruleIndex;\r\n\t\tthis._actionIndex = actionIndex;\r\n\t}\r\n\r\n\t/**\r\n\t * Gets the rule index to use for calls to {@link Recognizer#action}.\r\n\t *\r\n\t * @returns The rule index for the custom action.\r\n\t */\r\n\tget ruleIndex(): number {\r\n\t\treturn this._ruleIndex;\r\n\t}\r\n\r\n\t/**\r\n\t * Gets the action index to use for calls to {@link Recognizer#action}.\r\n\t *\r\n\t * @returns The action index for the custom action.\r\n\t */\r\n\tget actionIndex(): number {\r\n\t\treturn this._actionIndex;\r\n\t}\r\n\r\n\t/**\r\n\t * {@inheritDoc}\r\n\t *\r\n\t * @returns This method returns {@link LexerActionType#CUSTOM}.\r\n\t */\r\n\t@Override\r\n\tget actionType(): LexerActionType {\r\n\t\treturn LexerActionType.CUSTOM;\r\n\t}\r\n\r\n\t/**\r\n\t * Gets whether the lexer action is position-dependent. 
Position-dependent\r\n\t * actions may have different semantics depending on the {@link CharStream}\r\n\t * index at the time the action is executed.\r\n\t *\r\n\t * Custom actions are position-dependent since they may represent a\r\n\t * user-defined embedded action which makes calls to methods like\r\n\t * {@link Lexer#getText}.\r\n\t *\r\n\t * @returns This method returns `true`.\r\n\t */\r\n\t@Override\r\n\tget isPositionDependent(): boolean {\r\n\t\treturn true;\r\n\t}\r\n\r\n\t/**\r\n\t * {@inheritDoc}\r\n\t *\r\n\t * Custom actions are implemented by calling {@link Lexer#action} with the\r\n\t * appropriate rule and action indexes.\r\n\t */\r\n\t@Override\r\n\tpublic execute(@NotNull lexer: Lexer): void {\r\n\t\tlexer.action(undefined, this._ruleIndex, this._actionIndex);\r\n\t}\r\n\r\n\t@Override\r\n\tpublic hashCode(): number {\r\n\t\tlet hash: number = MurmurHash.initialize();\r\n\t\thash = MurmurHash.update(hash, this.actionType);\r\n\t\thash = MurmurHash.update(hash, this._ruleIndex);\r\n\t\thash = MurmurHash.update(hash, this._actionIndex);\r\n\t\treturn MurmurHash.finish(hash, 3);\r\n\t}\r\n\r\n\t@Override\r\n\tpublic equals(obj: any): boolean {\r\n\t\tif (obj === this) {\r\n\t\t\treturn true;\r\n\t\t} else if (!(obj instanceof LexerCustomAction)) {\r\n\t\t\treturn false;\r\n\t\t}\r\n\r\n\t\treturn this._ruleIndex === obj._ruleIndex\r\n\t\t\t&& this._actionIndex === obj._actionIndex;\r\n\t}\r\n}\r\n", "/*!\r\n * Copyright 2016 The ANTLR Project. All rights reserved.\r\n * Licensed under the BSD-3-Clause license. 
See LICENSE file in the project root for license information.\r\n */\r\n\r\n// ConvertTo-TS run at 2016-10-04T11:26:29.8653427-07:00\r\n\r\nimport { Lexer } from \"../Lexer\";\r\nimport { LexerAction } from \"./LexerAction\";\r\nimport { LexerActionType } from \"./LexerActionType\";\r\nimport { MurmurHash } from \"../misc/MurmurHash\";\r\nimport { NotNull, Override } from \"../Decorators\";\r\n\r\n/**\r\n * Implements the `mode` lexer action by calling {@link Lexer#mode} with\r\n * the assigned mode.\r\n *\r\n * @author Sam Harwell\r\n * @since 4.2\r\n */\r\nexport class LexerModeAction implements LexerAction {\r\n\tprivate readonly _mode: number;\r\n\r\n\t/**\r\n\t * Constructs a new `mode` action with the specified mode value.\r\n\t * @param mode The mode value to pass to {@link Lexer#mode}.\r\n\t */\r\n\tconstructor(mode: number) {\r\n\t\tthis._mode = mode;\r\n\t}\r\n\r\n\t/**\r\n\t * Get the lexer mode this action should transition the lexer to.\r\n\t *\r\n\t * @returns The lexer mode for this `mode` command.\r\n\t */\r\n\tget mode(): number {\r\n\t\treturn this._mode;\r\n\t}\r\n\r\n\t/**\r\n\t * {@inheritDoc}\r\n\t * @returns This method returns {@link LexerActionType#MODE}.\r\n\t */\r\n\t@Override\r\n\tget actionType(): LexerActionType {\r\n\t\treturn LexerActionType.MODE;\r\n\t}\r\n\r\n\t/**\r\n\t * {@inheritDoc}\r\n\t * @returns This method returns `false`.\r\n\t */\r\n\t@Override\r\n\tget isPositionDependent(): boolean {\r\n\t\treturn false;\r\n\t}\r\n\r\n\t/**\r\n\t * {@inheritDoc}\r\n\t *\r\n\t * This action is implemented by calling {@link Lexer#mode} with the\r\n\t * value provided by {@link #getMode}.\r\n\t */\r\n\t@Override\r\n\tpublic execute(@NotNull lexer: Lexer): void {\r\n\t\tlexer.mode(this._mode);\r\n\t}\r\n\r\n\t@Override\r\n\tpublic hashCode(): number {\r\n\t\tlet hash: number = MurmurHash.initialize();\r\n\t\thash = MurmurHash.update(hash, this.actionType);\r\n\t\thash = MurmurHash.update(hash, this._mode);\r\n\t\treturn 
MurmurHash.finish(hash, 2);\r\n\t}\r\n\r\n\t@Override\r\n\tpublic equals(obj: any): boolean {\r\n\t\tif (obj === this) {\r\n\t\t\treturn true;\r\n\t\t} else if (!(obj instanceof LexerModeAction)) {\r\n\t\t\treturn false;\r\n\t\t}\r\n\r\n\t\treturn this._mode === obj._mode;\r\n\t}\r\n\r\n\t@Override\r\n\tpublic toString(): string {\r\n\t\treturn `mode(${this._mode})`;\r\n\t}\r\n}\r\n", "/*!\r\n * Copyright 2016 The ANTLR Project. All rights reserved.\r\n * Licensed under the BSD-3-Clause license. See LICENSE file in the project root for license information.\r\n */\r\n\r\n// ConvertTo-TS run at 2016-10-04T11:26:29.9613221-07:00\r\n\r\nimport { Lexer } from \"../Lexer\";\r\nimport { LexerAction } from \"./LexerAction\";\r\nimport { LexerActionType } from \"./LexerActionType\";\r\nimport { MurmurHash } from \"../misc/MurmurHash\";\r\nimport { NotNull, Override } from \"../Decorators\";\r\n\r\n/**\r\n * Implements the `more` lexer action by calling {@link Lexer#more}.\r\n *\r\n * The `more` command does not have any parameters, so this action is\r\n * implemented as a singleton instance exposed by {@link #INSTANCE}.\r\n *\r\n * @author Sam Harwell\r\n * @since 4.2\r\n */\r\nexport class LexerMoreAction implements LexerAction {\r\n\t/**\r\n\t * Constructs the singleton instance of the lexer `more` command.\r\n\t */\r\n\tconstructor() {\r\n\t\t// intentionally empty\r\n\t}\r\n\r\n\t/**\r\n\t * {@inheritDoc}\r\n\t * @returns This method returns {@link LexerActionType#MORE}.\r\n\t */\r\n\t@Override\r\n\tget actionType(): LexerActionType {\r\n\t\treturn LexerActionType.MORE;\r\n\t}\r\n\r\n\t/**\r\n\t * {@inheritDoc}\r\n\t * @returns This method returns `false`.\r\n\t */\r\n\t@Override\r\n\tget isPositionDependent(): boolean {\r\n\t\treturn false;\r\n\t}\r\n\r\n\t/**\r\n\t * {@inheritDoc}\r\n\t *\r\n\t * This action is implemented by calling {@link Lexer#more}.\r\n\t */\r\n\t@Override\r\n\tpublic execute(@NotNull lexer: Lexer): void 
{\r\n\t\tlexer.more();\r\n\t}\r\n\r\n\t@Override\r\n\tpublic hashCode(): number {\r\n\t\tlet hash: number = MurmurHash.initialize();\r\n\t\thash = MurmurHash.update(hash, this.actionType);\r\n\t\treturn MurmurHash.finish(hash, 1);\r\n\t}\r\n\r\n\t@Override\r\n\tpublic equals(obj: any): boolean {\r\n\t\treturn obj === this;\r\n\t}\r\n\r\n\t@Override\r\n\tpublic toString(): string {\r\n\t\treturn \"more\";\r\n\t}\r\n}\r\n\r\nexport namespace LexerMoreAction {\r\n\t/**\r\n\t * Provides a singleton instance of this parameterless lexer action.\r\n\t */\r\n\texport const INSTANCE: LexerMoreAction = new LexerMoreAction();\r\n}\r\n", "/*!\r\n * Copyright 2016 The ANTLR Project. All rights reserved.\r\n * Licensed under the BSD-3-Clause license. See LICENSE file in the project root for license information.\r\n */\r\n\r\n// ConvertTo-TS run at 2016-10-04T11:26:30.0449220-07:00\r\n\r\nimport { Lexer } from \"../Lexer\";\r\nimport { LexerAction } from \"./LexerAction\";\r\nimport { LexerActionType } from \"./LexerActionType\";\r\nimport { MurmurHash } from \"../misc/MurmurHash\";\r\nimport { NotNull, Override } from \"../Decorators\";\r\n\r\n/**\r\n * Implements the `popMode` lexer action by calling {@link Lexer#popMode}.\r\n *\r\n * The `popMode` command does not have any parameters, so this action is\r\n * implemented as a singleton instance exposed by {@link #INSTANCE}.\r\n *\r\n * @author Sam Harwell\r\n * @since 4.2\r\n */\r\nexport class LexerPopModeAction implements LexerAction {\r\n\t/**\r\n\t * Constructs the singleton instance of the lexer `popMode` command.\r\n\t */\r\n\tconstructor() {\r\n\t\t// intentionally empty\r\n\t}\r\n\r\n\t/**\r\n\t * {@inheritDoc}\r\n\t * @returns This method returns {@link LexerActionType#POP_MODE}.\r\n\t */\r\n\t@Override\r\n\tget actionType(): LexerActionType {\r\n\t\treturn LexerActionType.POP_MODE;\r\n\t}\r\n\r\n\t/**\r\n\t * {@inheritDoc}\r\n\t * @returns This method returns `false`.\r\n\t */\r\n\t@Override\r\n\tget 
isPositionDependent(): boolean {\r\n\t\treturn false;\r\n\t}\r\n\r\n\t/**\r\n\t * {@inheritDoc}\r\n\t *\r\n\t * This action is implemented by calling {@link Lexer#popMode}.\r\n\t */\r\n\t@Override\r\n\tpublic execute(@NotNull lexer: Lexer): void {\r\n\t\tlexer.popMode();\r\n\t}\r\n\r\n\t@Override\r\n\tpublic hashCode(): number {\r\n\t\tlet hash: number = MurmurHash.initialize();\r\n\t\thash = MurmurHash.update(hash, this.actionType);\r\n\t\treturn MurmurHash.finish(hash, 1);\r\n\t}\r\n\r\n\t@Override\r\n\tpublic equals(obj: any): boolean {\r\n\t\treturn obj === this;\r\n\t}\r\n\r\n\t@Override\r\n\tpublic toString(): string {\r\n\t\treturn \"popMode\";\r\n\t}\r\n}\r\n\r\nexport namespace LexerPopModeAction {\r\n\t/**\r\n\t * Provides a singleton instance of this parameterless lexer action.\r\n\t */\r\n\texport const INSTANCE: LexerPopModeAction = new LexerPopModeAction();\r\n}\r\n", "/*!\r\n * Copyright 2016 The ANTLR Project. All rights reserved.\r\n * Licensed under the BSD-3-Clause license. 
See LICENSE file in the project root for license information.\r\n */\r\n\r\n// ConvertTo-TS run at 2016-10-04T11:26:30.1378801-07:00\r\n\r\nimport { Lexer } from \"../Lexer\";\r\nimport { LexerAction } from \"./LexerAction\";\r\nimport { LexerActionType } from \"./LexerActionType\";\r\nimport { MurmurHash } from \"../misc/MurmurHash\";\r\nimport { NotNull, Override } from \"../Decorators\";\r\n\r\n/**\r\n * Implements the `pushMode` lexer action by calling\r\n * {@link Lexer#pushMode} with the assigned mode.\r\n *\r\n * @author Sam Harwell\r\n * @since 4.2\r\n */\r\nexport class LexerPushModeAction implements LexerAction {\r\n\tprivate readonly _mode: number;\r\n\r\n\t/**\r\n\t * Constructs a new `pushMode` action with the specified mode value.\r\n\t * @param mode The mode value to pass to {@link Lexer#pushMode}.\r\n\t */\r\n\tconstructor(mode: number) {\r\n\t\tthis._mode = mode;\r\n\t}\r\n\r\n\t/**\r\n\t * Get the lexer mode this action should transition the lexer to.\r\n\t *\r\n\t * @returns The lexer mode for this `pushMode` command.\r\n\t */\r\n\tget mode(): number {\r\n\t\treturn this._mode;\r\n\t}\r\n\r\n\t/**\r\n\t * {@inheritDoc}\r\n\t * @returns This method returns {@link LexerActionType#PUSH_MODE}.\r\n\t */\r\n\t@Override\r\n\tget actionType(): LexerActionType {\r\n\t\treturn LexerActionType.PUSH_MODE;\r\n\t}\r\n\r\n\t/**\r\n\t * {@inheritDoc}\r\n\t * @returns This method returns `false`.\r\n\t */\r\n\t@Override\r\n\tget isPositionDependent(): boolean {\r\n\t\treturn false;\r\n\t}\r\n\r\n\t/**\r\n\t * {@inheritDoc}\r\n\t *\r\n\t * This action is implemented by calling {@link Lexer#pushMode} with the\r\n\t * value provided by {@link #getMode}.\r\n\t */\r\n\t@Override\r\n\tpublic execute(@NotNull lexer: Lexer): void {\r\n\t\tlexer.pushMode(this._mode);\r\n\t}\r\n\r\n\t@Override\r\n\tpublic hashCode(): number {\r\n\t\tlet hash: number = MurmurHash.initialize();\r\n\t\thash = MurmurHash.update(hash, this.actionType);\r\n\t\thash = MurmurHash.update(hash, 
this._mode);\r\n\t\treturn MurmurHash.finish(hash, 2);\r\n\t}\r\n\r\n\t@Override\r\n\tpublic equals(obj: any): boolean {\r\n\t\tif (obj === this) {\r\n\t\t\treturn true;\r\n\t\t} else if (!(obj instanceof LexerPushModeAction)) {\r\n\t\t\treturn false;\r\n\t\t}\r\n\r\n\t\treturn this._mode === obj._mode;\r\n\t}\r\n\r\n\t@Override\r\n\tpublic toString(): string {\r\n\t\treturn `pushMode(${this._mode})`;\r\n\t}\r\n}\r\n", "/*!\r\n * Copyright 2016 The ANTLR Project. All rights reserved.\r\n * Licensed under the BSD-3-Clause license. See LICENSE file in the project root for license information.\r\n */\r\n\r\n// ConvertTo-TS run at 2016-10-04T11:26:30.2324460-07:00\r\n\r\nimport { Lexer } from \"../Lexer\";\r\nimport { LexerAction } from \"./LexerAction\";\r\nimport { LexerActionType } from \"./LexerActionType\";\r\nimport { MurmurHash } from \"../misc/MurmurHash\";\r\nimport { NotNull, Override } from \"../Decorators\";\r\n\r\n/**\r\n * Implements the `skip` lexer action by calling {@link Lexer#skip}.\r\n *\r\n * The `skip` command does not have any parameters, so this action is\r\n * implemented as a singleton instance exposed by {@link #INSTANCE}.\r\n *\r\n * @author Sam Harwell\r\n * @since 4.2\r\n */\r\nexport class LexerSkipAction implements LexerAction {\r\n\t/**\r\n\t * Constructs the singleton instance of the lexer `skip` command.\r\n\t */\r\n\tconstructor() {\r\n\t\t// intentionally empty\r\n\t}\r\n\r\n\t/**\r\n\t * {@inheritDoc}\r\n\t * @returns This method returns {@link LexerActionType#SKIP}.\r\n\t */\r\n\t@Override\r\n\tget actionType(): LexerActionType {\r\n\t\treturn LexerActionType.SKIP;\r\n\t}\r\n\r\n\t/**\r\n\t * {@inheritDoc}\r\n\t * @returns This method returns `false`.\r\n\t */\r\n\t@Override\r\n\tget isPositionDependent(): boolean {\r\n\t\treturn false;\r\n\t}\r\n\r\n\t/**\r\n\t * {@inheritDoc}\r\n\t *\r\n\t * This action is implemented by calling {@link Lexer#skip}.\r\n\t */\r\n\t@Override\r\n\tpublic execute(@NotNull lexer: Lexer): void 
{\r\n\t\tlexer.skip();\r\n\t}\r\n\r\n\t@Override\r\n\tpublic hashCode(): number {\r\n\t\tlet hash: number = MurmurHash.initialize();\r\n\t\thash = MurmurHash.update(hash, this.actionType);\r\n\t\treturn MurmurHash.finish(hash, 1);\r\n\t}\r\n\r\n\t@Override\r\n\tpublic equals(obj: any): boolean {\r\n\t\treturn obj === this;\r\n\t}\r\n\r\n\t@Override\r\n\tpublic toString(): string {\r\n\t\treturn \"skip\";\r\n\t}\r\n}\r\n\r\nexport namespace LexerSkipAction {\r\n\t/**\r\n\t * Provides a singleton instance of this parameterless lexer action.\r\n\t */\r\n\texport const INSTANCE: LexerSkipAction = new LexerSkipAction();\r\n}\r\n", "/*!\r\n * Copyright 2016 The ANTLR Project. All rights reserved.\r\n * Licensed under the BSD-3-Clause license. See LICENSE file in the project root for license information.\r\n */\r\n\r\n// ConvertTo-TS run at 2016-10-04T11:26:30.3204839-07:00\r\n\r\nimport { Lexer } from \"../Lexer\";\r\nimport { LexerAction } from \"./LexerAction\";\r\nimport { LexerActionType } from \"./LexerActionType\";\r\nimport { MurmurHash } from \"../misc/MurmurHash\";\r\nimport { NotNull, Override } from \"../Decorators\";\r\n\r\n/**\r\n * Implements the `type` lexer action by setting `Lexer.type`\r\n * with the assigned type.\r\n *\r\n * @author Sam Harwell\r\n * @since 4.2\r\n */\r\nexport class LexerTypeAction implements LexerAction {\r\n\tprivate readonly _type: number;\r\n\r\n\t/**\r\n\t * Constructs a new `type` action with the specified token type value.\r\n\t * @param type The type to assign to the token using `Lexer.type`.\r\n\t */\r\n\tconstructor(type: number) {\r\n\t\tthis._type = type;\r\n\t}\r\n\r\n\t/**\r\n\t * Gets the type to assign to a token created by the lexer.\r\n\t * @returns The type to assign to a token created by the lexer.\r\n\t */\r\n\tget type(): number {\r\n\t\treturn this._type;\r\n\t}\r\n\r\n\t/**\r\n\t * {@inheritDoc}\r\n\t * @returns This method returns {@link LexerActionType#TYPE}.\r\n\t */\r\n\t@Override\r\n\tget actionType(): 
LexerActionType {\r\n\t\treturn LexerActionType.TYPE;\r\n\t}\r\n\r\n\t/**\r\n\t * {@inheritDoc}\r\n\t * @returns This method returns `false`.\r\n\t */\r\n\t@Override\r\n\tget isPositionDependent(): boolean {\r\n\t\treturn false;\r\n\t}\r\n\r\n\t/**\r\n\t * {@inheritDoc}\r\n\t *\r\n\t * This action is implemented by setting `Lexer.type` with the\r\n\t * value provided by `type`.\r\n\t */\r\n\t@Override\r\n\tpublic execute(@NotNull lexer: Lexer): void {\r\n\t\tlexer.type = this._type;\r\n\t}\r\n\r\n\t@Override\r\n\tpublic hashCode(): number {\r\n\t\tlet hash: number = MurmurHash.initialize();\r\n\t\thash = MurmurHash.update(hash, this.actionType);\r\n\t\thash = MurmurHash.update(hash, this._type);\r\n\t\treturn MurmurHash.finish(hash, 2);\r\n\t}\r\n\r\n\t@Override\r\n\tpublic equals(obj: any): boolean {\r\n\t\tif (obj === this) {\r\n\t\t\treturn true;\r\n\t\t} else if (!(obj instanceof LexerTypeAction)) {\r\n\t\t\treturn false;\r\n\t\t}\r\n\r\n\t\treturn this._type === obj._type;\r\n\t}\r\n\r\n\t@Override\r\n\tpublic toString(): string {\r\n\t\treturn `type(${this._type})`;\r\n\t}\r\n}\r\n", "/*!\r\n * Copyright 2016 The ANTLR Project. All rights reserved.\r\n * Licensed under the BSD-3-Clause license. See LICENSE file in the project root for license information.\r\n */\r\n\r\n// ConvertTo-TS run at 2016-10-04T11:26:30.7737978-07:00\r\n\r\nimport { ATNState } from \"./ATNState\";\r\nimport { ATNStateType } from \"./ATNStateType\";\r\nimport { Override } from \"../Decorators\";\r\n\r\n/** Mark the end of a * or + loop. */\r\nexport class LoopEndState extends ATNState {\r\n\t// This is always set during ATN deserialization\r\n\tpublic loopBackState!: ATNState;\r\n\r\n\t@Override\r\n\tget stateType(): ATNStateType {\r\n\t\treturn ATNStateType.LOOP_END;\r\n\t}\r\n}\r\n", "/*!\r\n * Copyright 2016 The ANTLR Project. All rights reserved.\r\n * Licensed under the BSD-3-Clause license. 
See LICENSE file in the project root for license information.\r\n */\r\n\r\n// ConvertTo-TS run at 2016-10-04T11:26:28.0710131-07:00\r\n\r\nimport { BitSet } from \"../misc/BitSet\";\r\nimport { Override } from \"../Decorators\";\r\nimport * as Utils from \"../misc/Utils\";\r\n\r\n/**\r\n * This class stores information about a configuration conflict.\r\n *\r\n * @author Sam Harwell\r\n */\r\nexport class ConflictInfo {\r\n\tprivate _conflictedAlts: BitSet;\r\n\r\n\tprivate exact: boolean;\r\n\r\n\tconstructor(conflictedAlts: BitSet, exact: boolean) {\r\n\t\tthis._conflictedAlts = conflictedAlts;\r\n\t\tthis.exact = exact;\r\n\t}\r\n\r\n\t/**\r\n\t * Gets the set of conflicting alternatives for the configuration set.\r\n\t */\r\n\tget conflictedAlts(): BitSet {\r\n\t\treturn this._conflictedAlts;\r\n\t}\r\n\r\n\t/**\r\n\t * Gets whether or not the configuration conflict is an exact conflict.\r\n\t * An exact conflict occurs when the prediction algorithm determines that\r\n\t * the represented alternatives for a particular configuration set cannot be\r\n\t * further reduced by consuming additional input. After reaching an exact\r\n\t * conflict during an SLL prediction, only switch to full-context prediction\r\n\t * could reduce the set of viable alternatives. 
In LL prediction, an exact\r\n\t * conflict indicates a true ambiguity in the input.\r\n\t *\r\n\t * For the {@link PredictionMode#LL_EXACT_AMBIG_DETECTION} prediction mode,\r\n\t * accept states are conflicting but not exact are treated as non-accept\r\n\t * states.\r\n\t */\r\n\tget isExact(): boolean {\r\n\t\treturn this.exact;\r\n\t}\r\n\r\n\t@Override\r\n\tpublic equals(obj: any): boolean {\r\n\t\tif (obj === this) {\r\n\t\t\treturn true;\r\n\t\t} else if (!(obj instanceof ConflictInfo)) {\r\n\t\t\treturn false;\r\n\t\t}\r\n\r\n\t\treturn this.isExact === obj.isExact\r\n\t\t\t&& Utils.equals(this.conflictedAlts, obj.conflictedAlts);\r\n\t}\r\n\r\n\t@Override\r\n\tpublic hashCode(): number {\r\n\t\treturn this.conflictedAlts.hashCode();\r\n\t}\r\n}\r\n", "/*!\r\n * Copyright 2016 The ANTLR Project. All rights reserved.\r\n * Licensed under the BSD-3-Clause license. See LICENSE file in the project root for license information.\r\n */\r\n\r\n// ConvertTo-TS run at 2016-10-04T11:26:48.1433686-07:00\r\n\r\nimport { Interval } from \"../misc/Interval\";\r\nimport { Override } from \"../Decorators\";\r\nimport { Parser } from \"../Parser\";\r\nimport { ParseTree } from \"./ParseTree\";\r\nimport { ParseTreeVisitor } from \"./ParseTreeVisitor\";\r\nimport { RuleContext } from \"../RuleContext\";\r\nimport { RuleNode } from \"./RuleNode\";\r\nimport { Token } from \"../Token\";\r\n\r\nexport class TerminalNode implements ParseTree {\r\n\tpublic _symbol: Token;\r\n\tpublic _parent: RuleNode | undefined;\r\n\r\n\tconstructor(symbol: Token) {\r\n\t\tthis._symbol = symbol;\r\n\t}\r\n\r\n\t@Override\r\n\tpublic getChild(i: number): never {\r\n\t\tthrow new RangeError(\"Terminal Node has no children.\");\r\n\t}\r\n\r\n\tget symbol(): Token {\r\n\t\treturn this._symbol;\r\n\t}\r\n\r\n\t@Override\r\n\tget parent(): RuleNode | undefined {\r\n\t\treturn this._parent;\r\n\t}\r\n\r\n\t@Override\r\n\tpublic setParent(parent: RuleContext): void {\r\n\t\tthis._parent = 
parent;\r\n\t}\r\n\r\n\t@Override\r\n\tget payload(): Token {\r\n\t\treturn this._symbol;\r\n\t}\r\n\r\n\t@Override\r\n\tget sourceInterval(): Interval {\r\n\t\tlet tokenIndex: number = this._symbol.tokenIndex;\r\n\t\treturn new Interval(tokenIndex, tokenIndex);\r\n\t}\r\n\r\n\t@Override\r\n\tget childCount(): number {\r\n\t\treturn 0;\r\n\t}\r\n\r\n\t@Override\r\n\tpublic accept(visitor: ParseTreeVisitor): T {\r\n\t\treturn visitor.visitTerminal(this);\r\n\t}\r\n\r\n\t@Override\r\n\tget text(): string {\r\n\t\treturn this._symbol.text || \"\";\r\n\t}\r\n\r\n\t@Override\r\n\tpublic toStringTree(parser?: Parser): string {\r\n\t\treturn this.toString();\r\n\t}\r\n\r\n\t@Override\r\n\tpublic toString(): string {\r\n\t\tif (this._symbol.type === Token.EOF) {\r\n\t\t\treturn \"\";\r\n\t\t}\r\n\r\n\t\treturn this._symbol.text || \"\";\r\n\t}\r\n}\r\n", "/*!\r\n * Copyright 2016 The ANTLR Project. All rights reserved.\r\n * Licensed under the BSD-3-Clause license. See LICENSE file in the project root for license information.\r\n */\r\n\r\n// ConvertTo-TS run at 2016-10-04T11:26:47.4646355-07:00\r\n\r\nimport { Override } from \"../Decorators\";\r\nimport { ParseTreeVisitor } from \"./ParseTreeVisitor\";\r\nimport { TerminalNode } from \"./TerminalNode\";\r\nimport { Token } from \"../Token\";\r\n\r\n/** Represents a token that was consumed during resynchronization\r\n * rather than during a valid match operation. For example,\r\n * we will create this kind of a node during single token insertion\r\n * and deletion as well as during \"consume until error recovery set\"\r\n * upon no viable alternative exceptions.\r\n */\r\nexport class ErrorNode extends TerminalNode {\r\n\tconstructor(token: Token) {\r\n\t\tsuper(token);\r\n\t}\r\n\r\n\t@Override\r\n\tpublic accept(visitor: ParseTreeVisitor): T {\r\n\t\treturn visitor.visitErrorNode(this);\r\n\t}\r\n}\r\n", "/*!\r\n * Copyright 2016 The ANTLR Project. All rights reserved.\r\n * Licensed under the BSD-3-Clause license. 
See LICENSE file in the project root for license information.\r\n */\r\n\r\n// ConvertTo-TS run at 2016-10-04T11:26:47.9232756-07:00\r\n\r\nimport { RuleContext } from \"../RuleContext\";\r\nimport { ParseTree } from \"./ParseTree\";\r\nimport { ParseTreeVisitor } from \"./ParseTreeVisitor\";\r\nimport { Parser } from \"../Parser\";\r\nimport { Interval } from \"../misc/Interval\";\r\n\r\nexport abstract class RuleNode implements ParseTree {\r\n\tpublic abstract readonly ruleContext: RuleContext;\r\n\r\n\t//@Override\r\n\tpublic abstract readonly parent: RuleNode | undefined;\r\n\r\n\tpublic abstract setParent(parent: RuleContext): void;\r\n\r\n\tpublic abstract getChild(i: number): ParseTree;\r\n\r\n\tpublic abstract accept(visitor: ParseTreeVisitor): T;\r\n\r\n\tpublic abstract readonly text: string;\r\n\r\n\tpublic abstract toStringTree(parser?: Parser | undefined): string;\r\n\r\n\tpublic abstract readonly sourceInterval: Interval;\r\n\r\n\tpublic abstract readonly payload: any;\r\n\r\n\tpublic abstract readonly childCount: number;\r\n}\r\n", "/*!\r\n * Copyright 2016 The ANTLR Project. All rights reserved.\r\n * Licensed under the BSD-3-Clause license. 
See LICENSE file in the project root for license information.\r\n */\r\n\r\n// ConvertTo-TS run at 2016-10-04T11:26:48.3187865-07:00\r\n\r\nimport { Arrays } from \"../misc/Arrays\";\r\nimport { ATN } from \"../atn/ATN\";\r\nimport { CommonToken } from \"../CommonToken\";\r\nimport { ErrorNode } from \"./ErrorNode\";\r\nimport { Interval } from \"../misc/Interval\";\r\nimport { NotNull } from \"../Decorators\";\r\nimport { Parser } from \"../Parser\";\r\nimport { ParserRuleContext } from \"../ParserRuleContext\";\r\nimport { ParseTree } from \"./ParseTree\";\r\nimport { RuleContext } from \"../RuleContext\";\r\nimport { RuleNode } from \"./RuleNode\";\r\nimport { TerminalNode } from \"./TerminalNode\";\r\nimport { Token } from \"../Token\";\r\nimport { Tree } from \"./Tree\";\r\nimport * as Utils from \"../misc/Utils\";\r\n\r\n/** A set of utility routines useful for all kinds of ANTLR trees. */\r\nexport class Trees {\r\n\t/** Print out a whole tree in LISP form. {@link #getNodeText} is used on the\r\n\t * node payloads to get the text for the nodes. Detect\r\n\t * parse trees and extract data appropriately.\r\n\t */\r\n\tpublic static toStringTree(/*@NotNull*/ t: Tree): string;\r\n\r\n\t/** Print out a whole tree in LISP form. {@link #getNodeText} is used on the\r\n\t * node payloads to get the text for the nodes. Detect\r\n\t * parse trees and extract data appropriately.\r\n\t */\r\n\tpublic static toStringTree(/*@NotNull*/ t: Tree, recog: Parser | undefined): string;\r\n\r\n\t/** Print out a whole tree in LISP form. 
{@link #getNodeText} is used on the\r\n\t * node payloads to get the text for the nodes.\r\n\t */\r\n\tpublic static toStringTree(/*@NotNull*/ t: Tree, /*@Nullable*/ ruleNames: string[] | undefined): string;\r\n\r\n\tpublic static toStringTree(/*@NotNull*/ t: Tree, arg2?: Parser | string[]): string;\r\n\tpublic static toStringTree(@NotNull t: Tree, arg2?: Parser | string[]): string {\r\n\t\tlet ruleNames: string[] | undefined;\r\n\t\tif (arg2 instanceof Parser) {\r\n\t\t\truleNames = arg2.ruleNames;\r\n\t\t} else {\r\n\t\t\truleNames = arg2;\r\n\t\t}\r\n\r\n\t\tlet s: string = Utils.escapeWhitespace(this.getNodeText(t, ruleNames), false);\r\n\t\tif (t.childCount === 0) {\r\n\t\t\treturn s;\r\n\t\t}\r\n\t\tlet buf = \"\";\r\n\t\tbuf += (\"(\");\r\n\t\ts = Utils.escapeWhitespace(this.getNodeText(t, ruleNames), false);\r\n\t\tbuf += (s);\r\n\t\tbuf += (\" \");\r\n\t\tfor (let i = 0; i < t.childCount; i++) {\r\n\t\t\tif (i > 0) {\r\n\t\t\t\tbuf += (\" \");\r\n\t\t\t}\r\n\t\t\tbuf += (this.toStringTree(t.getChild(i), ruleNames));\r\n\t\t}\r\n\t\tbuf += (\")\");\r\n\t\treturn buf;\r\n\t}\r\n\r\n\tpublic static getNodeText(/*@NotNull*/ t: Tree, recog: Parser | undefined): string;\r\n\tpublic static getNodeText(/*@NotNull*/ t: Tree, ruleNames: string[] | undefined): string;\r\n\tpublic static getNodeText(t: Tree, arg2: Parser | string[] | undefined): string {\r\n\t\tlet ruleNames: string[] | undefined;\r\n\t\tif (arg2 instanceof Parser) {\r\n\t\t\truleNames = arg2.ruleNames;\r\n\t\t} else if (arg2) {\r\n\t\t\truleNames = arg2;\r\n\t\t} else {\r\n\t\t\t// no recog or rule names\r\n\t\t\tlet payload = t.payload;\r\n\t\t\tif (typeof payload.text === \"string\") {\r\n\t\t\t\treturn payload.text;\r\n\t\t\t}\r\n\t\t\treturn t.payload.toString();\r\n\t\t}\r\n\r\n\t\tif (t instanceof RuleNode) {\r\n\t\t\tlet ruleContext: RuleContext = t.ruleContext;\r\n\t\t\tlet ruleIndex: number = ruleContext.ruleIndex;\r\n\t\t\tlet ruleName: string = ruleNames[ruleIndex];\r\n\t\t\tlet altNumber: 
number = ruleContext.altNumber;\r\n\t\t\tif (altNumber !== ATN.INVALID_ALT_NUMBER) {\r\n\t\t\t\treturn ruleName + \":\" + altNumber;\r\n\t\t\t}\r\n\t\t\treturn ruleName;\r\n\t\t}\r\n\t\telse if (t instanceof ErrorNode) {\r\n\t\t\treturn t.toString();\r\n\t\t}\r\n\t\telse if (t instanceof TerminalNode) {\r\n\t\t\tlet symbol = t.symbol;\r\n\t\t\treturn symbol.text || \"\";\r\n\t\t}\r\n\t\tthrow new TypeError(\"Unexpected node type\");\r\n\t}\r\n\r\n\t/** Return ordered list of all children of this node */\r\n\tpublic static getChildren(t: ParseTree): ParseTree[];\r\n\tpublic static getChildren(t: Tree): Tree[];\r\n\tpublic static getChildren(t: Tree): Tree[] {\r\n\t\tlet kids: Tree[] = [];\r\n\t\tfor (let i = 0; i < t.childCount; i++) {\r\n\t\t\tkids.push(t.getChild(i));\r\n\t\t}\r\n\t\treturn kids;\r\n\t}\r\n\r\n\t/** Return a list of all ancestors of this node. The first node of\r\n\t * list is the root and the last is the parent of this node.\r\n\t *\r\n\t * @since 4.5.1\r\n\t */\r\n\tpublic static getAncestors(t: ParseTree): ParseTree[];\r\n\tpublic static getAncestors(t: Tree): Tree[];\r\n\t@NotNull\r\n\tpublic static getAncestors(@NotNull t: Tree): Tree[] {\r\n\t\tlet ancestors: Tree[] = [];\r\n\t\tlet p = t.parent;\r\n\t\twhile (p) {\r\n\t\t\tancestors.unshift(p); // insert at start\r\n\t\t\tp = p.parent;\r\n\t\t}\r\n\t\treturn ancestors;\r\n\t}\r\n\r\n\t/** Return true if t is u's parent or a node on path to root from u.\r\n\t * Use === not equals().\r\n\t *\r\n\t * @since 4.5.1\r\n\t */\r\n\tpublic static isAncestorOf(t: Tree, u: Tree): boolean {\r\n\t\tif (!t || !u || !t.parent) {\r\n\t\t\treturn false;\r\n\t\t}\r\n\t\tlet p = u.parent;\r\n\t\twhile (p) {\r\n\t\t\tif (t === p) {\r\n\t\t\t\treturn true;\r\n\t\t\t}\r\n\t\t\tp = p.parent;\r\n\t\t}\r\n\t\treturn false;\r\n\t}\r\n\r\n\tpublic static findAllTokenNodes(t: ParseTree, ttype: number): ParseTree[] {\r\n\t\treturn Trees.findAllNodes(t, ttype, true);\r\n\t}\r\n\r\n\tpublic static findAllRuleNodes(t: 
ParseTree, ruleIndex: number): ParseTree[] {\r\n\t\treturn Trees.findAllNodes(t, ruleIndex, false);\r\n\t}\r\n\r\n\tpublic static findAllNodes(t: ParseTree, index: number, findTokens: boolean): ParseTree[] {\r\n\t\tlet nodes: ParseTree[] = [];\r\n\t\tTrees._findAllNodes(t, index, findTokens, nodes);\r\n\t\treturn nodes;\r\n\t}\r\n\r\n\tpublic static _findAllNodes(t: ParseTree, index: number, findTokens: boolean, nodes: ParseTree[]): void {\r\n\t\t// check this node (the root) first\r\n\t\tif (findTokens && t instanceof TerminalNode) {\r\n\t\t\tif (t.symbol.type === index) {\r\n\t\t\t\tnodes.push(t);\r\n\t\t\t}\r\n\t\t}\r\n\t\telse if (!findTokens && t instanceof ParserRuleContext) {\r\n\t\t\tif (t.ruleIndex === index) {\r\n\t\t\t\tnodes.push(t);\r\n\t\t\t}\r\n\t\t}\r\n\t\t// check children\r\n\t\tfor (let i = 0; i < t.childCount; i++) {\r\n\t\t\tTrees._findAllNodes(t.getChild(i), index, findTokens, nodes);\r\n\t\t}\r\n\t}\r\n\r\n\t/** Get all descendents; includes t itself.\r\n\t *\r\n\t * @since 4.5.1\r\n\t */\r\n\tpublic static getDescendants(t: ParseTree): ParseTree[] {\r\n\t\tlet nodes: ParseTree[] = [];\r\n\r\n\t\tfunction recurse(e: ParseTree): void {\r\n\t\t\tnodes.push(e);\r\n\t\t\tconst n = e.childCount;\r\n\t\t\tfor (let i = 0; i < n; i++) {\r\n\t\t\t\trecurse(e.getChild(i));\r\n\t\t\t}\r\n\t\t}\r\n\r\n\t\trecurse(t);\r\n\t\treturn nodes;\r\n\t}\r\n\r\n\t/** Find smallest subtree of t enclosing range startTokenIndex..stopTokenIndex\r\n\t * inclusively using postorder traversal. 
Recursive depth-first-search.\r\n\t *\r\n\t * @since 4.5\r\n\t */\r\n\tpublic static getRootOfSubtreeEnclosingRegion(\r\n\t\t@NotNull t: ParseTree,\r\n\t\tstartTokenIndex: number, // inclusive\r\n\t\tstopTokenIndex: number, // inclusive\r\n\t): ParserRuleContext | undefined {\r\n\t\tlet n: number = t.childCount;\r\n\t\tfor (let i = 0; i < n; i++) {\r\n\t\t\tlet child: ParseTree = t.getChild(i);\r\n\t\t\tlet r = Trees.getRootOfSubtreeEnclosingRegion(child, startTokenIndex, stopTokenIndex);\r\n\t\t\tif (r) {\r\n\t\t\t\treturn r;\r\n\t\t\t}\r\n\t\t}\r\n\t\tif (t instanceof ParserRuleContext) {\r\n\t\t\tlet stopToken = t.stop;\r\n\t\t\tif (startTokenIndex >= t.start.tokenIndex && // is range fully contained in t?\r\n\t\t\t\t(stopToken == null || stopTokenIndex <= stopToken.tokenIndex)) {\r\n\t\t\t\t// note: r.stop==null likely implies that we bailed out of parser and there's nothing to the right\r\n\t\t\t\treturn t;\r\n\t\t\t}\r\n\t\t}\r\n\t\treturn undefined;\r\n\t}\r\n\r\n\t/** Replace any subtree siblings of root that are completely to left\r\n\t * or right of lookahead range with a CommonToken(Token.INVALID_TYPE,\"...\")\r\n\t * node. 
The source interval for t is not altered to suit smaller range!\r\n\t *\r\n\t * WARNING: destructive to t.\r\n\t *\r\n\t * @since 4.5.1\r\n\t */\r\n\tpublic static stripChildrenOutOfRange(\r\n\t\tt: ParserRuleContext,\r\n\t\troot: ParserRuleContext,\r\n\t\tstartIndex: number,\r\n\t\tstopIndex: number): void {\r\n\t\tif (!t) {\r\n\t\t\treturn;\r\n\t\t}\r\n\t\tlet count = t.childCount;\r\n\t\tfor (let i = 0; i < count; i++) {\r\n\t\t\tlet child = t.getChild(i);\r\n\t\t\tlet range: Interval = child.sourceInterval;\r\n\t\t\tif (child instanceof ParserRuleContext && (range.b < startIndex || range.a > stopIndex)) {\r\n\t\t\t\tif (Trees.isAncestorOf(child, root)) { // replace only if subtree doesn't have displayed root\r\n\t\t\t\t\tlet abbrev: CommonToken = new CommonToken(Token.INVALID_TYPE, \"...\");\r\n\t\t\t\t\tt.children![i] = new TerminalNode(abbrev); // HACK access to private\r\n\t\t\t\t}\r\n\t\t\t}\r\n\t\t}\r\n\t}\r\n\r\n\t/** Return first node satisfying the pred\r\n\t *\r\n\t * @since 4.5.1\r\n\t */\r\n\tpublic static findNodeSuchThat(t: ParseTree, pred: (tree: ParseTree) => boolean): ParseTree | undefined;\r\n\tpublic static findNodeSuchThat(t: Tree, pred: (tree: Tree) => boolean): Tree | undefined;\r\n\tpublic static findNodeSuchThat(t: Tree, pred: (tree: ParseTree) => boolean): Tree | undefined {\r\n\t\t// No type check needed as long as users only use one of the available overloads\r\n\t\tif (pred(t as ParseTree)) {\r\n\t\t\treturn t;\r\n\t\t}\r\n\r\n\t\tlet n: number = t.childCount;\r\n\t\tfor (let i = 0 ; i < n ; i++){\r\n\t\t\tlet u = Trees.findNodeSuchThat(t.getChild(i), pred as (tree: Tree) => boolean);\r\n\t\t\tif (u !== undefined) {\r\n\t\t\t\treturn u;\r\n\t\t\t}\r\n\t\t}\r\n\r\n\t\treturn undefined;\r\n\t}\r\n}\r\n", "/*!\r\n * Copyright 2016 The ANTLR Project. All rights reserved.\r\n * Licensed under the BSD-3-Clause license. 
See LICENSE file in the project root for license information.\r\n */\r\n\r\n// ConvertTo-TS run at 2016-10-04T11:26:57.3490837-07:00\r\n\r\nimport { ATN } from \"./atn/ATN\";\r\nimport { Parser } from \"./Parser\";\r\nimport { Recognizer } from \"./Recognizer\";\r\nimport { RuleNode } from \"./tree/RuleNode\";\r\nimport { ParseTree } from \"./tree/ParseTree\";\r\nimport { Interval } from \"./misc/Interval\";\r\nimport { Override } from \"./Decorators\";\r\nimport { Trees } from \"./tree/Trees\";\r\nimport { ParseTreeVisitor } from \"./tree/ParseTreeVisitor\";\r\nimport { ParserRuleContext } from \"./ParserRuleContext\";\r\n\r\n/** A rule context is a record of a single rule invocation.\r\n *\r\n * We form a stack of these context objects using the parent\r\n * pointer. A parent pointer of `undefined` indicates that the current\r\n * context is the bottom of the stack. The ParserRuleContext subclass\r\n * as a children list so that we can turn this data structure into a\r\n * tree.\r\n *\r\n * The root node always has a `undefined` pointer and invokingState of -1.\r\n *\r\n * Upon entry to parsing, the first invoked rule function creates a\r\n * context object (a subclass specialized for that rule such as\r\n * SContext) and makes it the root of a parse tree, recorded by field\r\n * Parser._ctx.\r\n *\r\n * public final SContext s() throws RecognitionException {\r\n * SContext _localctx = new SContext(_ctx, state); <-- create new node\r\n * enterRule(_localctx, 0, RULE_s); <-- push it\r\n * ...\r\n * exitRule(); <-- pop back to _localctx\r\n * return _localctx;\r\n * }\r\n *\r\n * A subsequent rule invocation of r from the start rule s pushes a\r\n * new context object for r whose parent points at s and use invoking\r\n * state is the state with r emanating as edge label.\r\n *\r\n * The invokingState fields from a context object to the root\r\n * together form a stack of rule indication states where the root\r\n * (bottom of the stack) has a -1 sentinel value. 
If we invoke start\r\n * symbol s then call r1, which calls r2, the would look like\r\n * this:\r\n *\r\n * SContext[-1] <- root node (bottom of the stack)\r\n * R1Context[p] <- p in rule s called r1\r\n * R2Context[q] <- q in rule r1 called r2\r\n *\r\n * So the top of the stack, _ctx, represents a call to the current\r\n * rule and it holds the return address from another rule that invoke\r\n * to this rule. To invoke a rule, we must always have a current context.\r\n *\r\n * The parent contexts are useful for computing lookahead sets and\r\n * getting error information.\r\n *\r\n * These objects are used during parsing and prediction.\r\n * For the special case of parsers, we use the subclass\r\n * ParserRuleContext.\r\n *\r\n * @see ParserRuleContext\r\n */\r\nexport class RuleContext extends RuleNode {\r\n\tpublic _parent: RuleContext | undefined;\r\n\tpublic invokingState: number;\r\n\r\n\tconstructor();\r\n\tconstructor(parent: RuleContext | undefined, invokingState: number);\r\n\tconstructor(parent?: RuleContext, invokingState?: number) {\r\n\t\tsuper();\r\n\t\tthis._parent = parent;\r\n\t\tthis.invokingState = invokingState != null ? 
invokingState : -1;\r\n\t}\r\n\r\n\tpublic static getChildContext(parent: RuleContext, invokingState: number): RuleContext {\r\n\t\treturn new RuleContext(parent, invokingState);\r\n\t}\r\n\r\n\tpublic depth(): number {\r\n\t\tlet n = 0;\r\n\t\tlet p: RuleContext | undefined = this;\r\n\t\twhile (p) {\r\n\t\t\tp = p._parent;\r\n\t\t\tn++;\r\n\t\t}\r\n\t\treturn n;\r\n\t}\r\n\r\n\t/** A context is empty if there is no invoking state; meaning nobody called\r\n\t * current context.\r\n\t */\r\n\tget isEmpty(): boolean {\r\n\t\treturn this.invokingState === -1;\r\n\t}\r\n\r\n\t// satisfy the ParseTree / SyntaxTree interface\r\n\r\n\t@Override\r\n\tget sourceInterval(): Interval {\r\n\t\treturn Interval.INVALID;\r\n\t}\r\n\r\n\t@Override\r\n\tget ruleContext(): RuleContext { return this; }\r\n\r\n\t@Override\r\n\tget parent(): RuleContext | undefined { return this._parent; }\r\n\r\n\t/** @since 4.7. {@see ParseTree#setParent} comment */\r\n\t@Override\r\n\tpublic setParent(parent: RuleContext): void {\r\n\t\tthis._parent = parent;\r\n\t}\r\n\r\n\t@Override\r\n\tget payload(): RuleContext { return this; }\r\n\r\n\t/** Return the combined text of all child nodes. This method only considers\r\n\t * tokens which have been added to the parse tree.\r\n\t *\r\n\t * Since tokens on hidden channels (e.g. whitespace or comments) are not\r\n\t * added to the parse trees, they will not appear in the output of this\r\n\t * method.\r\n\t */\r\n\t@Override\r\n\tget text(): string {\r\n\t\tif (this.childCount === 0) {\r\n\t\t\treturn \"\";\r\n\t\t}\r\n\r\n\t\tlet builder = \"\";\r\n\t\tfor (let i = 0; i < this.childCount; i++) {\r\n\t\t\tbuilder += this.getChild(i).text;\r\n\t\t}\r\n\r\n\t\treturn builder.toString();\r\n\t}\r\n\r\n\tget ruleIndex(): number { return -1; }\r\n\r\n\t/** For rule associated with this parse tree internal node, return\r\n\t * the outer alternative number used to match the input. Default\r\n\t * implementation does not compute nor store this alt num. 
Create\r\n\t * a subclass of ParserRuleContext with backing field and set\r\n\t * option contextSuperClass.\r\n\t * to set it.\r\n\t *\r\n\t * @since 4.5.3\r\n\t */\r\n\tget altNumber(): number { return ATN.INVALID_ALT_NUMBER; }\r\n\r\n\t/** Set the outer alternative number for this context node. Default\r\n\t * implementation does nothing to avoid backing field overhead for\r\n\t * trees that don't need it. Create\r\n\t * a subclass of ParserRuleContext with backing field and set\r\n\t * option contextSuperClass.\r\n\t *\r\n\t * @since 4.5.3\r\n\t */\r\n\tset altNumber(altNumber: number) {\r\n\t\t// intentionally ignored by the base implementation\r\n\t}\r\n\r\n\t@Override\r\n\tpublic getChild(i: number): ParseTree {\r\n\t\tthrow new RangeError(\"i must be greater than or equal to 0 and less than childCount\");\r\n\t}\r\n\r\n\t@Override\r\n\tget childCount(): number {\r\n\t\treturn 0;\r\n\t}\r\n\r\n\t@Override\r\n\tpublic accept(visitor: ParseTreeVisitor): T {\r\n\t\treturn visitor.visitChildren(this);\r\n\t}\r\n\r\n\t/** Print out a whole tree, not just a node, in LISP format\r\n\t * (root child1 .. childN). Print just a node if this is a leaf.\r\n\t * We have to know the recognizer so we can get rule names.\r\n\t */\r\n\tpublic toStringTree(recog: Parser): string;\r\n\r\n\t/** Print out a whole tree, not just a node, in LISP format\r\n\t * (root child1 .. childN). 
Print just a node if this is a leaf.\r\n\t */\r\n\tpublic toStringTree(ruleNames: string[] | undefined): string;\r\n\r\n\tpublic toStringTree(): string;\r\n\r\n\t@Override\r\n\tpublic toStringTree(recog?: Parser | string[]): string {\r\n\t\treturn Trees.toStringTree(this, recog);\r\n\t}\r\n\r\n\tpublic toString(): string;\r\n\tpublic toString(recog: Recognizer | undefined): string;\r\n\tpublic toString(ruleNames: string[] | undefined): string;\r\n\r\n\t// // recog undefined unless ParserRuleContext, in which case we use subclass toString(...)\r\n\tpublic toString(recog: Recognizer | undefined, stop: RuleContext | undefined): string;\r\n\r\n\tpublic toString(ruleNames: string[] | undefined, stop: RuleContext | undefined): string;\r\n\r\n\tpublic toString(\r\n\t\targ1?: Recognizer | string[],\r\n\t\tstop?: RuleContext)\r\n\t\t: string {\r\n\t\tconst ruleNames = (arg1 instanceof Recognizer) ? arg1.ruleNames : arg1;\r\n\t\tstop = stop || ParserRuleContext.emptyContext();\r\n\r\n\t\tlet buf = \"\";\r\n\t\tlet p: RuleContext | undefined = this;\r\n\t\tbuf += (\"[\");\r\n\t\twhile (p && p !== stop) {\r\n\t\t\tif (!ruleNames) {\r\n\t\t\t\tif (!p.isEmpty) {\r\n\t\t\t\t\tbuf += (p.invokingState);\r\n\t\t\t\t}\r\n\t\t\t} else {\r\n\t\t\t\tlet ruleIndex: number = p.ruleIndex;\r\n\t\t\t\tlet ruleName: string = (ruleIndex >= 0 && ruleIndex < ruleNames.length)\r\n\t\t\t\t\t? ruleNames[ruleIndex] : ruleIndex.toString();\r\n\t\t\t\tbuf += (ruleName);\r\n\t\t\t}\r\n\r\n\t\t\tif (p._parent && (ruleNames || !p._parent.isEmpty)) {\r\n\t\t\t\tbuf += (\" \");\r\n\t\t\t}\r\n\r\n\t\t\tp = p._parent;\r\n\t\t}\r\n\r\n\t\tbuf += (\"]\");\r\n\t\treturn buf.toString();\r\n\t}\r\n}\r\n", "/*!\r\n * Copyright 2016 The ANTLR Project. All rights reserved.\r\n * Licensed under the BSD-3-Clause license. 
See LICENSE file in the project root for license information.\r\n */\r\n\r\n// ConvertTo-TS run at 2016-10-04T11:26:56.6285494-07:00\r\nimport { ErrorNode } from \"./tree/ErrorNode\";\r\nimport { Interval } from \"./misc/Interval\";\r\nimport { Override } from \"./Decorators\";\r\nimport { Parser } from \"./Parser\";\r\nimport { ParseTree } from \"./tree/ParseTree\";\r\nimport { ParseTreeListener } from \"./tree/ParseTreeListener\";\r\nimport { RecognitionException } from \"./RecognitionException\";\r\nimport { RuleContext } from \"./RuleContext\";\r\nimport { TerminalNode } from \"./tree/TerminalNode\";\r\nimport { Token } from \"./Token\";\r\n\r\n/** A rule invocation record for parsing.\r\n *\r\n * Contains all of the information about the current rule not stored in the\r\n * RuleContext. It handles parse tree children list, Any ATN state\r\n * tracing, and the default values available for rule invocations:\r\n * start, stop, rule index, current alt number.\r\n *\r\n * Subclasses made for each rule and grammar track the parameters,\r\n * return values, locals, and labels specific to that rule. These\r\n * are the objects that are returned from rules.\r\n *\r\n * Note text is not an actual field of a rule return value; it is computed\r\n * from start and stop using the input stream's toString() method. I\r\n * could add a ctor to this so that we can pass in and store the input\r\n * stream, but I'm not sure we want to do that. It would seem to be undefined\r\n * to get the .text property anyway if the rule matches tokens from multiple\r\n * input streams.\r\n *\r\n * I do not use getters for fields of objects that are used simply to\r\n * group values such as this aggregate. 
The getters/setters are there to\r\n * satisfy the superclass interface.\r\n */\r\nexport class ParserRuleContext extends RuleContext {\r\n\tprivate static readonly EMPTY: ParserRuleContext = new ParserRuleContext();\r\n\r\n\t/** If we are debugging or building a parse tree for a visitor,\r\n\t * we need to track all of the tokens and rule invocations associated\r\n\t * with this rule's context. This is empty for parsing w/o tree constr.\r\n\t * operation because we don't the need to track the details about\r\n\t * how we parse this rule.\r\n\t */\r\n\tpublic children?: ParseTree[];\r\n\r\n\t/** For debugging/tracing purposes, we want to track all of the nodes in\r\n\t * the ATN traversed by the parser for a particular rule.\r\n\t * This list indicates the sequence of ATN nodes used to match\r\n\t * the elements of the children list. This list does not include\r\n\t * ATN nodes and other rules used to match rule invocations. It\r\n\t * traces the rule invocation node itself but nothing inside that\r\n\t * other rule's ATN submachine.\r\n\t *\r\n\t * There is NOT a one-to-one correspondence between the children and\r\n\t * states list. There are typically many nodes in the ATN traversed\r\n\t * for each element in the children list. For example, for a rule\r\n\t * invocation there is the invoking state and the following state.\r\n\t *\r\n\t * The parser state property updates field s and adds it to this list\r\n\t * if we are debugging/tracing.\r\n\t *\r\n\t * This does not trace states visited during prediction.\r\n\t */\r\n//\tpublic Array states;\r\n\r\n\tpublic _start!: Token;\r\n\tpublic _stop: Token | undefined;\r\n\r\n\t/**\r\n\t * The exception that forced this rule to return. 
If the rule successfully\r\n\t * completed, this is `undefined`.\r\n\t */\r\n\tpublic exception?: RecognitionException;\r\n\r\n\tconstructor();\r\n\tconstructor(parent: ParserRuleContext | undefined, invokingStateNumber: number);\r\n\tconstructor(parent?: ParserRuleContext, invokingStateNumber?: number) {\r\n\t\tif (invokingStateNumber == null) {\r\n\t\t\tsuper();\r\n\t\t} else {\r\n\t\t\tsuper(parent, invokingStateNumber);\r\n\t\t}\r\n\t}\r\n\r\n\tpublic static emptyContext(): ParserRuleContext {\r\n\t\treturn ParserRuleContext.EMPTY;\r\n\t}\r\n\r\n\t/**\r\n\t * COPY a ctx (I'm deliberately not using copy constructor) to avoid\r\n\t * confusion with creating node with parent. Does not copy children\r\n\t * (except error leaves).\r\n\t *\r\n\t * This is used in the generated parser code to flip a generic XContext\r\n\t * node for rule X to a YContext for alt label Y. In that sense, it is not\r\n\t * really a generic copy function.\r\n\t *\r\n\t * If we do an error sync() at start of a rule, we might add error nodes\r\n\t * to the generic XContext so this function must copy those nodes to the\r\n\t * YContext as well else they are lost!\r\n\t */\r\n\tpublic copyFrom(ctx: ParserRuleContext): void {\r\n\t\tthis._parent = ctx._parent;\r\n\t\tthis.invokingState = ctx.invokingState;\r\n\r\n\t\tthis._start = ctx._start;\r\n\t\tthis._stop = ctx._stop;\r\n\r\n\t\t// copy any error nodes to alt label node\r\n\t\tif (ctx.children) {\r\n\t\t\tthis.children = [];\r\n\t\t\t// reset parent pointer for any error nodes\r\n\t\t\tfor (let child of ctx.children) {\r\n\t\t\t\tif (child instanceof ErrorNode) {\r\n\t\t\t\t\tthis.addChild(child);\r\n\t\t\t\t}\r\n\t\t\t}\r\n\t\t}\r\n\t}\r\n\r\n\t// Double dispatch methods for listeners\r\n\r\n\tpublic enterRule(listener: ParseTreeListener): void {\r\n\t\t// intentionally empty\r\n\t}\r\n\tpublic exitRule(listener: ParseTreeListener): void {\r\n\t\t// intentionally empty\r\n\t}\r\n\r\n\t/** Add a parse tree node to this as a child. 
Works for\r\n\t * internal and leaf nodes. Does not set parent link;\r\n\t * other add methods must do that. Other addChild methods\r\n\t * call this.\r\n\t *\r\n\t * We cannot set the parent pointer of the incoming node\r\n\t * because the existing interfaces do not have a setParent()\r\n\t * method and I don't want to break backward compatibility for this.\r\n\t *\r\n\t * @since 4.7\r\n\t */\r\n\tpublic addAnyChild(t: T): T {\r\n\t\tif (!this.children) {\r\n\t\t\tthis.children = [t];\r\n\t\t} else {\r\n\t\t\tthis.children.push(t);\r\n\t\t}\r\n\r\n\t\treturn t;\r\n\t}\r\n\r\n\t/** Add a token leaf node child and force its parent to be this node. */\r\n\tpublic addChild(t: TerminalNode): void;\r\n\tpublic addChild(ruleInvocation: RuleContext): void;\r\n\t/**\r\n\t * Add a child to this node based upon matchedToken. It\r\n\t * creates a TerminalNodeImpl rather than using\r\n\t * {@link Parser#createTerminalNode(ParserRuleContext, Token)}. I'm leaving this\r\n\t * in for compatibility but the parser doesn't use this anymore.\r\n\t *\r\n\t * @deprecated Use another overload instead.\r\n\t */\r\n\tpublic addChild(matchedToken: Token): TerminalNode;\r\n\tpublic addChild(t: TerminalNode | RuleContext | Token): TerminalNode | void {\r\n\t\tlet result: TerminalNode | void;\r\n\t\tif (t instanceof TerminalNode) {\r\n\t\t\tt.setParent(this);\r\n\t\t\tthis.addAnyChild(t);\r\n\t\t\treturn;\r\n\t\t} else if (t instanceof RuleContext) {\r\n\t\t\t// Does not set parent link\r\n\t\t\tthis.addAnyChild(t);\r\n\t\t\treturn;\r\n\t\t} else {\r\n\t\t\t// Deprecated code path\r\n\t\t\tt = new TerminalNode(t);\r\n\t\t\tthis.addAnyChild(t);\r\n\t\t\tt.setParent(this);\r\n\t\t\treturn t;\r\n\t\t}\r\n\t}\r\n\r\n\t/** Add an error node child and force its parent to be this node.\r\n\t *\r\n\t * @since 4.7\r\n\t */\r\n\tpublic addErrorNode(errorNode: ErrorNode): ErrorNode;\r\n\r\n\t/**\r\n\t * Add a child to this node based upon badToken. 
It\r\n\t * creates a ErrorNode rather than using\r\n\t * {@link Parser#createErrorNode(ParserRuleContext, Token)}. I'm leaving this\r\n\t * in for compatibility but the parser doesn't use this anymore.\r\n\t *\r\n\t * @deprecated Use another overload instead.\r\n\t */\r\n\tpublic addErrorNode(badToken: Token): ErrorNode;\r\n\tpublic addErrorNode(node: ErrorNode | Token): ErrorNode {\r\n\t\tif (node instanceof ErrorNode) {\r\n\t\t\tconst errorNode: ErrorNode = node;\r\n\t\t\terrorNode.setParent(this);\r\n\t\t\treturn this.addAnyChild(errorNode);\r\n\t\t} else {\r\n\t\t\t// deprecated path\r\n\t\t\tconst badToken: Token = node;\r\n\t\t\tlet t = new ErrorNode(badToken);\r\n\t\t\tthis.addAnyChild(t);\r\n\t\t\tt.setParent(this);\r\n\t\t\treturn t;\r\n\t\t}\r\n\t}\r\n\r\n//\tpublic void trace(int s) {\r\n//\t\tif ( states==null ) states = new ArrayList();\r\n//\t\tstates.add(s);\r\n//\t}\r\n\r\n\t/** Used by enterOuterAlt to toss out a RuleContext previously added as\r\n\t * we entered a rule. 
If we have # label, we will need to remove\r\n\t * generic ruleContext object.\r\n\t */\r\n\tpublic removeLastChild(): void {\r\n\t\tif (this.children) {\r\n\t\t\tthis.children.pop();\r\n\t\t}\r\n\t}\r\n\r\n\t@Override\r\n\t/** Override to make type more specific */\r\n\tget parent(): ParserRuleContext | undefined {\r\n\t\tlet parent = super.parent;\r\n\t\tif (parent === undefined || parent instanceof ParserRuleContext) {\r\n\t\t\treturn parent;\r\n\t\t}\r\n\r\n\t\tthrow new TypeError(\"Invalid parent type for ParserRuleContext\");\r\n\t}\r\n\r\n\tpublic getChild(i: number): ParseTree;\r\n\tpublic getChild(i: number, ctxType: { new (...args: any[]): T; }): T;\r\n\t// Note: in TypeScript, order or arguments reversed\r\n\tpublic getChild(i: number, ctxType?: { new (...args: any[]): T; }): ParseTree {\r\n\t\tif (!this.children || i < 0 || i >= this.children.length) {\r\n\t\t\tthrow new RangeError(\"index parameter must be between >= 0 and <= number of children.\");\r\n\t\t}\r\n\r\n\t\tif (ctxType == null) {\r\n\t\t\treturn this.children[i];\r\n\t\t}\r\n\r\n\t\tlet result = this.tryGetChild(i, ctxType);\r\n\t\tif (result === undefined) {\r\n\t\t\tthrow new Error(\"The specified node does not exist\");\r\n\t\t}\r\n\r\n\t\treturn result;\r\n\t}\r\n\r\n\tpublic tryGetChild(i: number, ctxType: { new (...args: any[]): T; }): T | undefined {\r\n\t\tif (!this.children || i < 0 || i >= this.children.length) {\r\n\t\t\treturn undefined;\r\n\t\t}\r\n\r\n\t\tlet j: number = -1; // what node with ctxType have we found?\r\n\t\tfor (let o of this.children) {\r\n\t\t\tif (o instanceof ctxType) {\r\n\t\t\t\tj++;\r\n\t\t\t\tif (j === i) {\r\n\t\t\t\t\treturn o;\r\n\t\t\t\t}\r\n\t\t\t}\r\n\t\t}\r\n\r\n\t\treturn undefined;\r\n\t}\r\n\r\n\tpublic getToken(ttype: number, i: number): TerminalNode {\r\n\t\tlet result = this.tryGetToken(ttype, i);\r\n\t\tif (result === undefined) {\r\n\t\t\tthrow new Error(\"The specified token does not exist\");\r\n\t\t}\r\n\r\n\t\treturn 
result;\r\n\t}\r\n\r\n\tpublic tryGetToken(ttype: number, i: number): TerminalNode | undefined {\r\n\t\tif (!this.children || i < 0 || i >= this.children.length) {\r\n\t\t\treturn undefined;\r\n\t\t}\r\n\r\n\t\tlet j: number = -1; // what token with ttype have we found?\r\n\t\tfor (let o of this.children) {\r\n\t\t\tif (o instanceof TerminalNode) {\r\n\t\t\t\tlet symbol: Token = o.symbol;\r\n\t\t\t\tif (symbol.type === ttype) {\r\n\t\t\t\t\tj++;\r\n\t\t\t\t\tif (j === i) {\r\n\t\t\t\t\t\treturn o;\r\n\t\t\t\t\t}\r\n\t\t\t\t}\r\n\t\t\t}\r\n\t\t}\r\n\r\n\t\treturn undefined;\r\n\t}\r\n\r\n\tpublic getTokens(ttype: number): TerminalNode[] {\r\n\t\tlet tokens: TerminalNode[] = [];\r\n\r\n\t\tif (!this.children) {\r\n\t\t\treturn tokens;\r\n\t\t}\r\n\r\n\t\tfor (let o of this.children) {\r\n\t\t\tif (o instanceof TerminalNode) {\r\n\t\t\t\tlet symbol = o.symbol;\r\n\t\t\t\tif (symbol.type === ttype) {\r\n\t\t\t\t\ttokens.push(o);\r\n\t\t\t\t}\r\n\t\t\t}\r\n\t\t}\r\n\r\n\t\treturn tokens;\r\n\t}\r\n\r\n\tget ruleContext(): this {\r\n\t\treturn this;\r\n\t}\r\n\r\n\t// NOTE: argument order change from Java version\r\n\tpublic getRuleContext(i: number, ctxType: { new (...args: any[]): T; }): T {\r\n\t\treturn this.getChild(i, ctxType);\r\n\t}\r\n\r\n\tpublic tryGetRuleContext(i: number, ctxType: { new (...args: any[]): T; }): T | undefined {\r\n\t\treturn this.tryGetChild(i, ctxType);\r\n\t}\r\n\r\n\tpublic getRuleContexts(ctxType: { new (...args: any[]): T; }): T[] {\r\n\t\tlet contexts: T[] = [];\r\n\t\tif (!this.children) {\r\n\t\t\treturn contexts;\r\n\t\t}\r\n\r\n\t\tfor (let o of this.children) {\r\n\t\t\tif (o instanceof ctxType) {\r\n\t\t\t\tcontexts.push(o);\r\n\t\t\t}\r\n\t\t}\r\n\r\n\t\treturn contexts;\r\n\t}\r\n\r\n\t@Override\r\n\tget childCount() {\r\n\t\treturn this.children ? 
this.children.length : 0;\r\n\t}\r\n\r\n\t@Override\r\n\tget sourceInterval(): Interval {\r\n\t\tif (!this._start) {\r\n\t\t\treturn Interval.INVALID;\r\n\t\t}\r\n\t\tif (!this._stop || this._stop.tokenIndex < this._start.tokenIndex) {\r\n\t\t\treturn Interval.of(this._start.tokenIndex, this._start.tokenIndex - 1); // empty\r\n\t\t}\r\n\t\treturn Interval.of(this._start.tokenIndex, this._stop.tokenIndex);\r\n\t}\r\n\r\n\t/**\r\n\t * Get the initial token in this context.\r\n\t * Note that the range from start to stop is inclusive, so for rules that do not consume anything\r\n\t * (for example, zero length or error productions) this token may exceed stop.\r\n\t */\r\n\tget start(): Token { return this._start; }\r\n\t/**\r\n\t * Get the final token in this context.\r\n\t * Note that the range from start to stop is inclusive, so for rules that do not consume anything\r\n\t * (for example, zero length or error productions) this token may precede start.\r\n\t */\r\n\tget stop(): Token | undefined { return this._stop; }\r\n\r\n\t/** Used for rule context info debugging during parse-time, not so much for ATN debugging */\r\n\tpublic toInfoString(recognizer: Parser): string {\r\n\t\tlet rules: string[] =\r\n\t\t\trecognizer.getRuleInvocationStack(this).reverse();\r\n\t\treturn \"ParserRuleContext\" + rules + \"{\" +\r\n\t\t\t\"start=\" + this._start +\r\n\t\t\t\", stop=\" + this._stop +\r\n\t\t\t\"}\";\r\n\t}\r\n}\r\n", "/*!\r\n * Copyright 2016 The ANTLR Project. All rights reserved.\r\n * Licensed under the BSD-3-Clause license. 
See LICENSE file in the project root for license information.\r\n */\r\n\r\n// ConvertTo-TS run at 2016-10-04T11:26:36.2673893-07:00\r\n\r\nimport { Array2DHashMap } from \"../misc/Array2DHashMap\";\r\nimport { ATN } from \"./ATN\";\r\nimport { ATNConfig } from \"./ATNConfig\";\r\nimport { ATNConfigSet } from \"./ATNConfigSet\";\r\nimport { ATNState } from \"./ATNState\";\r\nimport { BitSet } from \"../misc/BitSet\";\r\nimport { EqualityComparator } from \"../misc/EqualityComparator\";\r\nimport { MurmurHash } from \"../misc/MurmurHash\";\r\nimport { ObjectEqualityComparator } from \"../misc/ObjectEqualityComparator\";\r\nimport { Override } from \"../Decorators\";\r\nimport { RuleStopState } from \"./RuleStopState\";\r\nimport { SemanticContext } from \"./SemanticContext\";\r\n\r\n/**\r\n * This enumeration defines the prediction modes available in ANTLR 4 along with\r\n * utility methods for analyzing configuration sets for conflicts and/or\r\n * ambiguities.\r\n */\r\nexport enum PredictionMode {\r\n\t/**\r\n\t * The SLL(*) prediction mode. This prediction mode ignores the current\r\n\t * parser context when making predictions. This is the fastest prediction\r\n\t * mode, and provides correct results for many grammars. This prediction\r\n\t * mode is more powerful than the prediction mode provided by ANTLR 3, but\r\n\t * may result in syntax errors for grammar and input combinations which are\r\n\t * not SLL.\r\n\t *\r\n\t * When using this prediction mode, the parser will either return a correct\r\n\t * parse tree (i.e. the same parse tree that would be returned with the\r\n\t * {@link #LL} prediction mode), or it will report a syntax error. 
If a\r\n\t * syntax error is encountered when using the {@link #SLL} prediction mode,\r\n\t * it may be due to either an actual syntax error in the input or indicate\r\n\t * that the particular combination of grammar and input requires the more\r\n\t * powerful {@link #LL} prediction abilities to complete successfully.\r\n\t *\r\n\t * This prediction mode does not provide any guarantees for prediction\r\n\t * behavior for syntactically-incorrect inputs.\r\n\t */\r\n\tSLL,\r\n\t/**\r\n\t * The LL(*) prediction mode. This prediction mode allows the current parser\r\n\t * context to be used for resolving SLL conflicts that occur during\r\n\t * prediction. This is the fastest prediction mode that guarantees correct\r\n\t * parse results for all combinations of grammars with syntactically correct\r\n\t * inputs.\r\n\t *\r\n\t * When using this prediction mode, the parser will make correct decisions\r\n\t * for all syntactically-correct grammar and input combinations. However, in\r\n\t * cases where the grammar is truly ambiguous this prediction mode might not\r\n\t * report a precise answer for *exactly which* alternatives are\r\n\t * ambiguous.\r\n\t *\r\n\t * This prediction mode does not provide any guarantees for prediction\r\n\t * behavior for syntactically-incorrect inputs.\r\n\t */\r\n\tLL,\r\n\t/**\r\n\t * The LL(*) prediction mode with exact ambiguity detection. In addition to\r\n\t * the correctness guarantees provided by the {@link #LL} prediction mode,\r\n\t * this prediction mode instructs the prediction algorithm to determine the\r\n\t * complete and exact set of ambiguous alternatives for every ambiguous\r\n\t * decision encountered while parsing.\r\n\t *\r\n\t * This prediction mode may be used for diagnosing ambiguities during\r\n\t * grammar development. 
Due to the performance overhead of calculating sets\r\n\t * of ambiguous alternatives, this prediction mode should be avoided when\r\n\t * the exact results are not necessary.\r\n\t *\r\n\t * This prediction mode does not provide any guarantees for prediction\r\n\t * behavior for syntactically-incorrect inputs.\r\n\t */\r\n\tLL_EXACT_AMBIG_DETECTION,\r\n}\r\n\r\nexport namespace PredictionMode {\r\n\t/** A Map that uses just the state and the stack context as the key. */\r\n\t// NOTE: Base type used to be FlexibleHashMap\r\n\tclass AltAndContextMap extends Array2DHashMap {\r\n\t\tconstructor() {\r\n\t\t\tsuper(AltAndContextConfigEqualityComparator.INSTANCE);\r\n\t\t}\r\n\t}\r\n\r\n\tclass AltAndContextConfigEqualityComparator implements EqualityComparator {\r\n\t\tpublic static readonly INSTANCE: AltAndContextConfigEqualityComparator = new AltAndContextConfigEqualityComparator();\r\n\r\n\t\tprivate AltAndContextConfigEqualityComparator() {\r\n\t\t\t// intentionally empty\r\n\t\t}\r\n\r\n\t\t/**\r\n\t\t * The hash code is only a function of the {@link ATNState#stateNumber}\r\n\t\t * and {@link ATNConfig#context}.\r\n\t\t */\r\n\t\t@Override\r\n\t\tpublic hashCode(o: ATNConfig): number {\r\n\t\t\tlet hashCode: number = MurmurHash.initialize(7);\r\n\t\t\thashCode = MurmurHash.update(hashCode, o.state.stateNumber);\r\n\t\t\thashCode = MurmurHash.update(hashCode, o.context);\r\n\t\t\thashCode = MurmurHash.finish(hashCode, 2);\r\n\t\t\treturn hashCode;\r\n\t\t}\r\n\r\n\t\t@Override\r\n\t\tpublic equals(a: ATNConfig, b: ATNConfig): boolean {\r\n\t\t\tif (a === b) {\r\n\t\t\t\treturn true;\r\n\t\t\t}\r\n\t\t\tif (a == null || b == null) {\r\n\t\t\t\treturn false;\r\n\t\t\t}\r\n\t\t\treturn a.state.stateNumber === b.state.stateNumber\r\n\t\t\t\t&& a.context.equals(b.context);\r\n\t\t}\r\n\t}\r\n\r\n\t/**\r\n\t * Checks if any configuration in `configs` is in a\r\n\t * {@link RuleStopState}. 
Configurations meeting this condition have reached\r\n\t * the end of the decision rule (local context) or end of start rule (full\r\n\t * context).\r\n\t *\r\n\t * @param configs the configuration set to test\r\n\t * @returns `true` if any configuration in `configs` is in a\r\n\t * {@link RuleStopState}, otherwise `false`\r\n\t */\r\n\texport function hasConfigInRuleStopState(configs: ATNConfigSet): boolean {\r\n\t\tfor (let c of configs) {\r\n\t\t\tif (c.state instanceof RuleStopState) {\r\n\t\t\t\treturn true;\r\n\t\t\t}\r\n\t\t}\r\n\r\n\t\treturn false;\r\n\t}\r\n\r\n\t/**\r\n\t * Checks if all configurations in `configs` are in a\r\n\t * {@link RuleStopState}. Configurations meeting this condition have reached\r\n\t * the end of the decision rule (local context) or end of start rule (full\r\n\t * context).\r\n\t *\r\n\t * @param configs the configuration set to test\r\n\t * @returns `true` if all configurations in `configs` are in a\r\n\t * {@link RuleStopState}, otherwise `false`\r\n\t */\r\n\texport function allConfigsInRuleStopStates(/*@NotNull*/ configs: ATNConfigSet): boolean {\r\n\t\tfor (let config of configs) {\r\n\t\t\tif (!(config.state instanceof RuleStopState)) {\r\n\t\t\t\treturn false;\r\n\t\t\t}\r\n\t\t}\r\n\r\n\t\treturn true;\r\n\t}\r\n}\r\n", "/*!\r\n * Copyright 2016 The ANTLR Project. All rights reserved.\r\n * Licensed under the BSD-3-Clause license. 
See LICENSE file in the project root for license information.\r\n */\r\n\r\n// ConvertTo-TS run at 2016-10-04T11:26:37.3871984-07:00\r\n\r\nimport { DFAState } from \"../dfa/DFAState\";\r\nimport { NotNull } from \"../Decorators\";\r\nimport { ParserRuleContext } from \"../ParserRuleContext\";\r\n\r\n/**\r\n *\r\n * @author Sam Harwell\r\n */\r\nexport class SimulatorState {\r\n\tpublic outerContext: ParserRuleContext;\r\n\r\n\tpublic s0: DFAState;\r\n\r\n\tpublic useContext: boolean;\r\n\tpublic remainingOuterContext: ParserRuleContext | undefined;\r\n\r\n\tconstructor(outerContext: ParserRuleContext, @NotNull s0: DFAState, useContext: boolean, remainingOuterContext: ParserRuleContext | undefined) {\r\n\t\tthis.outerContext = outerContext != null ? outerContext : ParserRuleContext.emptyContext();\r\n\t\tthis.s0 = s0;\r\n\t\tthis.useContext = useContext;\r\n\t\tthis.remainingOuterContext = remainingOuterContext;\r\n\t}\r\n}\r\n", "/*!\r\n * Copyright 2016 The ANTLR Project. All rights reserved.\r\n * Licensed under the BSD-3-Clause license. 
See LICENSE file in the project root for license information.\r\n */\r\n\r\n// ConvertTo-TS run at 2016-10-04T11:26:31.1989835-07:00\r\n\r\nimport { AcceptStateInfo } from \"../dfa/AcceptStateInfo\";\r\nimport { ActionTransition } from \"./ActionTransition\";\r\nimport { Array2DHashSet } from \"../misc/Array2DHashSet\";\r\nimport { Arrays } from \"../misc/Arrays\";\r\nimport { ATN } from \"./ATN\";\r\nimport { ATNConfig } from \"./ATNConfig\";\r\nimport { ATNConfigSet } from \"./ATNConfigSet\";\r\nimport { ATNSimulator } from \"./ATNSimulator\";\r\nimport { ATNState } from \"./ATNState\";\r\nimport { ATNStateType } from \"./ATNStateType\";\r\nimport { AtomTransition } from \"./AtomTransition\";\r\nimport { BitSet } from \"../misc/BitSet\";\r\nimport { ConflictInfo } from \"./ConflictInfo\";\r\nimport { DecisionState } from \"./DecisionState\";\r\nimport { DFA } from \"../dfa/DFA\";\r\nimport { DFAState } from \"../dfa/DFAState\";\r\nimport { EpsilonTransition } from \"./EpsilonTransition\";\r\nimport { IntegerList } from \"../misc/IntegerList\";\r\nimport { Interval } from \"../misc/Interval\";\r\nimport { IntStream } from \"../IntStream\";\r\nimport { NotNull, Nullable, Override } from \"../Decorators\";\r\nimport { NotSetTransition } from \"./NotSetTransition\";\r\nimport { NoViableAltException } from \"../NoViableAltException\";\r\nimport { ObjectEqualityComparator } from \"../misc/ObjectEqualityComparator\";\r\nimport { Parser } from \"../Parser\";\r\nimport { ParserRuleContext } from \"../ParserRuleContext\";\r\nimport { PrecedencePredicateTransition } from \"./PrecedencePredicateTransition\";\r\nimport { PredicateTransition } from \"./PredicateTransition\";\r\nimport { PredictionContext } from \"./PredictionContext\";\r\nimport { PredictionContextCache } from \"./PredictionContextCache\";\r\nimport { PredictionMode } from \"./PredictionMode\";\r\nimport { RuleContext } from \"../RuleContext\";\r\nimport { RuleStopState } from \"./RuleStopState\";\r\nimport { 
RuleTransition } from \"./RuleTransition\";\r\nimport { SemanticContext } from \"./SemanticContext\";\r\nimport { SetTransition } from \"./SetTransition\";\r\nimport { SimulatorState } from \"./SimulatorState\";\r\nimport { StarLoopEntryState } from \"./StarLoopEntryState\";\r\nimport { Token } from \"../Token\";\r\nimport { TokenStream } from \"../TokenStream\";\r\nimport { Transition } from \"./Transition\";\r\nimport { TransitionType } from \"./TransitionType\";\r\nimport { Vocabulary } from \"../Vocabulary\";\r\nimport { VocabularyImpl } from \"../VocabularyImpl\";\r\n\r\nimport * as assert from \"assert\";\r\n\r\nconst MAX_SHORT_VALUE = 0xFFFF;\r\nconst MIN_INTEGER_VALUE = -((1 << 31) >>> 0);\r\n\r\n/**\r\n * The embodiment of the adaptive LL(*), ALL(*), parsing strategy.\r\n *\r\n * The basic complexity of the adaptive strategy makes it harder to understand.\r\n * We begin with ATN simulation to build paths in a DFA. Subsequent prediction\r\n * requests go through the DFA first. If they reach a state without an edge for\r\n * the current symbol, the algorithm fails over to the ATN simulation to\r\n * complete the DFA path for the current input (until it finds a conflict state\r\n * or uniquely predicting state).\r\n *\r\n * All of that is done without using the outer context because we want to create\r\n * a DFA that is not dependent upon the rule invocation stack when we do a\r\n * prediction. One DFA works in all contexts. We avoid using context not\r\n * necessarily because it's slower, although it can be, but because of the DFA\r\n * caching problem. The closure routine only considers the rule invocation stack\r\n * created during prediction beginning in the decision rule. For example, if\r\n * prediction occurs without invoking another rule's ATN, there are no context\r\n * stacks in the configurations. 
When lack of context leads to a conflict, we\r\n * don't know if it's an ambiguity or a weakness in the strong LL(*) parsing\r\n * strategy (versus full LL(*)).\r\n *\r\n * When SLL yields a configuration set with conflict, we rewind the input and\r\n * retry the ATN simulation, this time using full outer context without adding\r\n * to the DFA. Configuration context stacks will be the full invocation stacks\r\n * from the start rule. If we get a conflict using full context, then we can\r\n * definitively say we have a true ambiguity for that input sequence. If we\r\n * don't get a conflict, it implies that the decision is sensitive to the outer\r\n * context. (It is not context-sensitive in the sense of context-sensitive\r\n * grammars.)\r\n *\r\n * The next time we reach this DFA state with an SLL conflict, through DFA\r\n * simulation, we will again retry the ATN simulation using full context mode.\r\n * This is slow because we can't save the results and have to \"interpret\" the\r\n * ATN each time we get that input.\r\n *\r\n * **CACHING FULL CONTEXT PREDICTIONS**\r\n *\r\n * We could cache results from full context to predicted alternative easily and\r\n * that saves a lot of time but doesn't work in presence of predicates. The set\r\n * of visible predicates from the ATN start state changes depending on the\r\n * context, because closure can fall off the end of a rule. I tried to cache\r\n * tuples (stack context, semantic context, predicted alt) but it was slower\r\n * than interpreting and much more complicated. Also required a huge amount of\r\n * memory. The goal is not to create the world's fastest parser anyway. I'd like\r\n * to keep this algorithm simple. By launching multiple threads, we can improve\r\n * the speed of parsing across a large number of files.\r\n *\r\n * There is no strict ordering between the amount of input used by SLL vs LL,\r\n * which makes it really hard to build a cache for full context. 
Let's say that\r\n * we have input A B C that leads to an SLL conflict with full context X. That\r\n * implies that using X we might only use A B but we could also use A B C D to\r\n * resolve conflict. Input A B C D could predict alternative 1 in one position\r\n * in the input and A B C E could predict alternative 2 in another position in\r\n * input. The conflicting SLL configurations could still be non-unique in the\r\n * full context prediction, which would lead us to requiring more input than the\r\n * original A B C.\tTo make a\tprediction cache work, we have to track\tthe exact\r\n * input\tused during the previous prediction. That amounts to a cache that maps\r\n * X to a specific DFA for that context.\r\n *\r\n * Something should be done for left-recursive expression predictions. They are\r\n * likely LL(1) + pred eval. Easier to do the whole SLL unless error and retry\r\n * with full LL thing Sam does.\r\n *\r\n * **AVOIDING FULL CONTEXT PREDICTION**\r\n *\r\n * We avoid doing full context retry when the outer context is empty, we did not\r\n * dip into the outer context by falling off the end of the decision state rule,\r\n * or when we force SLL mode.\r\n *\r\n * As an example of the not dip into outer context case, consider as super\r\n * constructor calls versus function calls. One grammar might look like\r\n * this:\r\n *\r\n * ```antlr\r\n * ctorBody\r\n * : '{' superCall? stat* '}'\r\n * ;\r\n * ```\r\n *\r\n * Or, you might see something like\r\n *\r\n * ```antlr\r\n * stat\r\n * : superCall ';'\r\n * | expression ';'\r\n * | ...\r\n * ;\r\n * ```\r\n *\r\n * In both cases I believe that no closure operations will dip into the outer\r\n * context. In the first case ctorBody in the worst case will stop at the '}'.\r\n * In the 2nd case it should stop at the ';'. 
Both cases should stay within the\r\n * entry rule and not dip into the outer context.\r\n *\r\n * **PREDICATES**\r\n *\r\n * Predicates are always evaluated if present in either SLL or LL both. SLL and\r\n * LL simulation deals with predicates differently. SLL collects predicates as\r\n * it performs closure operations like ANTLR v3 did. It delays predicate\r\n * evaluation until it reaches and accept state. This allows us to cache the SLL\r\n * ATN simulation whereas, if we had evaluated predicates on-the-fly during\r\n * closure, the DFA state configuration sets would be different and we couldn't\r\n * build up a suitable DFA.\r\n *\r\n * When building a DFA accept state during ATN simulation, we evaluate any\r\n * predicates and return the sole semantically valid alternative. If there is\r\n * more than 1 alternative, we report an ambiguity. If there are 0 alternatives,\r\n * we throw an exception. Alternatives without predicates act like they have\r\n * true predicates. The simple way to think about it is to strip away all\r\n * alternatives with false predicates and choose the minimum alternative that\r\n * remains.\r\n *\r\n * When we start in the DFA and reach an accept state that's predicated, we test\r\n * those and return the minimum semantically viable alternative. If no\r\n * alternatives are viable, we throw an exception.\r\n *\r\n * During full LL ATN simulation, closure always evaluates predicates and\r\n * on-the-fly. This is crucial to reducing the configuration set size during\r\n * closure. It hits a landmine when parsing with the Java grammar, for example,\r\n * without this on-the-fly evaluation.\r\n *\r\n * **SHARING DFA**\r\n *\r\n * All instances of the same parser share the same decision DFAs through a\r\n * static field. Each instance gets its own ATN simulator but they share the\r\n * same {@link ATN#decisionToDFA} field. 
They also share a\r\n * {@link PredictionContextCache} object that makes sure that all\r\n * {@link PredictionContext} objects are shared among the DFA states. This makes\r\n * a big size difference.\r\n *\r\n * **THREAD SAFETY**\r\n *\r\n * The {@link ParserATNSimulator} locks on the {@link ATN#decisionToDFA} field when\r\n * it adds a new DFA object to that array. {@link #addDFAEdge}\r\n * locks on the DFA for the current decision when setting the\r\n * {@link DFAState#edges} field. {@link #addDFAState} locks on\r\n * the DFA for the current decision when looking up a DFA state to see if it\r\n * already exists. We must make sure that all requests to add DFA states that\r\n * are equivalent result in the same shared DFA object. This is because lots of\r\n * threads will be trying to update the DFA at once. The\r\n * {@link #addDFAState} method also locks inside the DFA lock\r\n * but this time on the shared context cache when it rebuilds the\r\n * configurations' {@link PredictionContext} objects using cached\r\n * subgraphs/nodes. No other locking occurs, even during DFA simulation. This is\r\n * safe as long as we can guarantee that all threads referencing\r\n * `s.edge[t]` get the same physical target {@link DFAState}, or\r\n * `undefined`. Once into the DFA, the DFA simulation does not reference the\r\n * {@link DFA#states} map. It follows the {@link DFAState#edges} field to new\r\n * targets. The DFA simulator will either find {@link DFAState#edges} to be\r\n * `undefined`, to be non-`undefined` and `dfa.edges[t]` undefined, or\r\n * `dfa.edges[t]` to be non-undefined. The\r\n * {@link #addDFAEdge} method could be racing to set the field\r\n * but in either case the DFA simulator works; if `undefined`, and requests ATN\r\n * simulation. 
It could also race trying to get `dfa.edges[t]`, but either\r\n * way it will work because it's not doing a test and set operation.\r\n *\r\n * **Starting with SLL then failing to combined SLL/LL (Two-Stage\r\n * Parsing)**\r\n *\r\n * Sam pointed out that if SLL does not give a syntax error, then there is no\r\n * point in doing full LL, which is slower. We only have to try LL if we get a\r\n * syntax error. For maximum speed, Sam starts the parser set to pure SLL\r\n * mode with the {@link BailErrorStrategy}:\r\n *\r\n * ```\r\n * parser.interpreter.{@link #setPredictionMode setPredictionMode}`(`{@link PredictionMode#SLL}`)`;\r\n * parser.{@link Parser#setErrorHandler setErrorHandler}(new {@link BailErrorStrategy}());\r\n * ```\r\n *\r\n * If it does not get a syntax error, then we're done. If it does get a syntax\r\n * error, we need to retry with the combined SLL/LL strategy.\r\n *\r\n * The reason this works is as follows. If there are no SLL conflicts, then the\r\n * grammar is SLL (at least for that input set). If there is an SLL conflict,\r\n * the full LL analysis must yield a set of viable alternatives which is a\r\n * subset of the alternatives reported by SLL. If the LL set is a singleton,\r\n * then the grammar is LL but not SLL. If the LL set is the same size as the SLL\r\n * set, the decision is SLL. If the LL set has size > 1, then that decision\r\n * is truly ambiguous on the current input. If the LL set is smaller, then the\r\n * SLL conflict resolution might choose an alternative that the full LL would\r\n * rule out as a possibility based upon better context information. If that's\r\n * the case, then the SLL parse will definitely get an error because the full LL\r\n * analysis says it's not viable. 
If SLL conflict resolution chooses an\r\n * alternative within the LL set, them both SLL and LL would choose the same\r\n * alternative because they both choose the minimum of multiple conflicting\r\n * alternatives.\r\n *\r\n * Let's say we have a set of SLL conflicting alternatives `{1, 2, 3}` and\r\n * a smaller LL set called *s*. If *s* is `{2, 3}`, then SLL\r\n * parsing will get an error because SLL will pursue alternative 1. If\r\n * *s* is `{1, 2}` or `{1, 3}` then both SLL and LL will\r\n * choose the same alternative because alternative one is the minimum of either\r\n * set. If *s* is `{2}` or `{3}` then SLL will get a syntax\r\n * error. If *s* is `{1}` then SLL will succeed.\r\n *\r\n * Of course, if the input is invalid, then we will get an error for sure in\r\n * both SLL and LL parsing. Erroneous input will therefore require 2 passes over\r\n * the input.\r\n */\r\nexport class ParserATNSimulator extends ATNSimulator {\r\n\tpublic static debug: boolean = false;\r\n\tpublic static dfa_debug: boolean = false;\r\n\tpublic static retry_debug: boolean = false;\r\n\r\n\t@NotNull\r\n\tprivate predictionMode: PredictionMode = PredictionMode.LL;\r\n\tpublic force_global_context: boolean = false;\r\n\tpublic always_try_local_context: boolean = true;\r\n\r\n\t/**\r\n\t * Determines whether the DFA is used for full-context predictions. When\r\n\t * `true`, the DFA stores transition information for both full-context\r\n\t * and SLL parsing; otherwise, the DFA only stores SLL transition\r\n\t * information.\r\n\t *\r\n\t * For some grammars, enabling the full-context DFA can result in a\r\n\t * substantial performance improvement. 
However, this improvement typically\r\n\t * comes at the expense of memory used for storing the cached DFA states,\r\n\t * configuration sets, and prediction contexts.\r\n\t *\r\n\t * The default value is `false`.\r\n\t */\r\n\tpublic enable_global_context_dfa: boolean = false;\r\n\tpublic optimize_unique_closure: boolean = true;\r\n\tpublic optimize_ll1: boolean = true;\r\n\tpublic optimize_tail_calls: boolean = true;\r\n\tpublic tail_call_preserves_sll: boolean = true;\r\n\tpublic treat_sllk1_conflict_as_ambiguity: boolean = false;\r\n\r\n\tprotected _parser: Parser;\r\n\r\n\t/**\r\n\t * When `true`, ambiguous alternatives are reported when they are\r\n\t * encountered within {@link #execATN}. When `false`, these messages\r\n\t * are suppressed. The default is `false`.\r\n\t *\r\n\t * When messages about ambiguous alternatives are not required, setting this\r\n\t * to `false` enables additional internal optimizations which may lose\r\n\t * this information.\r\n\t */\r\n\tpublic reportAmbiguities: boolean = false;\r\n\r\n\t/** By default we do full context-sensitive LL(*) parsing not\r\n\t * Strong LL(*) parsing. If we fail with Strong LL(*) we\r\n\t * try full LL(*). 
That means we rewind and use context information\r\n\t * when closure operations fall off the end of the rule that\r\n\t * holds the decision were evaluating.\r\n\t */\r\n\tprotected userWantsCtxSensitive: boolean = true;\r\n\r\n\tprivate dfa?: DFA;\r\n\r\n\tconstructor(@NotNull atn: ATN, parser: Parser) {\r\n\t\tsuper(atn);\r\n\t\tthis._parser = parser;\r\n\t}\r\n\r\n\t@NotNull\r\n\tpublic getPredictionMode(): PredictionMode {\r\n\t\treturn this.predictionMode;\r\n\t}\r\n\r\n\tpublic setPredictionMode(@NotNull predictionMode: PredictionMode): void {\r\n\t\tthis.predictionMode = predictionMode;\r\n\t}\r\n\r\n\t@Override\r\n\tpublic reset(): void {\r\n\t\t// intentionally empty\r\n\t}\r\n\r\n\tpublic adaptivePredict(/*@NotNull*/ input: TokenStream, decision: number, outerContext: ParserRuleContext | undefined): number;\r\n\tpublic adaptivePredict(/*@NotNull*/ input: TokenStream, decision: number, outerContext: ParserRuleContext | undefined, useContext: boolean): number;\r\n\tpublic adaptivePredict(\r\n\t\t@NotNull input: TokenStream,\r\n\t\tdecision: number,\r\n\t\touterContext: ParserRuleContext | undefined,\r\n\t\tuseContext?: boolean): number {\r\n\t\tif (useContext === undefined) {\r\n\t\t\tuseContext = false;\r\n\t\t}\r\n\r\n\t\tlet dfa: DFA = this.atn.decisionToDFA[decision];\r\n\t\tassert(dfa != null);\r\n\t\tif (this.optimize_ll1 && !dfa.isPrecedenceDfa && !dfa.isEmpty) {\r\n\t\t\tlet ll_1: number = input.LA(1);\r\n\t\t\tif (ll_1 >= 0 && ll_1 <= 0xFFFF) {\r\n\t\t\t\tlet key: number = ((decision << 16) >>> 0) + ll_1;\r\n\t\t\t\tlet alt: number | undefined = this.atn.LL1Table.get(key);\r\n\t\t\t\tif (alt != null) {\r\n\t\t\t\t\treturn alt;\r\n\t\t\t\t}\r\n\t\t\t}\r\n\t\t}\r\n\r\n\t\tthis.dfa = dfa;\r\n\r\n\t\tif (this.force_global_context) {\r\n\t\t\tuseContext = true;\r\n\t\t}\r\n\t\telse if (!this.always_try_local_context) {\r\n\t\t\tuseContext = useContext || dfa.isContextSensitive;\r\n\t\t}\r\n\r\n\t\tthis.userWantsCtxSensitive = useContext || 
(this.predictionMode !== PredictionMode.SLL && outerContext != null && !this.atn.decisionToState[decision].sll);\r\n\t\tif (outerContext == null) {\r\n\t\t\touterContext = ParserRuleContext.emptyContext();\r\n\t\t}\r\n\r\n\t\tlet state: SimulatorState | undefined;\r\n\t\tif (!dfa.isEmpty) {\r\n\t\t\tstate = this.getStartState(dfa, input, outerContext, useContext);\r\n\t\t}\r\n\r\n\t\tif (state == null) {\r\n\t\t\tif (outerContext == null) {\r\n\t\t\t\touterContext = ParserRuleContext.emptyContext();\r\n\t\t\t}\r\n\t\t\tif (ParserATNSimulator.debug) {\r\n\t\t\t\tconsole.log(\"ATN decision \" + dfa.decision +\r\n\t\t\t\t\t\" exec LA(1)==\" + this.getLookaheadName(input) +\r\n\t\t\t\t\t\", outerContext=\" + outerContext.toString(this._parser));\r\n\t\t\t}\r\n\r\n\t\t\tstate = this.computeStartState(dfa, outerContext, useContext);\r\n\t\t}\r\n\r\n\t\tlet m: number = input.mark();\r\n\t\tlet index: number = input.index;\r\n\t\ttry {\r\n\t\t\tlet alt: number = this.execDFA(dfa, input, index, state);\r\n\t\t\tif (ParserATNSimulator.debug) {\r\n\t\t\t\tconsole.log(\"DFA after predictATN: \" + dfa.toString(this._parser.vocabulary, this._parser.ruleNames));\r\n\t\t\t}\r\n\t\t\treturn alt;\r\n\t\t}\r\n\t\tfinally {\r\n\t\t\tthis.dfa = undefined;\r\n\t\t\tinput.seek(index);\r\n\t\t\tinput.release(m);\r\n\t\t}\r\n\t}\r\n\r\n\tprotected getStartState(\r\n\t\t@NotNull dfa: DFA,\r\n\t\t@NotNull input: TokenStream,\r\n\t\t@NotNull outerContext: ParserRuleContext,\r\n\t\tuseContext: boolean): SimulatorState | undefined {\r\n\r\n\t\tif (!useContext) {\r\n\t\t\tif (dfa.isPrecedenceDfa) {\r\n\t\t\t\t// the start state for a precedence DFA depends on the current\r\n\t\t\t\t// parser precedence, and is provided by a DFA method.\r\n\t\t\t\tlet state: DFAState | undefined = dfa.getPrecedenceStartState(this._parser.precedence, false);\r\n\t\t\t\tif (state == null) {\r\n\t\t\t\t\treturn undefined;\r\n\t\t\t\t}\r\n\r\n\t\t\t\treturn new SimulatorState(outerContext, state, false, 
outerContext);\r\n\t\t\t}\r\n\t\t\telse {\r\n\t\t\t\tif (dfa.s0 == null) {\r\n\t\t\t\t\treturn undefined;\r\n\t\t\t\t}\r\n\r\n\t\t\t\treturn new SimulatorState(outerContext, dfa.s0, false, outerContext);\r\n\t\t\t}\r\n\t\t}\r\n\r\n\t\tif (!this.enable_global_context_dfa) {\r\n\t\t\treturn undefined;\r\n\t\t}\r\n\r\n\t\tlet remainingContext: ParserRuleContext | undefined = outerContext;\r\n\t\tassert(outerContext != null);\r\n\t\tlet s0: DFAState | undefined;\r\n\t\tif (dfa.isPrecedenceDfa) {\r\n\t\t\ts0 = dfa.getPrecedenceStartState(this._parser.precedence, true);\r\n\t\t}\r\n\t\telse {\r\n\t\t\ts0 = dfa.s0full;\r\n\t\t}\r\n\r\n\t\twhile (remainingContext != null && s0 != null && s0.isContextSensitive) {\r\n\t\t\tremainingContext = this.skipTailCalls(remainingContext);\r\n\t\t\ts0 = s0.getContextTarget(this.getReturnState(remainingContext));\r\n\t\t\tif (remainingContext.isEmpty) {\r\n\t\t\t\tassert(s0 == null || !s0.isContextSensitive);\r\n\t\t\t}\r\n\t\t\telse {\r\n\t\t\t\tremainingContext = remainingContext.parent;\r\n\t\t\t}\r\n\t\t}\r\n\r\n\t\tif (s0 == null) {\r\n\t\t\treturn undefined;\r\n\t\t}\r\n\r\n\t\treturn new SimulatorState(outerContext, s0, useContext, remainingContext);\r\n\t}\r\n\r\n\tprotected execDFA(\r\n\t\t@NotNull dfa: DFA,\r\n\t\t@NotNull input: TokenStream, startIndex: number,\r\n\t\t@NotNull state: SimulatorState): number {\r\n\t\tlet outerContext: ParserRuleContext = state.outerContext;\r\n\t\tif (ParserATNSimulator.dfa_debug) {\r\n\t\t\tconsole.log(\"DFA decision \" + dfa.decision +\r\n\t\t\t\t\" exec LA(1)==\" + this.getLookaheadName(input) +\r\n\t\t\t\t\", outerContext=\" + outerContext.toString(this._parser));\r\n\t\t}\r\n\t\tif (ParserATNSimulator.dfa_debug) {\r\n\t\t\tconsole.log(dfa.toString(this._parser.vocabulary, this._parser.ruleNames));\r\n\t\t}\r\n\t\tlet s: DFAState = state.s0;\r\n\r\n\t\tlet t: number = input.LA(1);\r\n\t\tlet remainingOuterContext: ParserRuleContext | undefined = 
state.remainingOuterContext;\r\n\r\n\t\twhile (true) {\r\n\t\t\tif (ParserATNSimulator.dfa_debug) {\r\n\t\t\t\tconsole.log(\"DFA state \" + s.stateNumber + \" LA(1)==\" + this.getLookaheadName(input));\r\n\t\t\t}\r\n\t\t\tif (state.useContext) {\r\n\t\t\t\twhile (s.isContextSymbol(t)) {\r\n\t\t\t\t\tlet next: DFAState | undefined;\r\n\t\t\t\t\tif (remainingOuterContext != null) {\r\n\t\t\t\t\t\tremainingOuterContext = this.skipTailCalls(remainingOuterContext);\r\n\t\t\t\t\t\tnext = s.getContextTarget(this.getReturnState(remainingOuterContext));\r\n\t\t\t\t\t}\r\n\r\n\t\t\t\t\tif (next == null) {\r\n\t\t\t\t\t\t// fail over to ATN\r\n\t\t\t\t\t\tlet initialState: SimulatorState = new SimulatorState(state.outerContext, s, state.useContext, remainingOuterContext);\r\n\t\t\t\t\t\treturn this.execATN(dfa, input, startIndex, initialState);\r\n\t\t\t\t\t}\r\n\r\n\t\t\t\t\tassert(remainingOuterContext != null);\r\n\t\t\t\t\tremainingOuterContext = remainingOuterContext.parent;\r\n\t\t\t\t\ts = next;\r\n\t\t\t\t}\r\n\t\t\t}\r\n\r\n\t\t\tif (this.isAcceptState(s, state.useContext)) {\r\n\t\t\t\tif (s.predicates != null) {\r\n\t\t\t\t\tif (ParserATNSimulator.dfa_debug) {\r\n\t\t\t\t\t\tconsole.log(\"accept \" + s);\r\n\t\t\t\t\t}\r\n\t\t\t\t}\r\n\t\t\t\telse {\r\n\t\t\t\t\tif (ParserATNSimulator.dfa_debug) {\r\n\t\t\t\t\t\tconsole.log(\"accept; predict \" + s.prediction + \" in state \" + s.stateNumber);\r\n\t\t\t\t\t}\r\n\t\t\t\t}\r\n\r\n\t\t\t\t// keep going unless we're at EOF or state only has one alt number\r\n\t\t\t\t// mentioned in configs; check if something else could match\r\n\t\t\t\t// TODO: don't we always stop? 
only lexer would keep going\r\n\t\t\t\t// TODO: v3 dfa don't do this.\r\n\t\t\t\tbreak;\r\n\t\t\t}\r\n\r\n\t\t\t// t is not updated if one of these states is reached\r\n\t\t\tassert(!this.isAcceptState(s, state.useContext));\r\n\r\n\t\t\t// if no edge, pop over to ATN interpreter, update DFA and return\r\n\t\t\tlet target: DFAState | undefined = this.getExistingTargetState(s, t);\r\n\t\t\tif (target == null) {\r\n\t\t\t\tif (ParserATNSimulator.dfa_debug && t >= 0) {\r\n\t\t\t\t\tconsole.log(\"no edge for \" + this._parser.vocabulary.getDisplayName(t));\r\n\t\t\t\t}\r\n\t\t\t\tlet alt: number;\r\n\t\t\t\tif (ParserATNSimulator.dfa_debug) {\r\n\t\t\t\t\tlet interval: Interval = Interval.of(startIndex, this._parser.inputStream.index);\r\n\t\t\t\t\tconsole.log(\"ATN exec upon \" +\r\n\t\t\t\t\t\tthis._parser.inputStream.getText(interval) +\r\n\t\t\t\t\t\t\" at DFA state \" + s.stateNumber);\r\n\t\t\t\t}\r\n\r\n\t\t\t\tlet initialState: SimulatorState = new SimulatorState(outerContext, s, state.useContext, remainingOuterContext);\r\n\t\t\t\talt = this.execATN(dfa, input, startIndex, initialState);\r\n\t\t\t\tif (ParserATNSimulator.dfa_debug) {\r\n\t\t\t\t\tconsole.log(\"back from DFA update, alt=\" + alt + \", dfa=\\n\" + dfa.toString(this._parser.vocabulary, this._parser.ruleNames));\r\n\t\t\t\t\t//dump(dfa);\r\n\t\t\t\t}\r\n\t\t\t\t// action already executed\r\n\t\t\t\tif (ParserATNSimulator.dfa_debug) {\r\n\t\t\t\t\tconsole.log(\"DFA decision \" + dfa.decision +\r\n\t\t\t\t\t\t\" predicts \" + alt);\r\n\t\t\t\t}\r\n\t\t\t\treturn alt; // we've updated DFA, exec'd action, and have our deepest answer\r\n\t\t\t}\r\n\t\t\telse if (target === ATNSimulator.ERROR) {\r\n\t\t\t\tlet errorState: SimulatorState = new SimulatorState(outerContext, s, state.useContext, remainingOuterContext);\r\n\t\t\t\treturn this.handleNoViableAlt(input, startIndex, errorState);\r\n\t\t\t}\r\n\t\t\ts = target;\r\n\t\t\tif (!this.isAcceptState(s, state.useContext) && t !== IntStream.EOF) 
{\r\n\t\t\t\tinput.consume();\r\n\t\t\t\tt = input.LA(1);\r\n\t\t\t}\r\n\t\t}\r\n//\t\tif ( acceptState==null ) {\r\n//\t\t\tif ( debug ) System.out.println(\"!!! no viable alt in dfa\");\r\n//\t\t\treturn -1;\r\n//\t\t}\r\n\r\n\t\tif (!state.useContext && s.configs.conflictInfo != null) {\r\n\t\t\tif (dfa.atnStartState instanceof DecisionState) {\r\n\t\t\t\tif (!this.userWantsCtxSensitive ||\r\n\t\t\t\t\t(!s.configs.dipsIntoOuterContext && s.configs.isExactConflict) ||\r\n\t\t\t\t\t(this.treat_sllk1_conflict_as_ambiguity && input.index === startIndex)) {\r\n\t\t\t\t\t// we don't report the ambiguity again\r\n\t\t\t\t\t//if ( !this.acceptState.configset.hasSemanticContext ) {\r\n\t\t\t\t\t// \tthis.reportAmbiguity(dfa, acceptState, startIndex, input.index, acceptState.configset.conflictingAlts, acceptState.configset);\r\n\t\t\t\t\t//}\r\n\t\t\t\t}\r\n\t\t\t\telse {\r\n\t\t\t\t\tassert(!state.useContext);\r\n\r\n\t\t\t\t\t// Before attempting full context prediction, check to see if there are\r\n\t\t\t\t\t// disambiguating or validating predicates to evaluate which allow an\r\n\t\t\t\t\t// immediate decision\r\n\t\t\t\t\tlet conflictingAlts: BitSet | undefined;\r\n\t\t\t\t\tlet predicates: DFAState.PredPrediction[] | undefined = s.predicates;\r\n\t\t\t\t\tif (predicates != null) {\r\n\t\t\t\t\t\tlet conflictIndex: number = input.index;\r\n\t\t\t\t\t\tif (conflictIndex !== startIndex) {\r\n\t\t\t\t\t\t\tinput.seek(startIndex);\r\n\t\t\t\t\t\t}\r\n\r\n\t\t\t\t\t\tconflictingAlts = this.evalSemanticContext(predicates, outerContext, true);\r\n\t\t\t\t\t\tif (conflictingAlts.cardinality() === 1) {\r\n\t\t\t\t\t\t\treturn conflictingAlts.nextSetBit(0);\r\n\t\t\t\t\t\t}\r\n\r\n\t\t\t\t\t\tif (conflictIndex !== startIndex) {\r\n\t\t\t\t\t\t\t// restore the index so reporting the fallback to full\r\n\t\t\t\t\t\t\t// context occurs with the index at the correct spot\r\n\t\t\t\t\t\t\tinput.seek(conflictIndex);\r\n\t\t\t\t\t\t}\r\n\t\t\t\t\t}\r\n\r\n\t\t\t\t\tif 
(this.reportAmbiguities) {\r\n\t\t\t\t\t\tlet conflictState: SimulatorState = new SimulatorState(outerContext, s, state.useContext, remainingOuterContext);\r\n\t\t\t\t\t\tthis.reportAttemptingFullContext(dfa, conflictingAlts, conflictState, startIndex, input.index);\r\n\t\t\t\t\t}\r\n\r\n\t\t\t\t\tinput.seek(startIndex);\r\n\t\t\t\t\treturn this.adaptivePredict(input, dfa.decision, outerContext, true);\r\n\t\t\t\t}\r\n\t\t\t}\r\n\t\t}\r\n\r\n\t\t// Before jumping to prediction, check to see if there are\r\n\t\t// disambiguating or validating predicates to evaluate\r\n\t\tlet predicates: DFAState.PredPrediction[] | undefined = s.predicates;\r\n\t\tif (predicates != null) {\r\n\t\t\tlet stopIndex: number = input.index;\r\n\t\t\tif (startIndex !== stopIndex) {\r\n\t\t\t\tinput.seek(startIndex);\r\n\t\t\t}\r\n\r\n\t\t\tlet alts: BitSet = this.evalSemanticContext(predicates, outerContext, this.reportAmbiguities && this.predictionMode === PredictionMode.LL_EXACT_AMBIG_DETECTION);\r\n\t\t\tswitch (alts.cardinality()) {\r\n\t\t\tcase 0:\r\n\t\t\t\tthrow this.noViableAlt(input, outerContext, s.configs, startIndex);\r\n\r\n\t\t\tcase 1:\r\n\t\t\t\treturn alts.nextSetBit(0);\r\n\r\n\t\t\tdefault:\r\n\t\t\t\t// report ambiguity after predicate evaluation to make sure the correct\r\n\t\t\t\t// set of ambig alts is reported.\r\n\t\t\t\tif (startIndex !== stopIndex) {\r\n\t\t\t\t\tinput.seek(stopIndex);\r\n\t\t\t\t}\r\n\r\n\t\t\t\tthis.reportAmbiguity(dfa, s, startIndex, stopIndex, s.configs.isExactConflict, alts, s.configs);\r\n\t\t\t\treturn alts.nextSetBit(0);\r\n\t\t\t}\r\n\t\t}\r\n\r\n\t\tif (ParserATNSimulator.dfa_debug) {\r\n\t\t\tconsole.log(\"DFA decision \" + dfa.decision +\r\n\t\t\t\t\" predicts \" + s.prediction);\r\n\t\t}\r\n\t\treturn s.prediction;\r\n\t}\r\n\r\n\t/**\r\n\t * Determines if a particular DFA state should be treated as an accept state\r\n\t * for the current prediction mode. 
In addition to the `useContext`\r\n\t * parameter, the {@link #getPredictionMode()} method provides the\r\n\t * prediction mode controlling the prediction algorithm as a whole.\r\n\t *\r\n\t * The default implementation simply returns the value of\r\n\t * `DFAState.isAcceptState` except for conflict states when\r\n\t * `useContext` is `true` and {@link #getPredictionMode()} is\r\n\t * {@link PredictionMode#LL_EXACT_AMBIG_DETECTION}. In that case, only\r\n\t * conflict states where {@link ATNConfigSet#isExactConflict} is\r\n\t * `true` are considered accept states.\r\n\t *\r\n\t * @param state The DFA state to check.\r\n\t * @param useContext `true` if the prediction algorithm is currently\r\n\t * considering the full parser context; otherwise, `false` if the\r\n\t * algorithm is currently performing a local context prediction.\r\n\t *\r\n\t * @returns `true` if the specified `state` is an accept state;\r\n\t * otherwise, `false`.\r\n\t */\r\n\tprotected isAcceptState(state: DFAState, useContext: boolean): boolean {\r\n\t\tif (!state.isAcceptState) {\r\n\t\t\treturn false;\r\n\t\t}\r\n\r\n\t\tif (state.configs.conflictingAlts == null) {\r\n\t\t\t// unambiguous\r\n\t\t\treturn true;\r\n\t\t}\r\n\r\n\t\t// More picky when we need exact conflicts\r\n\t\tif (useContext && this.predictionMode === PredictionMode.LL_EXACT_AMBIG_DETECTION) {\r\n\t\t\treturn state.configs.isExactConflict;\r\n\t\t}\r\n\r\n\t\treturn true;\r\n\t}\r\n\r\n\t/** Performs ATN simulation to compute a predicted alternative based\r\n\t * upon the remaining input, but also updates the DFA cache to avoid\r\n\t * having to traverse the ATN again for the same input sequence.\r\n\t *\r\n\t * There are some key conditions we're looking for after computing a new\r\n\t * set of ATN configs (proposed DFA state):\r\n\t *\r\n\t * * if the set is empty, there is no viable alternative for current symbol\r\n\t * * does the state uniquely predict an alternative?\r\n\t * * does the state have a conflict that would 
prevent us from\r\n\t * putting it on the work list?\r\n\t * * if in non-greedy decision is there a config at a rule stop state?\r\n\t *\r\n\t * We also have some key operations to do:\r\n\t *\r\n\t * * add an edge from previous DFA state to potentially new DFA state, D,\r\n\t * upon current symbol but only if adding to work list, which means in all\r\n\t * cases except no viable alternative (and possibly non-greedy decisions?)\r\n\t * * collecting predicates and adding semantic context to DFA accept states\r\n\t * * adding rule context to context-sensitive DFA accept states\r\n\t * * consuming an input symbol\r\n\t * * reporting a conflict\r\n\t * * reporting an ambiguity\r\n\t * * reporting a context sensitivity\r\n\t * * reporting insufficient predicates\r\n\t *\r\n\t * We should isolate those operations, which are side-effecting, to the\r\n\t * main work loop. We can isolate lots of code into other functions, but\r\n\t * they should be side effect free. They can return package that\r\n\t * indicates whether we should report something, whether we need to add a\r\n\t * DFA edge, whether we need to augment accept state with semantic\r\n\t * context or rule invocation context. 
Actually, it seems like we always\r\n\t * add predicates if they exist, so that can simply be done in the main\r\n\t * loop for any accept state creation or modification request.\r\n\t *\r\n\t * cover these cases:\r\n\t * dead end\r\n\t * single alt\r\n\t * single alt + preds\r\n\t * conflict\r\n\t * conflict + preds\r\n\t *\r\n\t * TODO: greedy + those\r\n\t */\r\n\tprotected execATN(\r\n\t\t@NotNull dfa: DFA,\r\n\t\t@NotNull input: TokenStream, startIndex: number,\r\n\t\t@NotNull initialState: SimulatorState): number {\r\n\t\tif (ParserATNSimulator.debug) {\r\n\t\t\tconsole.log(\"execATN decision \" + dfa.decision + \" exec LA(1)==\" + this.getLookaheadName(input));\r\n\t\t}\r\n\r\n\t\tlet outerContext: ParserRuleContext = initialState.outerContext;\r\n\t\tlet useContext: boolean = initialState.useContext;\r\n\r\n\t\tlet t: number = input.LA(1);\r\n\r\n\t\tlet previous: SimulatorState = initialState;\r\n\r\n\t\tlet contextCache: PredictionContextCache = new PredictionContextCache();\r\n\t\twhile (true) { // while more work\r\n\t\t\tlet nextState: SimulatorState | undefined = this.computeReachSet(dfa, previous, t, contextCache);\r\n\t\t\tif (nextState == null) {\r\n\t\t\t\tthis.setDFAEdge(previous.s0, input.LA(1), ATNSimulator.ERROR);\r\n\t\t\t\treturn this.handleNoViableAlt(input, startIndex, previous);\r\n\t\t\t}\r\n\r\n\t\t\tlet D: DFAState = nextState.s0;\r\n\r\n\t\t\t// predicted alt => accept state\r\n\t\t\tassert(D.isAcceptState || D.prediction === ATN.INVALID_ALT_NUMBER);\r\n\t\t\t// conflicted => accept state\r\n\t\t\tassert(D.isAcceptState || D.configs.conflictInfo == null);\r\n\r\n\t\t\tif (this.isAcceptState(D, useContext)) {\r\n\t\t\t\tlet conflictingAlts: BitSet | undefined = D.configs.conflictingAlts;\r\n\t\t\t\tlet predictedAlt: number = conflictingAlts == null ? 
D.prediction : ATN.INVALID_ALT_NUMBER;\r\n\t\t\t\tif (predictedAlt !== ATN.INVALID_ALT_NUMBER) {\r\n\t\t\t\t\tif (this.optimize_ll1\r\n\t\t\t\t\t\t&& input.index === startIndex\r\n\t\t\t\t\t\t&& !dfa.isPrecedenceDfa\r\n\t\t\t\t\t\t&& nextState.outerContext === nextState.remainingOuterContext\r\n\t\t\t\t\t\t&& dfa.decision >= 0\r\n\t\t\t\t\t\t&& !D.configs.hasSemanticContext) {\r\n\t\t\t\t\t\tif (t >= 0 && t <= MAX_SHORT_VALUE) {\r\n\t\t\t\t\t\t\tlet key: number = ((dfa.decision << 16) >>> 0) + t;\r\n\t\t\t\t\t\t\tthis.atn.LL1Table.set(key, predictedAlt);\r\n\t\t\t\t\t\t}\r\n\t\t\t\t\t}\r\n\r\n\t\t\t\t\tif (useContext && this.always_try_local_context) {\r\n\t\t\t\t\t\tthis.reportContextSensitivity(dfa, predictedAlt, nextState, startIndex, input.index);\r\n\t\t\t\t\t}\r\n\t\t\t\t}\r\n\r\n\t\t\t\tpredictedAlt = D.prediction;\r\n//\t\t\t\tint k = input.index - startIndex + 1; // how much input we used\r\n//\t\t\t\tSystem.out.println(\"used k=\"+k);\r\n\t\t\t\tlet attemptFullContext: boolean = conflictingAlts != null && this.userWantsCtxSensitive;\r\n\t\t\t\tif (attemptFullContext) {\r\n\t\t\t\t\t// Only exact conflicts are known to be ambiguous when local\r\n\t\t\t\t\t// prediction does not step out of the decision rule.\r\n\t\t\t\t\tattemptFullContext = !useContext\r\n\t\t\t\t\t\t&& (D.configs.dipsIntoOuterContext || !D.configs.isExactConflict)\r\n\t\t\t\t\t\t&& (!this.treat_sllk1_conflict_as_ambiguity || input.index !== startIndex);\r\n\t\t\t\t}\r\n\r\n\t\t\t\tif (D.configs.hasSemanticContext) {\r\n\t\t\t\t\tlet predPredictions: DFAState.PredPrediction[] | undefined = D.predicates;\r\n\t\t\t\t\tif (predPredictions != null) {\r\n\t\t\t\t\t\tlet conflictIndex: number = input.index;\r\n\t\t\t\t\t\tif (conflictIndex !== startIndex) {\r\n\t\t\t\t\t\t\tinput.seek(startIndex);\r\n\t\t\t\t\t\t}\r\n\r\n\t\t\t\t\t\t// use complete evaluation here if we'll want to retry with full context if still ambiguous\r\n\t\t\t\t\t\tconflictingAlts = 
this.evalSemanticContext(predPredictions, outerContext, attemptFullContext || this.reportAmbiguities);\r\n\t\t\t\t\t\tswitch (conflictingAlts.cardinality()) {\r\n\t\t\t\t\t\tcase 0:\r\n\t\t\t\t\t\t\tthrow this.noViableAlt(input, outerContext, D.configs, startIndex);\r\n\r\n\t\t\t\t\t\tcase 1:\r\n\t\t\t\t\t\t\treturn conflictingAlts.nextSetBit(0);\r\n\r\n\t\t\t\t\t\tdefault:\r\n\t\t\t\t\t\t\tbreak;\r\n\t\t\t\t\t\t}\r\n\r\n\t\t\t\t\t\tif (conflictIndex !== startIndex) {\r\n\t\t\t\t\t\t\t// restore the index so reporting the fallback to full\r\n\t\t\t\t\t\t\t// context occurs with the index at the correct spot\r\n\t\t\t\t\t\t\tinput.seek(conflictIndex);\r\n\t\t\t\t\t\t}\r\n\t\t\t\t\t}\r\n\t\t\t\t}\r\n\r\n\t\t\t\tif (!attemptFullContext) {\r\n\t\t\t\t\tif (conflictingAlts != null) {\r\n\t\t\t\t\t\tif (this.reportAmbiguities && conflictingAlts.cardinality() > 1) {\r\n\t\t\t\t\t\t\tthis.reportAmbiguity(dfa, D, startIndex, input.index, D.configs.isExactConflict, conflictingAlts, D.configs);\r\n\t\t\t\t\t\t}\r\n\r\n\t\t\t\t\t\tpredictedAlt = conflictingAlts.nextSetBit(0);\r\n\t\t\t\t\t}\r\n\r\n\t\t\t\t\treturn predictedAlt;\r\n\t\t\t\t}\r\n\t\t\t\telse {\r\n\t\t\t\t\tassert(!useContext);\r\n\t\t\t\t\tassert(this.isAcceptState(D, false));\r\n\r\n\t\t\t\t\tif (ParserATNSimulator.debug) {\r\n\t\t\t\t\t\tconsole.log(\"RETRY with outerContext=\" + outerContext);\r\n\t\t\t\t\t}\r\n\t\t\t\t\tlet fullContextState: SimulatorState = this.computeStartState(dfa, outerContext, true);\r\n\t\t\t\t\tif (this.reportAmbiguities) {\r\n\t\t\t\t\t\tthis.reportAttemptingFullContext(dfa, conflictingAlts, nextState, startIndex, input.index);\r\n\t\t\t\t\t}\r\n\r\n\t\t\t\t\tinput.seek(startIndex);\r\n\t\t\t\t\treturn this.execATN(dfa, input, startIndex, fullContextState);\r\n\t\t\t\t}\r\n\t\t\t}\r\n\r\n\t\t\tprevious = nextState;\r\n\r\n\t\t\tif (t !== IntStream.EOF) {\r\n\t\t\t\tinput.consume();\r\n\t\t\t\tt = input.LA(1);\r\n\t\t\t}\r\n\t\t}\r\n\t}\r\n\r\n\t/**\r\n\t * This method is used to 
improve the localization of error messages by\r\n\t * choosing an alternative rather than throwing a\r\n\t * {@link NoViableAltException} in particular prediction scenarios where the\r\n\t * {@link #ERROR} state was reached during ATN simulation.\r\n\t *\r\n\t * The default implementation of this method uses the following\r\n\t * algorithm to identify an ATN configuration which successfully parsed the\r\n\t * decision entry rule. Choosing such an alternative ensures that the\r\n\t * {@link ParserRuleContext} returned by the calling rule will be complete\r\n\t * and valid, and the syntax error will be reported later at a more\r\n\t * localized location.\r\n\t *\r\n\t * * If no configuration in `configs` reached the end of the\r\n\t * decision rule, return {@link ATN#INVALID_ALT_NUMBER}.\r\n\t * * If all configurations in `configs` which reached the end of the\r\n\t * decision rule predict the same alternative, return that alternative.\r\n\t * * If the configurations in `configs` which reached the end of the\r\n\t * decision rule predict multiple alternatives (call this *S*),\r\n\t * choose an alternative in the following order.\r\n\t *\r\n\t * 1. Filter the configurations in `configs` to only those\r\n\t * configurations which remain viable after evaluating semantic predicates.\r\n\t * If the set of these filtered configurations which also reached the end of\r\n\t * the decision rule is not empty, return the minimum alternative\r\n\t * represented in this set.\r\n\t * 1. Otherwise, choose the minimum alternative in *S*.\r\n\t *\r\n\t * In some scenarios, the algorithm described above could predict an\r\n\t * alternative which will result in a {@link FailedPredicateException} in\r\n\t * parser. Specifically, this could occur if the *only* configuration\r\n\t * capable of successfully parsing to the end of the decision rule is\r\n\t * blocked by a semantic predicate. 
By choosing this alternative within\r\n\t * {@link #adaptivePredict} instead of throwing a\r\n\t * {@link NoViableAltException}, the resulting\r\n\t * {@link FailedPredicateException} in the parser will identify the specific\r\n\t * predicate which is preventing the parser from successfully parsing the\r\n\t * decision rule, which helps developers identify and correct logic errors\r\n\t * in semantic predicates.\r\n\t *\r\n\t * @param input The input {@link TokenStream}\r\n\t * @param startIndex The start index for the current prediction, which is\r\n\t * the input index where any semantic context in `configs` should be\r\n\t * evaluated\r\n\t * @param previous The ATN simulation state immediately before the\r\n\t * {@link #ERROR} state was reached\r\n\t *\r\n\t * @returns The value to return from {@link #adaptivePredict}, or\r\n\t * {@link ATN#INVALID_ALT_NUMBER} if a suitable alternative was not\r\n\t * identified and {@link #adaptivePredict} should report an error instead.\r\n\t */\r\n\tprotected handleNoViableAlt(@NotNull input: TokenStream, startIndex: number, @NotNull previous: SimulatorState): number {\r\n\t\tif (previous.s0 != null) {\r\n\t\t\tlet alts: BitSet = new BitSet();\r\n\t\t\tlet maxAlt: number = 0;\r\n\t\t\tfor (let config of previous.s0.configs) {\r\n\t\t\t\tif (config.reachesIntoOuterContext || config.state instanceof RuleStopState) {\r\n\t\t\t\t\talts.set(config.alt);\r\n\t\t\t\t\tmaxAlt = Math.max(maxAlt, config.alt);\r\n\t\t\t\t}\r\n\t\t\t}\r\n\r\n\t\t\tswitch (alts.cardinality()) {\r\n\t\t\tcase 0:\r\n\t\t\t\tbreak;\r\n\r\n\t\t\tcase 1:\r\n\t\t\t\treturn alts.nextSetBit(0);\r\n\r\n\t\t\tdefault:\r\n\t\t\t\tif (!previous.s0.configs.hasSemanticContext) {\r\n\t\t\t\t\t// configs doesn't contain any predicates, so the predicate\r\n\t\t\t\t\t// filtering code below would be pointless\r\n\t\t\t\t\treturn alts.nextSetBit(0);\r\n\t\t\t\t}\r\n\r\n\t\t\t\t/*\r\n\t\t\t\t * Try to find a configuration set that not only dipped into the outer\r\n\t\t\t\t 
* context, but also isn't eliminated by a predicate.\r\n\t\t\t\t */\r\n\t\t\t\tlet filteredConfigs: ATNConfigSet = new ATNConfigSet();\r\n\t\t\t\tfor (let config of previous.s0.configs) {\r\n\t\t\t\t\tif (config.reachesIntoOuterContext || config.state instanceof RuleStopState) {\r\n\t\t\t\t\t\tfilteredConfigs.add(config);\r\n\t\t\t\t\t}\r\n\t\t\t\t}\r\n\r\n\t\t\t\t/* The following code blocks are adapted from predicateDFAState with\r\n\t\t\t\t * the following key changes.\r\n\t\t\t\t *\r\n\t\t\t\t * 1. The code operates on an ATNConfigSet rather than a DFAState.\r\n\t\t\t\t * 2. Predicates are collected for all alternatives represented in\r\n\t\t\t\t * filteredConfigs, rather than restricting the evaluation to\r\n\t\t\t\t * conflicting and/or unique configurations.\r\n\t\t\t\t */\r\n\t\t\t\tlet altToPred: SemanticContext[] | undefined = this.getPredsForAmbigAlts(alts, filteredConfigs, maxAlt);\r\n\t\t\t\tif (altToPred != null) {\r\n\t\t\t\t\tlet predicates: DFAState.PredPrediction[] | undefined = this.getPredicatePredictions(alts, altToPred);\r\n\t\t\t\t\tif (predicates != null) {\r\n\t\t\t\t\t\tlet stopIndex: number = input.index;\r\n\t\t\t\t\t\ttry {\r\n\t\t\t\t\t\t\tinput.seek(startIndex);\r\n\t\t\t\t\t\t\tlet filteredAlts: BitSet = this.evalSemanticContext(predicates, previous.outerContext, false);\r\n\t\t\t\t\t\t\tif (!filteredAlts.isEmpty) {\r\n\t\t\t\t\t\t\t\treturn filteredAlts.nextSetBit(0);\r\n\t\t\t\t\t\t\t}\r\n\t\t\t\t\t\t}\r\n\t\t\t\t\t\tfinally {\r\n\t\t\t\t\t\t\tinput.seek(stopIndex);\r\n\t\t\t\t\t\t}\r\n\t\t\t\t\t}\r\n\t\t\t\t}\r\n\r\n\t\t\t\treturn alts.nextSetBit(0);\r\n\t\t\t}\r\n\t\t}\r\n\r\n\t\tthrow this.noViableAlt(input, previous.outerContext, previous.s0.configs, startIndex);\r\n\t}\r\n\r\n\tprotected computeReachSet(dfa: DFA, previous: SimulatorState, t: number, contextCache: PredictionContextCache): SimulatorState | undefined {\r\n\t\tlet useContext: boolean = previous.useContext;\r\n\t\tlet remainingGlobalContext: ParserRuleContext | 
undefined = previous.remainingOuterContext;\r\n\r\n\t\tlet s: DFAState = previous.s0;\r\n\t\tif (useContext) {\r\n\t\t\twhile (s.isContextSymbol(t)) {\r\n\t\t\t\tlet next: DFAState | undefined;\r\n\t\t\t\tif (remainingGlobalContext != null) {\r\n\t\t\t\t\tremainingGlobalContext = this.skipTailCalls(remainingGlobalContext);\r\n\t\t\t\t\tnext = s.getContextTarget(this.getReturnState(remainingGlobalContext));\r\n\t\t\t\t}\r\n\r\n\t\t\t\tif (next == null) {\r\n\t\t\t\t\tbreak;\r\n\t\t\t\t}\r\n\r\n\t\t\t\tassert(remainingGlobalContext != null);\r\n\t\t\t\tremainingGlobalContext = remainingGlobalContext.parent;\r\n\t\t\t\ts = next;\r\n\t\t\t}\r\n\t\t}\r\n\r\n\t\tassert(!this.isAcceptState(s, useContext));\r\n\t\tif (this.isAcceptState(s, useContext)) {\r\n\t\t\treturn new SimulatorState(previous.outerContext, s, useContext, remainingGlobalContext);\r\n\t\t}\r\n\r\n\t\tlet s0: DFAState = s;\r\n\r\n\t\tlet target: DFAState | undefined = this.getExistingTargetState(s0, t);\r\n\t\tif (target == null) {\r\n\t\t\tlet result: [DFAState, ParserRuleContext | undefined] = this.computeTargetState(dfa, s0, remainingGlobalContext, t, useContext, contextCache);\r\n\t\t\ttarget = result[0];\r\n\t\t\tremainingGlobalContext = result[1];\r\n\t\t}\r\n\r\n\t\tif (target === ATNSimulator.ERROR) {\r\n\t\t\treturn undefined;\r\n\t\t}\r\n\r\n\t\tassert(!useContext || !target.configs.dipsIntoOuterContext);\r\n\t\treturn new SimulatorState(previous.outerContext, target, useContext, remainingGlobalContext);\r\n\t}\r\n\r\n\t/**\r\n\t * Get an existing target state for an edge in the DFA. 
If the target state\r\n\t * for the edge has not yet been computed or is otherwise not available,\r\n\t * this method returns `undefined`.\r\n\t *\r\n\t * @param s The current DFA state\r\n\t * @param t The next input symbol\r\n\t * @returns The existing target DFA state for the given input symbol\r\n\t * `t`, or `undefined` if the target state for this edge is not\r\n\t * already cached\r\n\t */\r\n\tprotected getExistingTargetState(@NotNull s: DFAState, t: number): DFAState | undefined {\r\n\t\treturn s.getTarget(t);\r\n\t}\r\n\r\n\t/**\r\n\t * Compute a target state for an edge in the DFA, and attempt to add the\r\n\t * computed state and corresponding edge to the DFA.\r\n\t *\r\n\t * @param dfa\r\n\t * @param s The current DFA state\r\n\t * @param remainingGlobalContext\r\n\t * @param t The next input symbol\r\n\t * @param useContext\r\n\t * @param contextCache\r\n\t *\r\n\t * @returns The computed target DFA state for the given input symbol\r\n\t * `t`. If `t` does not lead to a valid DFA state, this method\r\n\t * returns {@link #ERROR}.\r\n\t */\r\n\t@NotNull\r\n\tprotected computeTargetState(@NotNull dfa: DFA, @NotNull s: DFAState, remainingGlobalContext: ParserRuleContext | undefined, t: number, useContext: boolean, contextCache: PredictionContextCache): [DFAState, ParserRuleContext | undefined] {\r\n\t\tlet closureConfigs: ATNConfig[] = s.configs.toArray();\r\n\t\tlet contextElements: IntegerList | undefined;\r\n\t\tlet reach: ATNConfigSet = new ATNConfigSet();\r\n\t\tlet stepIntoGlobal: boolean;\r\n\t\tdo {\r\n\t\t\tlet hasMoreContext: boolean = !useContext || remainingGlobalContext != null;\r\n\t\t\tif (!hasMoreContext) {\r\n\t\t\t\treach.isOutermostConfigSet = true;\r\n\t\t\t}\r\n\r\n\t\t\tlet reachIntermediate: ATNConfigSet = new ATNConfigSet();\r\n\r\n\t\t\t/* Configurations already in a rule stop state indicate reaching the end\r\n\t\t\t * of the decision rule (local context) or end of the start rule (full\r\n\t\t\t * context). 
Once reached, these configurations are never updated by a\r\n\t\t\t * closure operation, so they are handled separately for the performance\r\n\t\t\t * advantage of having a smaller intermediate set when calling closure.\r\n\t\t\t *\r\n\t\t\t * For full-context reach operations, separate handling is required to\r\n\t\t\t * ensure that the alternative matching the longest overall sequence is\r\n\t\t\t * chosen when multiple such configurations can match the input.\r\n\t\t\t */\r\n\t\t\tlet skippedStopStates: ATNConfig[] | undefined;\r\n\r\n\t\t\tfor (let c of closureConfigs) {\r\n\t\t\t\tif (ParserATNSimulator.debug) {\r\n\t\t\t\t\tconsole.log(\"testing \" + this.getTokenName(t) + \" at \" + c.toString());\r\n\t\t\t\t}\r\n\r\n\t\t\t\tif (c.state instanceof RuleStopState) {\r\n\t\t\t\t\tassert(c.context.isEmpty);\r\n\t\t\t\t\tif (useContext && !c.reachesIntoOuterContext || t === IntStream.EOF) {\r\n\t\t\t\t\t\tif (skippedStopStates == null) {\r\n\t\t\t\t\t\t\tskippedStopStates = [];\r\n\t\t\t\t\t\t}\r\n\r\n\t\t\t\t\t\tskippedStopStates.push(c);\r\n\t\t\t\t\t}\r\n\r\n\t\t\t\t\tcontinue;\r\n\t\t\t\t}\r\n\r\n\t\t\t\tlet n: number = c.state.numberOfOptimizedTransitions;\r\n\t\t\t\tfor (let ti = 0; ti < n; ti++) { // for each optimized transition\r\n\t\t\t\t\tlet trans: Transition = c.state.getOptimizedTransition(ti);\r\n\t\t\t\t\tlet target: ATNState | undefined = this.getReachableTarget(c, trans, t);\r\n\t\t\t\t\tif (target != null) {\r\n\t\t\t\t\t\treachIntermediate.add(c.transform(target, false), contextCache);\r\n\t\t\t\t\t}\r\n\t\t\t\t}\r\n\t\t\t}\r\n\r\n\t\t\t/* This block optimizes the reach operation for intermediate sets which\r\n\t\t\t * trivially indicate a termination state for the overall\r\n\t\t\t * adaptivePredict operation.\r\n\t\t\t *\r\n\t\t\t * The conditions assume that intermediate\r\n\t\t\t * contains all configurations relevant to the reach set, but this\r\n\t\t\t * condition is not true when one or more configurations have been\r\n\t\t\t * 
withheld in skippedStopStates, or when the current symbol is EOF.\r\n\t\t\t */\r\n\t\t\tif (this.optimize_unique_closure && skippedStopStates == null && t !== Token.EOF && reachIntermediate.uniqueAlt !== ATN.INVALID_ALT_NUMBER) {\r\n\t\t\t\treachIntermediate.isOutermostConfigSet = reach.isOutermostConfigSet;\r\n\t\t\t\treach = reachIntermediate;\r\n\t\t\t\tbreak;\r\n\t\t\t}\r\n\r\n\t\t\t/* If the reach set could not be trivially determined, perform a closure\r\n\t\t\t * operation on the intermediate set to compute its initial value.\r\n\t\t\t */\r\n\t\t\tlet collectPredicates: boolean = false;\r\n\t\t\tlet treatEofAsEpsilon: boolean = t === Token.EOF;\r\n\t\t\tthis.closure(reachIntermediate, reach, collectPredicates, hasMoreContext, contextCache, treatEofAsEpsilon);\r\n\t\t\tstepIntoGlobal = reach.dipsIntoOuterContext;\r\n\r\n\t\t\tif (t === IntStream.EOF) {\r\n\t\t\t\t/* After consuming EOF no additional input is possible, so we are\r\n\t\t\t\t * only interested in configurations which reached the end of the\r\n\t\t\t\t * decision rule (local context) or end of the start rule (full\r\n\t\t\t\t * context). Update reach to contain only these configurations. This\r\n\t\t\t\t * handles both explicit EOF transitions in the grammar and implicit\r\n\t\t\t\t * EOF transitions following the end of the decision or start rule.\r\n\t\t\t\t *\r\n\t\t\t\t * This is handled before the configurations in skippedStopStates,\r\n\t\t\t\t * because any configurations potentially added from that list are\r\n\t\t\t\t * already guaranteed to meet this condition whether or not it's\r\n\t\t\t\t * required.\r\n\t\t\t\t */\r\n\t\t\t\treach = this.removeAllConfigsNotInRuleStopState(reach, contextCache);\r\n\t\t\t}\r\n\r\n\t\t\t/* If skippedStopStates is not undefined, then it contains at least one\r\n\t\t\t * configuration. 
For full-context reach operations, these\r\n\t\t\t * configurations reached the end of the start rule, in which case we\r\n\t\t\t * only add them back to reach if no configuration during the current\r\n\t\t\t * closure operation reached such a state. This ensures adaptivePredict\r\n\t\t\t * chooses an alternative matching the longest overall sequence when\r\n\t\t\t * multiple alternatives are viable.\r\n\t\t\t */\r\n\t\t\tif (skippedStopStates != null && (!useContext || !PredictionMode.hasConfigInRuleStopState(reach))) {\r\n\t\t\t\tassert(skippedStopStates.length > 0);\r\n\t\t\t\tfor (let c of skippedStopStates) {\r\n\t\t\t\t\treach.add(c, contextCache);\r\n\t\t\t\t}\r\n\t\t\t}\r\n\r\n\t\t\tif (useContext && stepIntoGlobal) {\r\n\t\t\t\treach.clear();\r\n\r\n\t\t\t\t// We know remainingGlobalContext is not undefined at this point (why?)\r\n\t\t\t\tremainingGlobalContext = remainingGlobalContext as ParserRuleContext;\r\n\r\n\t\t\t\tremainingGlobalContext = this.skipTailCalls(remainingGlobalContext);\r\n\t\t\t\tlet nextContextElement: number = this.getReturnState(remainingGlobalContext);\r\n\t\t\t\tif (contextElements == null) {\r\n\t\t\t\t\tcontextElements = new IntegerList();\r\n\t\t\t\t}\r\n\r\n\t\t\t\tif (remainingGlobalContext.isEmpty) {\r\n\t\t\t\t\tremainingGlobalContext = undefined;\r\n\t\t\t\t} else {\r\n\t\t\t\t\tremainingGlobalContext = remainingGlobalContext.parent;\r\n\t\t\t\t}\r\n\r\n\t\t\t\tcontextElements.add(nextContextElement);\r\n\t\t\t\tif (nextContextElement !== PredictionContext.EMPTY_FULL_STATE_KEY) {\r\n\t\t\t\t\tfor (let i = 0; i < closureConfigs.length; i++) {\r\n\t\t\t\t\t\tclosureConfigs[i] = closureConfigs[i].appendContext(nextContextElement, contextCache);\r\n\t\t\t\t\t}\r\n\t\t\t\t}\r\n\t\t\t}\r\n\t\t} while (useContext && stepIntoGlobal);\r\n\r\n\t\tif (reach.isEmpty) {\r\n\t\t\tthis.setDFAEdge(s, t, ATNSimulator.ERROR);\r\n\t\t\treturn [ATNSimulator.ERROR, remainingGlobalContext];\r\n\t\t}\r\n\r\n\t\tlet result: DFAState = 
this.addDFAEdge(dfa, s, t, contextElements, reach, contextCache);\r\n\t\treturn [result, remainingGlobalContext];\r\n\t}\r\n\r\n\t/**\r\n\t * Return a configuration set containing only the configurations from\r\n\t * `configs` which are in a {@link RuleStopState}. If all\r\n\t * configurations in `configs` are already in a rule stop state, this\r\n\t * method simply returns `configs`.\r\n\t *\r\n\t * @param configs the configuration set to update\r\n\t * @param contextCache the {@link PredictionContext} cache\r\n\t *\r\n\t * @returns `configs` if all configurations in `configs` are in a\r\n\t * rule stop state, otherwise return a new configuration set containing only\r\n\t * the configurations from `configs` which are in a rule stop state\r\n\t */\r\n\t@NotNull\r\n\tprotected removeAllConfigsNotInRuleStopState(@NotNull configs: ATNConfigSet, contextCache: PredictionContextCache): ATNConfigSet {\r\n\t\tif (PredictionMode.allConfigsInRuleStopStates(configs)) {\r\n\t\t\treturn configs;\r\n\t\t}\r\n\r\n\t\tlet result: ATNConfigSet = new ATNConfigSet();\r\n\t\tfor (let config of configs) {\r\n\t\t\tif (!(config.state instanceof RuleStopState)) {\r\n\t\t\t\tcontinue;\r\n\t\t\t}\r\n\r\n\t\t\tresult.add(config, contextCache);\r\n\t\t}\r\n\r\n\t\treturn result;\r\n\t}\r\n\r\n\t@NotNull\r\n\tprotected computeStartState(\r\n\t\tdfa: DFA,\r\n\t\tglobalContext: ParserRuleContext,\r\n\t\tuseContext: boolean): SimulatorState {\r\n\t\tlet s0: DFAState | undefined =\r\n\t\t\tdfa.isPrecedenceDfa ? dfa.getPrecedenceStartState(this._parser.precedence, useContext) :\r\n\t\t\t\tuseContext ? 
dfa.s0full :\r\n\t\t\t\t\tdfa.s0;\r\n\r\n\t\tif (s0 != null) {\r\n\t\t\tif (!useContext) {\r\n\t\t\t\treturn new SimulatorState(globalContext, s0, useContext, globalContext);\r\n\t\t\t}\r\n\r\n\t\t\ts0.setContextSensitive(this.atn);\r\n\t\t}\r\n\r\n\t\tlet decision: number = dfa.decision;\r\n\t\t// @NotNull\r\n\t\tlet p: ATNState = dfa.atnStartState;\r\n\r\n\t\tlet previousContext: number = 0;\r\n\t\tlet remainingGlobalContext: ParserRuleContext | undefined = globalContext;\r\n\t\tlet initialContext: PredictionContext = useContext ? PredictionContext.EMPTY_FULL : PredictionContext.EMPTY_LOCAL; // always at least the implicit call to start rule\r\n\t\tlet contextCache: PredictionContextCache = new PredictionContextCache();\r\n\t\tif (useContext) {\r\n\t\t\tif (!this.enable_global_context_dfa) {\r\n\t\t\t\twhile (remainingGlobalContext != null) {\r\n\t\t\t\t\tif (remainingGlobalContext.isEmpty) {\r\n\t\t\t\t\t\tpreviousContext = PredictionContext.EMPTY_FULL_STATE_KEY;\r\n\t\t\t\t\t\tremainingGlobalContext = undefined;\r\n\t\t\t\t\t}\r\n\t\t\t\t\telse {\r\n\t\t\t\t\t\tpreviousContext = this.getReturnState(remainingGlobalContext);\r\n\t\t\t\t\t\tinitialContext = initialContext.appendSingleContext(previousContext, contextCache);\r\n\t\t\t\t\t\tremainingGlobalContext = remainingGlobalContext.parent;\r\n\t\t\t\t\t}\r\n\t\t\t\t}\r\n\t\t\t}\r\n\r\n\t\t\twhile (s0 != null && s0.isContextSensitive && remainingGlobalContext != null) {\r\n\t\t\t\tlet next: DFAState | undefined;\r\n\t\t\t\tremainingGlobalContext = this.skipTailCalls(remainingGlobalContext);\r\n\t\t\t\tif (remainingGlobalContext.isEmpty) {\r\n\t\t\t\t\tnext = s0.getContextTarget(PredictionContext.EMPTY_FULL_STATE_KEY);\r\n\t\t\t\t\tpreviousContext = PredictionContext.EMPTY_FULL_STATE_KEY;\r\n\t\t\t\t\tremainingGlobalContext = undefined;\r\n\t\t\t\t}\r\n\t\t\t\telse {\r\n\t\t\t\t\tpreviousContext = this.getReturnState(remainingGlobalContext);\r\n\t\t\t\t\tnext = 
s0.getContextTarget(previousContext);\r\n\t\t\t\t\tinitialContext = initialContext.appendSingleContext(previousContext, contextCache);\r\n\t\t\t\t\tremainingGlobalContext = remainingGlobalContext.parent;\r\n\t\t\t\t}\r\n\r\n\t\t\t\tif (next == null) {\r\n\t\t\t\t\tbreak;\r\n\t\t\t\t}\r\n\r\n\t\t\t\ts0 = next;\r\n\t\t\t}\r\n\t\t}\r\n\r\n\t\tif (s0 != null && !s0.isContextSensitive) {\r\n\t\t\treturn new SimulatorState(globalContext, s0, useContext, remainingGlobalContext);\r\n\t\t}\r\n\r\n\t\tlet configs: ATNConfigSet = new ATNConfigSet();\r\n\t\twhile (true) {\r\n\t\t\tlet reachIntermediate: ATNConfigSet = new ATNConfigSet();\r\n\t\t\tlet n: number = p.numberOfTransitions;\r\n\t\t\tfor (let ti = 0; ti < n; ti++) {\r\n\t\t\t\t// for each transition\r\n\t\t\t\tlet target: ATNState = p.transition(ti).target;\r\n\t\t\t\treachIntermediate.add(ATNConfig.create(target, ti + 1, initialContext));\r\n\t\t\t}\r\n\r\n\t\t\tlet hasMoreContext: boolean = remainingGlobalContext != null;\r\n\t\t\tif (!hasMoreContext) {\r\n\t\t\t\tconfigs.isOutermostConfigSet = true;\r\n\t\t\t}\r\n\r\n\t\t\tlet collectPredicates: boolean = true;\r\n\t\t\tthis.closure(reachIntermediate, configs, collectPredicates, hasMoreContext, contextCache, false);\r\n\t\t\tlet stepIntoGlobal: boolean = configs.dipsIntoOuterContext;\r\n\r\n\t\t\tlet next: DFAState;\r\n\t\t\tif (useContext && !this.enable_global_context_dfa) {\r\n\t\t\t\ts0 = this.addDFAState(dfa, configs, contextCache);\r\n\t\t\t\tbreak;\r\n\t\t\t}\r\n\t\t\telse if (s0 == null) {\r\n\t\t\t\tif (!dfa.isPrecedenceDfa) {\r\n\t\t\t\t\tnext = this.addDFAState(dfa, configs, contextCache);\r\n\t\t\t\t\tif (useContext) {\r\n\t\t\t\t\t\tif (!dfa.s0full) {\r\n\t\t\t\t\t\t\tdfa.s0full = next;\r\n\t\t\t\t\t\t} else {\r\n\t\t\t\t\t\t\tnext = dfa.s0full;\r\n\t\t\t\t\t\t}\r\n\t\t\t\t\t} else {\r\n\t\t\t\t\t\tif (!dfa.s0) {\r\n\t\t\t\t\t\t\tdfa.s0 = next;\r\n\t\t\t\t\t\t} else {\r\n\t\t\t\t\t\t\tnext = 
dfa.s0;\r\n\t\t\t\t\t\t}\r\n\t\t\t\t\t}\r\n\t\t\t\t}\r\n\t\t\t\telse {\r\n\t\t\t\t\t/* If this is a precedence DFA, we use applyPrecedenceFilter\r\n\t\t\t\t\t * to convert the computed start state to a precedence start\r\n\t\t\t\t\t * state. We then use DFA.setPrecedenceStartState to set the\r\n\t\t\t\t\t * appropriate start state for the precedence level rather\r\n\t\t\t\t\t * than simply setting DFA.s0.\r\n\t\t\t\t\t */\r\n\t\t\t\t\tconfigs = this.applyPrecedenceFilter(configs, globalContext, contextCache);\r\n\t\t\t\t\tnext = this.addDFAState(dfa, configs, contextCache);\r\n\t\t\t\t\tdfa.setPrecedenceStartState(this._parser.precedence, useContext, next);\r\n\t\t\t\t}\r\n\t\t\t}\r\n\t\t\telse {\r\n\t\t\t\tif (dfa.isPrecedenceDfa) {\r\n\t\t\t\t\tconfigs = this.applyPrecedenceFilter(configs, globalContext, contextCache);\r\n\t\t\t\t}\r\n\r\n\t\t\t\tnext = this.addDFAState(dfa, configs, contextCache);\r\n\t\t\t\ts0.setContextTarget(previousContext, next);\r\n\t\t\t}\r\n\r\n\t\t\ts0 = next;\r\n\r\n\t\t\tif (!useContext || !stepIntoGlobal) {\r\n\t\t\t\tbreak;\r\n\t\t\t}\r\n\r\n\t\t\t// TODO: make sure it distinguishes empty stack states\r\n\t\t\tnext.setContextSensitive(this.atn);\r\n\r\n\t\t\t// We know remainingGlobalContext is not undefined at this point (why?)\r\n\t\t\tremainingGlobalContext = remainingGlobalContext as ParserRuleContext;\r\n\r\n\t\t\tconfigs.clear();\r\n\t\t\tremainingGlobalContext = this.skipTailCalls(remainingGlobalContext);\r\n\t\t\tlet nextContextElement: number = this.getReturnState(remainingGlobalContext);\r\n\r\n\t\t\tif (remainingGlobalContext.isEmpty) {\r\n\t\t\t\tremainingGlobalContext = undefined;\r\n\t\t\t} else {\r\n\t\t\t\tremainingGlobalContext = remainingGlobalContext.parent;\r\n\t\t\t}\r\n\r\n\t\t\tif (nextContextElement !== PredictionContext.EMPTY_FULL_STATE_KEY) {\r\n\t\t\t\tinitialContext = initialContext.appendSingleContext(nextContextElement, contextCache);\r\n\t\t\t}\r\n\r\n\t\t\tpreviousContext = 
nextContextElement;\r\n\t\t}\r\n\r\n\t\treturn new SimulatorState(globalContext, s0, useContext, remainingGlobalContext);\r\n\t}\r\n\r\n\t/**\r\n\t * This method transforms the start state computed by\r\n\t * {@link #computeStartState} to the special start state used by a\r\n\t * precedence DFA for a particular precedence value. The transformation\r\n\t * process applies the following changes to the start state's configuration\r\n\t * set.\r\n\t *\r\n\t * 1. Evaluate the precedence predicates for each configuration using\r\n\t * {@link SemanticContext#evalPrecedence}.\r\n\t * 1. When {@link ATNConfig#isPrecedenceFilterSuppressed} is `false`,\r\n\t * remove all configurations which predict an alternative greater than 1,\r\n\t * for which another configuration that predicts alternative 1 is in the\r\n\t * same ATN state with the same prediction context. This transformation is\r\n\t * valid for the following reasons:\r\n\t *\r\n\t * * The closure block cannot contain any epsilon transitions which bypass\r\n\t * the body of the closure, so all states reachable via alternative 1 are\r\n\t * part of the precedence alternatives of the transformed left-recursive\r\n\t * rule.\r\n\t * * The \"primary\" portion of a left recursive rule cannot contain an\r\n\t * epsilon transition, so the only way an alternative other than 1 can exist\r\n\t * in a state that is also reachable via alternative 1 is by nesting calls\r\n\t * to the left-recursive rule, with the outer calls not being at the\r\n\t * preferred precedence level. 
The\r\n\t * {@link ATNConfig#isPrecedenceFilterSuppressed} property marks ATN\r\n\t * configurations which do not meet this condition, and therefore are not\r\n\t * eligible for elimination during the filtering process.\r\n\t *\r\n\t * The prediction context must be considered by this filter to address\r\n\t * situations like the following.\r\n\t *\r\n\t * ```antlr\r\n\t * grammar TA;\r\n\t * prog: statement* EOF;\r\n\t * statement: letterA | statement letterA 'b' ;\r\n\t * letterA: 'a';\r\n\t * ```\r\n\t *\r\n\t * If the above grammar, the ATN state immediately before the token\r\n\t * reference `'a'` in `letterA` is reachable from the left edge\r\n\t * of both the primary and closure blocks of the left-recursive rule\r\n\t * `statement`. The prediction context associated with each of these\r\n\t * configurations distinguishes between them, and prevents the alternative\r\n\t * which stepped out to `prog` (and then back in to `statement`\r\n\t * from being eliminated by the filter.\r\n\t *\r\n\t * @param configs The configuration set computed by\r\n\t * {@link #computeStartState} as the start state for the DFA.\r\n\t * @returns The transformed configuration set representing the start state\r\n\t * for a precedence DFA at a particular precedence level (determined by\r\n\t * calling {@link Parser#getPrecedence}).\r\n\t */\r\n\t@NotNull\r\n\tprotected applyPrecedenceFilter(@NotNull configs: ATNConfigSet, globalContext: ParserRuleContext, contextCache: PredictionContextCache): ATNConfigSet {\r\n\t\tlet statesFromAlt1: Map = new Map();\r\n\t\tlet configSet: ATNConfigSet = new ATNConfigSet();\r\n\t\tfor (let config of configs) {\r\n\t\t\t// handle alt 1 first\r\n\t\t\tif (config.alt !== 1) {\r\n\t\t\t\tcontinue;\r\n\t\t\t}\r\n\r\n\t\t\tlet updatedContext: SemanticContext | undefined = config.semanticContext.evalPrecedence(this._parser, globalContext);\r\n\t\t\tif (updatedContext == null) {\r\n\t\t\t\t// the configuration was 
eliminated\r\n\t\t\t\tcontinue;\r\n\t\t\t}\r\n\r\n\t\t\tstatesFromAlt1.set(config.state.stateNumber, config.context);\r\n\t\t\tif (updatedContext !== config.semanticContext) {\r\n\t\t\t\tconfigSet.add(config.transform(config.state, false, updatedContext), contextCache);\r\n\t\t\t}\r\n\t\t\telse {\r\n\t\t\t\tconfigSet.add(config, contextCache);\r\n\t\t\t}\r\n\t\t}\r\n\r\n\t\tfor (let config of configs) {\r\n\t\t\tif (config.alt === 1) {\r\n\t\t\t\t// already handled\r\n\t\t\t\tcontinue;\r\n\t\t\t}\r\n\r\n\t\t\tif (!config.isPrecedenceFilterSuppressed) {\r\n\t\t\t\t/* In the future, this elimination step could be updated to also\r\n\t\t\t\t * filter the prediction context for alternatives predicting alt>1\r\n\t\t\t\t * (basically a graph subtraction algorithm).\r\n\t\t\t\t */\r\n\t\t\t\tlet context: PredictionContext | undefined = statesFromAlt1.get(config.state.stateNumber);\r\n\t\t\t\tif (context != null && context.equals(config.context)) {\r\n\t\t\t\t\t// eliminated\r\n\t\t\t\t\tcontinue;\r\n\t\t\t\t}\r\n\t\t\t}\r\n\r\n\t\t\tconfigSet.add(config, contextCache);\r\n\t\t}\r\n\r\n\t\treturn configSet;\r\n\t}\r\n\r\n\tprotected getReachableTarget(@NotNull source: ATNConfig, @NotNull trans: Transition, ttype: number): ATNState | undefined {\r\n\t\tif (trans.matches(ttype, 0, this.atn.maxTokenType)) {\r\n\t\t\treturn trans.target;\r\n\t\t}\r\n\r\n\t\treturn undefined;\r\n\t}\r\n\r\n\t/** collect and set D's semantic context */\r\n\tprotected predicateDFAState(\r\n\t\tD: DFAState,\r\n\t\tconfigs: ATNConfigSet,\r\n\t\tnalts: number): DFAState.PredPrediction[] | undefined {\r\n\t\tlet conflictingAlts: BitSet | undefined = this.getConflictingAltsFromConfigSet(configs);\r\n\t\tif (!conflictingAlts) {\r\n\t\t\tthrow new Error(\"This unhandled scenario is intended to be unreachable, but I'm currently not sure of why we know that's the case.\");\r\n\t\t}\r\n\r\n\t\tif (ParserATNSimulator.debug) {\r\n\t\t\tconsole.log(\"predicateDFAState \" + D);\r\n\t\t}\r\n\t\tlet altToPred: 
SemanticContext[] | undefined = this.getPredsForAmbigAlts(conflictingAlts, configs, nalts);\r\n\t\t// altToPred[uniqueAlt] is now our validating predicate (if any)\r\n\t\tlet predPredictions: DFAState.PredPrediction[] | undefined;\r\n\t\tif (altToPred != null) {\r\n\t\t\t// we have a validating predicate; test it\r\n\t\t\t// Update DFA so reach becomes accept state with predicate\r\n\t\t\tpredPredictions = this.getPredicatePredictions(conflictingAlts, altToPred);\r\n\t\t\tD.predicates = predPredictions;\r\n\t\t}\r\n\t\treturn predPredictions;\r\n\t}\r\n\r\n\tprotected getPredsForAmbigAlts(\r\n\t\t@NotNull ambigAlts: BitSet,\r\n\t\t@NotNull configs: ATNConfigSet,\r\n\t\tnalts: number): SemanticContext[] | undefined {\r\n\t\t// REACH=[1|1|[]|0:0, 1|2|[]|0:1]\r\n\r\n\t\t/* altToPred starts as an array of all undefined contexts. The entry at index i\r\n\t\t * corresponds to alternative i. altToPred[i] may have one of three values:\r\n\t\t * 1. undefined: no ATNConfig c is found such that c.alt===i\r\n\t\t * 2. SemanticContext.NONE: At least one ATNConfig c exists such that\r\n\t\t * c.alt===i and c.semanticContext===SemanticContext.NONE. In other words,\r\n\t\t * alt i has at least one unpredicated config.\r\n\t\t * 3. 
Non-NONE Semantic Context: There exists at least one, and for all\r\n\t\t * ATNConfig c such that c.alt===i, c.semanticContext!==SemanticContext.NONE.\r\n\t\t *\r\n\t\t * From this, it is clear that NONE||anything==NONE.\r\n\t\t */\r\n\t\tlet altToPred: Array | undefined = new Array(nalts + 1);\r\n\t\tlet n: number = altToPred.length;\r\n\t\tfor (let c of configs) {\r\n\t\t\tif (ambigAlts.get(c.alt)) {\r\n\t\t\t\taltToPred[c.alt] = SemanticContext.or(altToPred[c.alt], c.semanticContext);\r\n\t\t\t}\r\n\t\t}\r\n\r\n\t\tlet nPredAlts: number = 0;\r\n\t\tfor (let i = 0; i < n; i++) {\r\n\t\t\tif (altToPred[i] == null) {\r\n\t\t\t\taltToPred[i] = SemanticContext.NONE;\r\n\t\t\t}\r\n\t\t\telse if (altToPred[i] !== SemanticContext.NONE) {\r\n\t\t\t\tnPredAlts++;\r\n\t\t\t}\r\n\t\t}\r\n\r\n\t\t// At this point we know `altToPred` doesn't contain any undefined entries\r\n\t\tlet result: SemanticContext[] | undefined = altToPred as SemanticContext[];\r\n\r\n\t\t// nonambig alts are undefined in result\r\n\t\tif (nPredAlts === 0) {\r\n\t\t\tresult = undefined;\r\n\t\t}\r\n\t\tif (ParserATNSimulator.debug) {\r\n\t\t\tconsole.log(\"getPredsForAmbigAlts result \" + (result ? 
Arrays.toString(result) : \"undefined\"));\r\n\t\t}\r\n\t\treturn result;\r\n\t}\r\n\r\n\tprotected getPredicatePredictions(ambigAlts: BitSet | undefined, altToPred: SemanticContext[]): DFAState.PredPrediction[] | undefined {\r\n\t\tlet pairs: DFAState.PredPrediction[] = [];\r\n\t\tlet containsPredicate: boolean = false;\r\n\t\tfor (let i = 1; i < altToPred.length; i++) {\r\n\t\t\tlet pred: SemanticContext = altToPred[i];\r\n\r\n\t\t\t// unpredicated is indicated by SemanticContext.NONE\r\n\t\t\tassert(pred != null);\r\n\r\n\t\t\t// find first unpredicated but ambig alternative, if any.\r\n\t\t\t// Only ambiguous alternatives will have SemanticContext.NONE.\r\n\t\t\t// Any unambig alts or ambig naked alts after first ambig naked are ignored\r\n\t\t\t// (undefined, i) means alt i is the default prediction\r\n\t\t\t// if no (undefined, i), then no default prediction.\r\n\t\t\tif (ambigAlts != null && ambigAlts.get(i) && pred === SemanticContext.NONE) {\r\n\t\t\t\tpairs.push(new DFAState.PredPrediction(pred, i));\r\n\t\t\t}\r\n\t\t\telse if (pred !== SemanticContext.NONE) {\r\n\t\t\t\tcontainsPredicate = true;\r\n\t\t\t\tpairs.push(new DFAState.PredPrediction(pred, i));\r\n\t\t\t}\r\n\t\t}\r\n\r\n\t\tif (!containsPredicate) {\r\n\t\t\treturn undefined;\r\n\t\t}\r\n\r\n//\t\tSystem.out.println(Arrays.toString(altToPred)+\"->\"+pairs);\r\n\t\treturn pairs;\r\n\t}\r\n\r\n\t/** Look through a list of predicate/alt pairs, returning alts for the\r\n\t * pairs that win. 
An `undefined` predicate indicates an alt containing an\r\n\t * unpredicated config which behaves as \"always true.\"\r\n\t */\r\n\tprotected evalSemanticContext(\r\n\t\t@NotNull predPredictions: DFAState.PredPrediction[],\r\n\t\touterContext: ParserRuleContext,\r\n\t\tcomplete: boolean): BitSet {\r\n\t\tlet predictions: BitSet = new BitSet();\r\n\t\tfor (let pair of predPredictions) {\r\n\t\t\tif (pair.pred === SemanticContext.NONE) {\r\n\t\t\t\tpredictions.set(pair.alt);\r\n\t\t\t\tif (!complete) {\r\n\t\t\t\t\tbreak;\r\n\t\t\t\t}\r\n\r\n\t\t\t\tcontinue;\r\n\t\t\t}\r\n\r\n\t\t\tlet evaluatedResult: boolean = this.evalSemanticContextImpl(pair.pred, outerContext, pair.alt);\r\n\t\t\tif (ParserATNSimulator.debug || ParserATNSimulator.dfa_debug) {\r\n\t\t\t\tconsole.log(\"eval pred \" + pair + \"=\" + evaluatedResult);\r\n\t\t\t}\r\n\r\n\t\t\tif (evaluatedResult) {\r\n\t\t\t\tif (ParserATNSimulator.debug || ParserATNSimulator.dfa_debug) {\r\n\t\t\t\t\tconsole.log(\"PREDICT \" + pair.alt);\r\n\t\t\t\t}\r\n\t\t\t\tpredictions.set(pair.alt);\r\n\t\t\t\tif (!complete) {\r\n\t\t\t\t\tbreak;\r\n\t\t\t\t}\r\n\t\t\t}\r\n\t\t}\r\n\r\n\t\treturn predictions;\r\n\t}\r\n\r\n\t/**\r\n\t * Evaluate a semantic context within a specific parser context.\r\n\t *\r\n\t * This method might not be called for every semantic context evaluated\r\n\t * during the prediction process. 
In particular, we currently do not\r\n\t * evaluate the following but it may change in the future:\r\n\t *\r\n\t * * Precedence predicates (represented by\r\n\t * {@link SemanticContext.PrecedencePredicate}) are not currently evaluated\r\n\t * through this method.\r\n\t * * Operator predicates (represented by {@link SemanticContext.AND} and\r\n\t * {@link SemanticContext.OR}) are evaluated as a single semantic\r\n\t * context, rather than evaluating the operands individually.\r\n\t * Implementations which require evaluation results from individual\r\n\t * predicates should override this method to explicitly handle evaluation of\r\n\t * the operands within operator predicates.\r\n\t *\r\n\t * @param pred The semantic context to evaluate\r\n\t * @param parserCallStack The parser context in which to evaluate the\r\n\t * semantic context\r\n\t * @param alt The alternative which is guarded by `pred`\r\n\t *\r\n\t * @since 4.3\r\n\t */\r\n\tprotected evalSemanticContextImpl(@NotNull pred: SemanticContext, parserCallStack: ParserRuleContext, alt: number): boolean {\r\n\t\treturn pred.eval(this._parser, parserCallStack);\r\n\t}\r\n\r\n\t/* TODO: If we are doing predicates, there is no point in pursuing\r\n\t\t closure operations if we reach a DFA state that uniquely predicts\r\n\t\t alternative. We will not be caching that DFA state and it is a\r\n\t\t waste to pursue the closure. 
Might have to advance when we do\r\n\t\t ambig detection thought :(\r\n\t\t */\r\n\r\n\tprotected closure(\r\n\t\tsourceConfigs: ATNConfigSet,\r\n\t\t@NotNull configs: ATNConfigSet,\r\n\t\tcollectPredicates: boolean,\r\n\t\thasMoreContext: boolean,\r\n\t\t@Nullable contextCache: PredictionContextCache,\r\n\t\ttreatEofAsEpsilon: boolean): void {\r\n\t\tif (contextCache == null) {\r\n\t\t\tcontextCache = PredictionContextCache.UNCACHED;\r\n\t\t}\r\n\r\n\t\tlet currentConfigs: ATNConfigSet = sourceConfigs;\r\n\t\tlet closureBusy: Array2DHashSet = new Array2DHashSet(ObjectEqualityComparator.INSTANCE);\r\n\t\twhile (currentConfigs.size > 0) {\r\n\t\t\tlet intermediate: ATNConfigSet = new ATNConfigSet();\r\n\t\t\tfor (let config of currentConfigs) {\r\n\t\t\t\tthis.closureImpl(config, configs, intermediate, closureBusy, collectPredicates, hasMoreContext, contextCache, 0, treatEofAsEpsilon);\r\n\t\t\t}\r\n\r\n\t\t\tcurrentConfigs = intermediate;\r\n\t\t}\r\n\t}\r\n\r\n\tprotected closureImpl(\r\n\t\t@NotNull config: ATNConfig,\r\n\t\t@NotNull configs: ATNConfigSet,\r\n\t\t@Nullable intermediate: ATNConfigSet,\r\n\t\t@NotNull closureBusy: Array2DHashSet,\r\n\t\tcollectPredicates: boolean,\r\n\t\thasMoreContexts: boolean,\r\n\t\t@NotNull contextCache: PredictionContextCache,\r\n\t\tdepth: number,\r\n\t\ttreatEofAsEpsilon: boolean): void {\r\n\t\tif (ParserATNSimulator.debug) {\r\n\t\t\tconsole.log(\"closure(\" + config.toString(this._parser, true) + \")\");\r\n\t\t}\r\n\r\n\t\tif (config.state instanceof RuleStopState) {\r\n\t\t\t// We hit rule end. If we have context info, use it\r\n\t\t\tif (!config.context.isEmpty) {\r\n\t\t\t\tlet hasEmpty: boolean = config.context.hasEmpty;\r\n\t\t\t\tlet nonEmptySize: number = config.context.size - (hasEmpty ? 
1 : 0);\r\n\t\t\t\tfor (let i = 0; i < nonEmptySize; i++) {\r\n\t\t\t\t\tlet newContext: PredictionContext = config.context.getParent(i); // \"pop\" return state\r\n\t\t\t\t\tlet returnState: ATNState = this.atn.states[config.context.getReturnState(i)];\r\n\t\t\t\t\tlet c: ATNConfig = ATNConfig.create(returnState, config.alt, newContext, config.semanticContext);\r\n\t\t\t\t\t// While we have context to pop back from, we may have\r\n\t\t\t\t\t// gotten that context AFTER having fallen off a rule.\r\n\t\t\t\t\t// Make sure we track that we are now out of context.\r\n\t\t\t\t\tc.outerContextDepth = config.outerContextDepth;\r\n\t\t\t\t\tc.isPrecedenceFilterSuppressed = config.isPrecedenceFilterSuppressed;\r\n\t\t\t\t\tassert(depth > MIN_INTEGER_VALUE);\r\n\t\t\t\t\tthis.closureImpl(c, configs, intermediate, closureBusy, collectPredicates, hasMoreContexts, contextCache, depth - 1, treatEofAsEpsilon);\r\n\t\t\t\t}\r\n\r\n\t\t\t\tif (!hasEmpty || !hasMoreContexts) {\r\n\t\t\t\t\treturn;\r\n\t\t\t\t}\r\n\r\n\t\t\t\tconfig = config.transform(config.state, false, PredictionContext.EMPTY_LOCAL);\r\n\t\t\t}\r\n\t\t\telse if (!hasMoreContexts) {\r\n\t\t\t\tconfigs.add(config, contextCache);\r\n\t\t\t\treturn;\r\n\t\t\t}\r\n\t\t\telse {\r\n\t\t\t\t// else if we have no context info, just chase follow links (if greedy)\r\n\t\t\t\tif (ParserATNSimulator.debug) {\r\n\t\t\t\t\tconsole.log(\"FALLING off rule \" +\r\n\t\t\t\t\t\tthis.getRuleName(config.state.ruleIndex));\r\n\t\t\t\t}\r\n\r\n\t\t\t\tif (config.context === PredictionContext.EMPTY_FULL) {\r\n\t\t\t\t\t// no need to keep full context overhead when we step out\r\n\t\t\t\t\tconfig = config.transform(config.state, false, PredictionContext.EMPTY_LOCAL);\r\n\t\t\t\t}\r\n\t\t\t\telse if (!config.reachesIntoOuterContext && PredictionContext.isEmptyLocal(config.context)) {\r\n\t\t\t\t\t// add stop state when leaving decision rule for the first time\r\n\t\t\t\t\tconfigs.add(config, 
contextCache);\r\n\t\t\t\t}\r\n\t\t\t}\r\n\t\t}\r\n\r\n\t\tlet p: ATNState = config.state;\r\n\t\t// optimization\r\n\t\tif (!p.onlyHasEpsilonTransitions) {\r\n\t\t\tconfigs.add(config, contextCache);\r\n\t\t\t// make sure to not return here, because EOF transitions can act as\r\n\t\t\t// both epsilon transitions and non-epsilon transitions.\r\n\t\t\tif (ParserATNSimulator.debug) {\r\n\t\t\t\tconsole.log(\"added config \" + configs);\r\n\t\t\t}\r\n\t\t}\r\n\r\n\t\tfor (let i = 0; i < p.numberOfOptimizedTransitions; i++) {\r\n\t\t\t// This block implements first-edge elimination of ambiguous LR\r\n\t\t\t// alternatives as part of dynamic disambiguation during prediction.\r\n\t\t\t// See antlr/antlr4#1398.\r\n\t\t\tif (i === 0\r\n\t\t\t\t&& p.stateType === ATNStateType.STAR_LOOP_ENTRY\r\n\t\t\t\t&& (p as StarLoopEntryState).precedenceRuleDecision\r\n\t\t\t\t&& !config.context.hasEmpty) {\r\n\r\n\t\t\t\tlet precedenceDecision = p as StarLoopEntryState;\r\n\r\n\t\t\t\t// When suppress is true, it means the outgoing edge i==0 is\r\n\t\t\t\t// ambiguous with the outgoing edge i==1, and thus the closure\r\n\t\t\t\t// operation can dynamically disambiguate by suppressing this\r\n\t\t\t\t// edge during the closure operation.\r\n\t\t\t\tlet suppress: boolean = true;\r\n\t\t\t\tfor (let j: number = 0; j < config.context.size; j++) {\r\n\t\t\t\t\tif (!precedenceDecision.precedenceLoopbackStates.get(config.context.getReturnState(j))) {\r\n\t\t\t\t\t\tsuppress = false;\r\n\t\t\t\t\t\tbreak;\r\n\t\t\t\t\t}\r\n\t\t\t\t}\r\n\r\n\t\t\t\tif (suppress) {\r\n\t\t\t\t\tcontinue;\r\n\t\t\t\t}\r\n\t\t\t}\r\n\r\n\t\t\tlet t: Transition = p.getOptimizedTransition(i);\r\n\t\t\tlet continueCollecting: boolean =\r\n\t\t\t\t!(t instanceof ActionTransition) && collectPredicates;\r\n\t\t\tlet c: ATNConfig | undefined = this.getEpsilonTarget(config, t, continueCollecting, depth === 0, contextCache, treatEofAsEpsilon);\r\n\t\t\tif (c != null) {\r\n\t\t\t\tif (t instanceof RuleTransition) 
{\r\n\t\t\t\t\tif (intermediate != null && !collectPredicates) {\r\n\t\t\t\t\t\tintermediate.add(c, contextCache);\r\n\t\t\t\t\t\tcontinue;\r\n\t\t\t\t\t}\r\n\t\t\t\t}\r\n\r\n\t\t\t\tlet newDepth: number = depth;\r\n\t\t\t\tif (config.state instanceof RuleStopState) {\r\n\t\t\t\t\t// target fell off end of rule; mark resulting c as having dipped into outer context\r\n\t\t\t\t\t// We can't get here if incoming config was rule stop and we had context\r\n\t\t\t\t\t// track how far we dip into outer context. Might\r\n\t\t\t\t\t// come in handy and we avoid evaluating context dependent\r\n\t\t\t\t\t// preds if this is > 0.\r\n\r\n\t\t\t\t\tif (this.dfa != null && this.dfa.isPrecedenceDfa) {\r\n\t\t\t\t\t\tlet outermostPrecedenceReturn: number = (t as EpsilonTransition).outermostPrecedenceReturn;\r\n\t\t\t\t\t\tif (outermostPrecedenceReturn === this.dfa.atnStartState.ruleIndex) {\r\n\t\t\t\t\t\t\tc.isPrecedenceFilterSuppressed = true;\r\n\t\t\t\t\t\t}\r\n\t\t\t\t\t}\r\n\r\n\t\t\t\t\tc.outerContextDepth = c.outerContextDepth + 1;\r\n\r\n\t\t\t\t\tif (!closureBusy.add(c)) {\r\n\t\t\t\t\t\t// avoid infinite recursion for right-recursive rules\r\n\t\t\t\t\t\tcontinue;\r\n\t\t\t\t\t}\r\n\r\n\t\t\t\t\tassert(newDepth > MIN_INTEGER_VALUE);\r\n\t\t\t\t\tnewDepth--;\r\n\t\t\t\t\tif (ParserATNSimulator.debug) {\r\n\t\t\t\t\t\tconsole.log(\"dips into outer ctx: \" + c);\r\n\t\t\t\t\t}\r\n\t\t\t\t}\r\n\t\t\t\telse if (t instanceof RuleTransition) {\r\n\t\t\t\t\tif (this.optimize_tail_calls && t.optimizedTailCall && (!this.tail_call_preserves_sll || !PredictionContext.isEmptyLocal(config.context))) {\r\n\t\t\t\t\t\tassert(c.context === config.context);\r\n\t\t\t\t\t\tif (newDepth === 0) {\r\n\t\t\t\t\t\t\t// the pop/push of a tail call would keep the depth\r\n\t\t\t\t\t\t\t// constant, except we latch if it goes negative\r\n\t\t\t\t\t\t\tnewDepth--;\r\n\t\t\t\t\t\t\tif (!this.tail_call_preserves_sll && PredictionContext.isEmptyLocal(config.context)) {\r\n\t\t\t\t\t\t\t\t// make sure 
the SLL config \"dips into the outer context\" or prediction may not fall back to LL on conflict\r\n\t\t\t\t\t\t\t\tc.outerContextDepth = c.outerContextDepth + 1;\r\n\t\t\t\t\t\t\t}\r\n\t\t\t\t\t\t}\r\n\t\t\t\t\t}\r\n\t\t\t\t\telse {\r\n\t\t\t\t\t\t// latch when newDepth goes negative - once we step out of the entry context we can't return\r\n\t\t\t\t\t\tif (newDepth >= 0) {\r\n\t\t\t\t\t\t\tnewDepth++;\r\n\t\t\t\t\t\t}\r\n\t\t\t\t\t}\r\n\t\t\t\t}\r\n\t\t\t\telse {\r\n\t\t\t\t\tif (!t.isEpsilon && !closureBusy.add(c)) {\r\n\t\t\t\t\t\t// avoid infinite recursion for EOF* and EOF+\r\n\t\t\t\t\t\tcontinue;\r\n\t\t\t\t\t}\r\n\t\t\t\t}\r\n\r\n\t\t\t\tthis.closureImpl(c, configs, intermediate, closureBusy, continueCollecting, hasMoreContexts, contextCache, newDepth, treatEofAsEpsilon);\r\n\t\t\t}\r\n\t\t}\r\n\t}\r\n\r\n\t@NotNull\r\n\tpublic getRuleName(index: number): string {\r\n\t\tif (this._parser != null && index >= 0) {\r\n\t\t\treturn this._parser.ruleNames[index];\r\n\t\t}\r\n\t\treturn \"\";\r\n\t}\r\n\r\n\tprotected getEpsilonTarget(@NotNull config: ATNConfig, @NotNull t: Transition, collectPredicates: boolean, inContext: boolean, contextCache: PredictionContextCache, treatEofAsEpsilon: boolean): ATNConfig | undefined {\r\n\t\tswitch (t.serializationType) {\r\n\t\tcase TransitionType.RULE:\r\n\t\t\treturn this.ruleTransition(config, t as RuleTransition, contextCache);\r\n\r\n\t\tcase TransitionType.PRECEDENCE:\r\n\t\t\treturn this.precedenceTransition(config, t as PrecedencePredicateTransition, collectPredicates, inContext);\r\n\r\n\t\tcase TransitionType.PREDICATE:\r\n\t\t\treturn this.predTransition(config, t as PredicateTransition, collectPredicates, inContext);\r\n\r\n\t\tcase TransitionType.ACTION:\r\n\t\t\treturn this.actionTransition(config, t as ActionTransition);\r\n\r\n\t\tcase TransitionType.EPSILON:\r\n\t\t\treturn config.transform(t.target, false);\r\n\r\n\t\tcase TransitionType.ATOM:\r\n\t\tcase TransitionType.RANGE:\r\n\t\tcase 
TransitionType.SET:\r\n\t\t\t// EOF transitions act like epsilon transitions after the first EOF\r\n\t\t\t// transition is traversed\r\n\t\t\tif (treatEofAsEpsilon) {\r\n\t\t\t\tif (t.matches(Token.EOF, 0, 1)) {\r\n\t\t\t\t\treturn config.transform(t.target, false);\r\n\t\t\t\t}\r\n\t\t\t}\r\n\r\n\t\t\treturn undefined;\r\n\r\n\t\tdefault:\r\n\t\t\treturn undefined;\r\n\t\t}\r\n\t}\r\n\r\n\t@NotNull\r\n\tprotected actionTransition(@NotNull config: ATNConfig, @NotNull t: ActionTransition): ATNConfig {\r\n\t\tif (ParserATNSimulator.debug) {\r\n\t\t\tconsole.log(\"ACTION edge \" + t.ruleIndex + \":\" + t.actionIndex);\r\n\t\t}\r\n\t\treturn config.transform(t.target, false);\r\n\t}\r\n\r\n\t@Nullable\r\n\tprotected precedenceTransition(\r\n\t\t@NotNull config: ATNConfig,\r\n\t\t@NotNull pt: PrecedencePredicateTransition,\r\n\t\tcollectPredicates: boolean,\r\n\t\tinContext: boolean): ATNConfig {\r\n\t\tif (ParserATNSimulator.debug) {\r\n\t\t\tconsole.log(\"PRED (collectPredicates=\" + collectPredicates + \") \" +\r\n\t\t\t\tpt.precedence + \">=_p\" +\r\n\t\t\t\t\", ctx dependent=true\");\r\n\t\t\tif (this._parser != null) {\r\n\t\t\t\tconsole.log(\"context surrounding pred is \" +\r\n\t\t\t\t\tthis._parser.getRuleInvocationStack());\r\n\t\t\t}\r\n\t\t}\r\n\r\n\t\tlet c: ATNConfig;\r\n\t\tif (collectPredicates && inContext) {\r\n\t\t\tlet newSemCtx: SemanticContext = SemanticContext.and(config.semanticContext, pt.predicate);\r\n\t\t\tc = config.transform(pt.target, false, newSemCtx);\r\n\t\t}\r\n\t\telse {\r\n\t\t\tc = config.transform(pt.target, false);\r\n\t\t}\r\n\r\n\t\tif (ParserATNSimulator.debug) {\r\n\t\t\tconsole.log(\"config from pred transition=\" + c);\r\n\t\t}\r\n\t\treturn c;\r\n\t}\r\n\r\n\t@Nullable\r\n\tprotected predTransition(\r\n\t\t@NotNull config: ATNConfig,\r\n\t\t@NotNull pt: PredicateTransition,\r\n\t\tcollectPredicates: boolean,\r\n\t\tinContext: boolean): ATNConfig {\r\n\t\tif (ParserATNSimulator.debug) {\r\n\t\t\tconsole.log(\"PRED 
(collectPredicates=\" + collectPredicates + \") \" +\r\n\t\t\t\tpt.ruleIndex + \":\" + pt.predIndex +\r\n\t\t\t\t\", ctx dependent=\" + pt.isCtxDependent);\r\n\t\t\tif (this._parser != null) {\r\n\t\t\t\tconsole.log(\"context surrounding pred is \" +\r\n\t\t\t\t\tthis._parser.getRuleInvocationStack());\r\n\t\t\t}\r\n\t\t}\r\n\r\n\t\tlet c: ATNConfig;\r\n\t\tif (collectPredicates &&\r\n\t\t\t(!pt.isCtxDependent || (pt.isCtxDependent && inContext))) {\r\n\t\t\tlet newSemCtx: SemanticContext = SemanticContext.and(config.semanticContext, pt.predicate);\r\n\t\t\tc = config.transform(pt.target, false, newSemCtx);\r\n\t\t}\r\n\t\telse {\r\n\t\t\tc = config.transform(pt.target, false);\r\n\t\t}\r\n\r\n\t\tif (ParserATNSimulator.debug) {\r\n\t\t\tconsole.log(\"config from pred transition=\" + c);\r\n\t\t}\r\n\t\treturn c;\r\n\t}\r\n\r\n\t@NotNull\r\n\tprotected ruleTransition(@NotNull config: ATNConfig, @NotNull t: RuleTransition, @Nullable contextCache: PredictionContextCache): ATNConfig {\r\n\t\tif (ParserATNSimulator.debug) {\r\n\t\t\tconsole.log(\"CALL rule \" + this.getRuleName(t.target.ruleIndex) +\r\n\t\t\t\t\", ctx=\" + config.context);\r\n\t\t}\r\n\r\n\t\tlet returnState: ATNState = t.followState;\r\n\t\tlet newContext: PredictionContext;\r\n\r\n\t\tif (this.optimize_tail_calls && t.optimizedTailCall && (!this.tail_call_preserves_sll || !PredictionContext.isEmptyLocal(config.context))) {\r\n\t\t\tnewContext = config.context;\r\n\t\t}\r\n\t\telse if (contextCache != null) {\r\n\t\t\tnewContext = contextCache.getChild(config.context, returnState.stateNumber);\r\n\t\t}\r\n\t\telse {\r\n\t\t\tnewContext = config.context.getChild(returnState.stateNumber);\r\n\t\t}\r\n\r\n\t\treturn config.transform(t.target, false, newContext);\r\n\t}\r\n\r\n\tprivate static STATE_ALT_SORT_COMPARATOR: (o1: ATNConfig, o2: ATNConfig) => number =\r\n\t\t(o1: ATNConfig, o2: ATNConfig): number => {\r\n\t\t\tlet diff: number = o1.state.nonStopStateNumber - 
o2.state.nonStopStateNumber;\r\n\t\t\tif (diff !== 0) {\r\n\t\t\t\treturn diff;\r\n\t\t\t}\r\n\r\n\t\t\tdiff = o1.alt - o2.alt;\r\n\t\t\tif (diff !== 0) {\r\n\t\t\t\treturn diff;\r\n\t\t\t}\r\n\r\n\t\t\treturn 0;\r\n\t\t}\r\n\r\n\tprivate isConflicted(@NotNull configset: ATNConfigSet, contextCache: PredictionContextCache): ConflictInfo | undefined {\r\n\t\tif (configset.uniqueAlt !== ATN.INVALID_ALT_NUMBER || configset.size <= 1) {\r\n\t\t\treturn undefined;\r\n\t\t}\r\n\r\n\t\tlet configs: ATNConfig[] = configset.toArray();\r\n\t\tconfigs.sort(ParserATNSimulator.STATE_ALT_SORT_COMPARATOR);\r\n\r\n\t\tlet exact: boolean = !configset.dipsIntoOuterContext;\r\n\t\tlet alts: BitSet = new BitSet();\r\n\t\tlet minAlt: number = configs[0].alt;\r\n\t\talts.set(minAlt);\r\n\r\n\t\t/* Quick checks come first (single pass, no context joining):\r\n\t\t * 1. Make sure first config in the sorted list predicts the minimum\r\n\t\t * represented alternative.\r\n\t\t * 2. Make sure every represented state has at least one configuration\r\n\t\t * which predicts the minimum represented alternative.\r\n\t\t * 3. 
(exact only) make sure every represented state has at least one\r\n\t\t * configuration which predicts each represented alternative.\r\n\t\t */\r\n\r\n\t\t// quick check 1 & 2 => if we assume #1 holds and check #2 against the\r\n\t\t// minAlt from the first state, #2 will fail if the assumption was\r\n\t\t// incorrect\r\n\t\tlet currentState: number = configs[0].state.nonStopStateNumber;\r\n\t\tfor (let config of configs) {\r\n\t\t\tlet stateNumber: number = config.state.nonStopStateNumber;\r\n\t\t\tif (stateNumber !== currentState) {\r\n\t\t\t\tif (config.alt !== minAlt) {\r\n\t\t\t\t\treturn undefined;\r\n\t\t\t\t}\r\n\r\n\t\t\t\tcurrentState = stateNumber;\r\n\t\t\t}\r\n\t\t}\r\n\r\n\t\tlet representedAlts: BitSet;\r\n\t\tif (exact) {\r\n\t\t\tcurrentState = configs[0].state.nonStopStateNumber;\r\n\r\n\t\t\t// get the represented alternatives of the first state\r\n\t\t\trepresentedAlts = new BitSet();\r\n\t\t\tlet maxAlt: number = minAlt;\r\n\t\t\tfor (let config of configs) {\r\n\t\t\t\tif (config.state.nonStopStateNumber !== currentState) {\r\n\t\t\t\t\tbreak;\r\n\t\t\t\t}\r\n\r\n\t\t\t\tlet alt: number = config.alt;\r\n\t\t\t\trepresentedAlts.set(alt);\r\n\t\t\t\tmaxAlt = alt;\r\n\t\t\t}\r\n\r\n\t\t\t// quick check #3:\r\n\t\t\tcurrentState = configs[0].state.nonStopStateNumber;\r\n\t\t\tlet currentAlt: number = minAlt;\r\n\t\t\tfor (let config of configs) {\r\n\t\t\t\tlet stateNumber: number = config.state.nonStopStateNumber;\r\n\t\t\t\tlet alt: number = config.alt;\r\n\t\t\t\tif (stateNumber !== currentState) {\r\n\t\t\t\t\tif (currentAlt !== maxAlt) {\r\n\t\t\t\t\t\texact = false;\r\n\t\t\t\t\t\tbreak;\r\n\t\t\t\t\t}\r\n\r\n\t\t\t\t\tcurrentState = stateNumber;\r\n\t\t\t\t\tcurrentAlt = minAlt;\r\n\t\t\t\t}\r\n\t\t\t\telse if (alt !== currentAlt) {\r\n\t\t\t\t\tif (alt !== representedAlts.nextSetBit(currentAlt + 1)) {\r\n\t\t\t\t\t\texact = false;\r\n\t\t\t\t\t\tbreak;\r\n\t\t\t\t\t}\r\n\r\n\t\t\t\t\tcurrentAlt = 
alt;\r\n\t\t\t\t}\r\n\t\t\t}\r\n\t\t}\r\n\r\n\t\tcurrentState = configs[0].state.nonStopStateNumber;\r\n\t\tlet firstIndexCurrentState: number = 0;\r\n\t\tlet lastIndexCurrentStateMinAlt: number = 0;\r\n\t\tlet joinedCheckContext: PredictionContext = configs[0].context;\r\n\t\tfor (let i = 1; i < configs.length; i++) {\r\n\t\t\tlet config: ATNConfig = configs[i];\r\n\t\t\tif (config.alt !== minAlt) {\r\n\t\t\t\tbreak;\r\n\t\t\t}\r\n\r\n\t\t\tif (config.state.nonStopStateNumber !== currentState) {\r\n\t\t\t\tbreak;\r\n\t\t\t}\r\n\r\n\t\t\tlastIndexCurrentStateMinAlt = i;\r\n\t\t\tjoinedCheckContext = contextCache.join(joinedCheckContext, configs[i].context);\r\n\t\t}\r\n\r\n\t\tfor (let i = lastIndexCurrentStateMinAlt + 1; i < configs.length; i++) {\r\n\t\t\tlet config: ATNConfig = configs[i];\r\n\t\t\tlet state: ATNState = config.state;\r\n\t\t\talts.set(config.alt);\r\n\t\t\tif (state.nonStopStateNumber !== currentState) {\r\n\t\t\t\tcurrentState = state.nonStopStateNumber;\r\n\t\t\t\tfirstIndexCurrentState = i;\r\n\t\t\t\tlastIndexCurrentStateMinAlt = i;\r\n\t\t\t\tjoinedCheckContext = config.context;\r\n\t\t\t\tfor (let j = firstIndexCurrentState + 1; j < configs.length; j++) {\r\n\t\t\t\t\tlet config2: ATNConfig = configs[j];\r\n\t\t\t\t\tif (config2.alt !== minAlt) {\r\n\t\t\t\t\t\tbreak;\r\n\t\t\t\t\t}\r\n\r\n\t\t\t\t\tif (config2.state.nonStopStateNumber !== currentState) {\r\n\t\t\t\t\t\tbreak;\r\n\t\t\t\t\t}\r\n\r\n\t\t\t\t\tlastIndexCurrentStateMinAlt = j;\r\n\t\t\t\t\tjoinedCheckContext = contextCache.join(joinedCheckContext, config2.context);\r\n\t\t\t\t}\r\n\r\n\t\t\t\ti = lastIndexCurrentStateMinAlt;\r\n\t\t\t\tcontinue;\r\n\t\t\t}\r\n\r\n\t\t\tlet joinedCheckContext2: PredictionContext = config.context;\r\n\t\t\tlet currentAlt: number = config.alt;\r\n\t\t\tlet lastIndexCurrentStateCurrentAlt: number = i;\r\n\t\t\tfor (let j = lastIndexCurrentStateCurrentAlt + 1; j < configs.length; j++) {\r\n\t\t\t\tlet config2: ATNConfig = configs[j];\r\n\t\t\t\tif 
(config2.alt !== currentAlt) {\r\n\t\t\t\t\tbreak;\r\n\t\t\t\t}\r\n\r\n\t\t\t\tif (config2.state.nonStopStateNumber !== currentState) {\r\n\t\t\t\t\tbreak;\r\n\t\t\t\t}\r\n\r\n\t\t\t\tlastIndexCurrentStateCurrentAlt = j;\r\n\t\t\t\tjoinedCheckContext2 = contextCache.join(joinedCheckContext2, config2.context);\r\n\t\t\t}\r\n\r\n\t\t\ti = lastIndexCurrentStateCurrentAlt;\r\n\r\n\t\t\tlet check: PredictionContext = contextCache.join(joinedCheckContext, joinedCheckContext2);\r\n\t\t\tif (!joinedCheckContext.equals(check)) {\r\n\t\t\t\treturn undefined;\r\n\t\t\t}\r\n\r\n\t\t\t// update exact if necessary\r\n\t\t\texact = exact && joinedCheckContext.equals(joinedCheckContext2);\r\n\t\t}\r\n\r\n\t\treturn new ConflictInfo(alts, exact);\r\n\t}\r\n\r\n\tprotected getConflictingAltsFromConfigSet(configs: ATNConfigSet): BitSet | undefined {\r\n\t\tlet conflictingAlts: BitSet | undefined = configs.conflictingAlts;\r\n\t\tif (conflictingAlts == null && configs.uniqueAlt !== ATN.INVALID_ALT_NUMBER) {\r\n\t\t\tconflictingAlts = new BitSet();\r\n\t\t\tconflictingAlts.set(configs.uniqueAlt);\r\n\t\t}\r\n\r\n\t\treturn conflictingAlts;\r\n\t}\r\n\r\n\t@NotNull\r\n\tpublic getTokenName(t: number): string {\r\n\t\tif (t === Token.EOF) {\r\n\t\t\treturn \"EOF\";\r\n\t\t}\r\n\r\n\t\tlet vocabulary: Vocabulary = this._parser != null ? 
this._parser.vocabulary : VocabularyImpl.EMPTY_VOCABULARY;\r\n\t\tlet displayName: string = vocabulary.getDisplayName(t);\r\n\t\tif (displayName === String(t)) {\r\n\t\t\treturn displayName;\r\n\t\t}\r\n\r\n\t\treturn displayName + \"<\" + t + \">\";\r\n\t}\r\n\r\n\tpublic getLookaheadName(input: TokenStream): string {\r\n\t\treturn this.getTokenName(input.LA(1));\r\n\t}\r\n\r\n\tpublic dumpDeadEndConfigs(@NotNull nvae: NoViableAltException): void {\r\n\t\tconsole.log(\"dead end configs: \");\r\n\t\tlet deadEndConfigs = nvae.deadEndConfigs;\r\n\t\tif (!deadEndConfigs) {\r\n\t\t\treturn;\r\n\t\t}\r\n\r\n\t\tfor (let c of deadEndConfigs) {\r\n\t\t\tlet trans: string = \"no edges\";\r\n\t\t\tif (c.state.numberOfOptimizedTransitions > 0) {\r\n\t\t\t\tlet t: Transition = c.state.getOptimizedTransition(0);\r\n\t\t\t\tif (t instanceof AtomTransition) {\r\n\t\t\t\t\ttrans = \"Atom \" + this.getTokenName(t._label);\r\n\t\t\t\t}\r\n\t\t\t\telse if (t instanceof SetTransition) {\r\n\t\t\t\t\tlet not: boolean = t instanceof NotSetTransition;\r\n\t\t\t\t\ttrans = (not ? 
\"~\" : \"\") + \"Set \" + t.set.toString();\r\n\t\t\t\t}\r\n\t\t\t}\r\n\t\t\tconsole.log(c.toString(this._parser, true) + \":\" + trans);\r\n\t\t}\r\n\t}\r\n\r\n\t@NotNull\r\n\tprotected noViableAlt(\r\n\t\t@NotNull input: TokenStream,\r\n\t\t@NotNull outerContext: ParserRuleContext,\r\n\t\t@NotNull configs: ATNConfigSet,\r\n\t\tstartIndex: number): NoViableAltException {\r\n\t\treturn new NoViableAltException(this._parser, input,\r\n\t\t\tinput.get(startIndex),\r\n\t\t\tinput.LT(1),\r\n\t\t\tconfigs, outerContext);\r\n\t}\r\n\r\n\tprotected getUniqueAlt(@NotNull configs: Iterable): number {\r\n\t\tlet alt: number = ATN.INVALID_ALT_NUMBER;\r\n\t\tfor (let c of configs) {\r\n\t\t\tif (alt === ATN.INVALID_ALT_NUMBER) {\r\n\t\t\t\talt = c.alt; // found first alt\r\n\t\t\t}\r\n\t\t\telse if (c.alt !== alt) {\r\n\t\t\t\treturn ATN.INVALID_ALT_NUMBER;\r\n\t\t\t}\r\n\t\t}\r\n\t\treturn alt;\r\n\t}\r\n\r\n\tprotected configWithAltAtStopState(@NotNull configs: Iterable, alt: number): boolean {\r\n\t\tfor (let c of configs) {\r\n\t\t\tif (c.alt === alt) {\r\n\t\t\t\tif (c.state instanceof RuleStopState) {\r\n\t\t\t\t\treturn true;\r\n\t\t\t\t}\r\n\t\t\t}\r\n\t\t}\r\n\t\treturn false;\r\n\t}\r\n\r\n\t@NotNull\r\n\tprotected addDFAEdge(\r\n\t\t@NotNull dfa: DFA,\r\n\t\t@NotNull fromState: DFAState,\r\n\t\tt: number,\r\n\t\tcontextTransitions: IntegerList | undefined,\r\n\t\t@NotNull toConfigs: ATNConfigSet,\r\n\t\tcontextCache: PredictionContextCache): DFAState {\r\n\t\tassert(contextTransitions == null || contextTransitions.isEmpty || dfa.isContextSensitive);\r\n\r\n\t\tlet from: DFAState = fromState;\r\n\t\tlet to: DFAState = this.addDFAState(dfa, toConfigs, contextCache);\r\n\r\n\t\tif (contextTransitions != null) {\r\n\t\t\tfor (let context of contextTransitions.toArray()) {\r\n\t\t\t\tif (context === PredictionContext.EMPTY_FULL_STATE_KEY) {\r\n\t\t\t\t\tif (from.configs.isOutermostConfigSet) 
{\r\n\t\t\t\t\t\tcontinue;\r\n\t\t\t\t\t}\r\n\t\t\t\t}\r\n\r\n\t\t\t\tfrom.setContextSensitive(this.atn);\r\n\t\t\t\tfrom.setContextSymbol(t);\r\n\t\t\t\tlet next: DFAState | undefined = from.getContextTarget(context);\r\n\t\t\t\tif (next != null) {\r\n\t\t\t\t\tfrom = next;\r\n\t\t\t\t\tcontinue;\r\n\t\t\t\t}\r\n\r\n\t\t\t\tnext = this.addDFAContextState(dfa, from.configs, context, contextCache);\r\n\t\t\t\tassert(context !== PredictionContext.EMPTY_FULL_STATE_KEY || next.configs.isOutermostConfigSet);\r\n\t\t\t\tfrom.setContextTarget(context, next);\r\n\t\t\t\tfrom = next;\r\n\t\t\t}\r\n\t\t}\r\n\r\n\t\tif (ParserATNSimulator.debug) {\r\n\t\t\tconsole.log(\"EDGE \" + from + \" -> \" + to + \" upon \" + this.getTokenName(t));\r\n\t\t}\r\n\t\tthis.setDFAEdge(from, t, to);\r\n\t\tif (ParserATNSimulator.debug) {\r\n\t\t\tconsole.log(\"DFA=\\n\" + dfa.toString(this._parser != null ? this._parser.vocabulary : VocabularyImpl.EMPTY_VOCABULARY, this._parser != null ? this._parser.ruleNames : undefined));\r\n\t\t}\r\n\t\treturn to;\r\n\t}\r\n\r\n\tprotected setDFAEdge(@Nullable p: DFAState, t: number, @Nullable q: DFAState): void {\r\n\t\tif (p != null) {\r\n\t\t\tp.setTarget(t, q);\r\n\t\t}\r\n\t}\r\n\r\n\t/** See comment on LexerInterpreter.addDFAState. 
*/\r\n\t@NotNull\r\n\tprotected addDFAContextState(@NotNull dfa: DFA, @NotNull configs: ATNConfigSet, returnContext: number, contextCache: PredictionContextCache): DFAState {\r\n\t\tif (returnContext !== PredictionContext.EMPTY_FULL_STATE_KEY) {\r\n\t\t\tlet contextConfigs: ATNConfigSet = new ATNConfigSet();\r\n\t\t\tfor (let config of configs) {\r\n\t\t\t\tcontextConfigs.add(config.appendContext(returnContext, contextCache));\r\n\t\t\t}\r\n\r\n\t\t\treturn this.addDFAState(dfa, contextConfigs, contextCache);\r\n\t\t}\r\n\t\telse {\r\n\t\t\tassert(!configs.isOutermostConfigSet, \"Shouldn't be adding a duplicate edge.\");\r\n\t\t\tconfigs = configs.clone(true);\r\n\t\t\tconfigs.isOutermostConfigSet = true;\r\n\t\t\treturn this.addDFAState(dfa, configs, contextCache);\r\n\t\t}\r\n\t}\r\n\r\n\t/** See comment on LexerInterpreter.addDFAState. */\r\n\t@NotNull\r\n\tprotected addDFAState(@NotNull dfa: DFA, @NotNull configs: ATNConfigSet, contextCache: PredictionContextCache): DFAState {\r\n\t\tlet enableDfa: boolean = this.enable_global_context_dfa || !configs.isOutermostConfigSet;\r\n\t\tif (enableDfa) {\r\n\t\t\tif (!configs.isReadOnly) {\r\n\t\t\t\tconfigs.optimizeConfigs(this);\r\n\t\t\t}\r\n\r\n\t\t\tlet proposed: DFAState = this.createDFAState(dfa, configs);\r\n\t\t\tlet existing: DFAState | undefined = dfa.states.get(proposed);\r\n\t\t\tif (existing != null) {\r\n\t\t\t\treturn existing;\r\n\t\t\t}\r\n\t\t}\r\n\r\n\t\tif (!configs.isReadOnly) {\r\n\t\t\tif (configs.conflictInfo == null) {\r\n\t\t\t\tconfigs.conflictInfo = this.isConflicted(configs, contextCache);\r\n\t\t\t}\r\n\t\t}\r\n\r\n\t\tlet newState: DFAState = this.createDFAState(dfa, configs.clone(true));\r\n\t\t// getDecisionState won't return undefined when we request a known valid decision\r\n\t\tlet decisionState: DecisionState = this.atn.getDecisionState(dfa.decision) as DecisionState;\r\n\t\tlet predictedAlt: number = this.getUniqueAlt(configs);\r\n\t\tif (predictedAlt !== ATN.INVALID_ALT_NUMBER) 
{\r\n\t\t\tnewState.acceptStateInfo = new AcceptStateInfo(predictedAlt);\r\n\t\t} else if (configs.conflictingAlts != null) {\r\n\t\t\tlet conflictingAlts = configs.conflictingAlts;\r\n\t\t\tif (conflictingAlts) {\r\n\t\t\t\tnewState.acceptStateInfo = new AcceptStateInfo(conflictingAlts.nextSetBit(0));\r\n\t\t\t}\r\n\t\t}\r\n\r\n\t\tif (newState.isAcceptState && configs.hasSemanticContext) {\r\n\t\t\tthis.predicateDFAState(newState, configs, decisionState.numberOfTransitions);\r\n\t\t}\r\n\r\n\t\tif (!enableDfa) {\r\n\t\t\treturn newState;\r\n\t\t}\r\n\r\n\t\tlet added: DFAState = dfa.addState(newState);\r\n\t\tif (ParserATNSimulator.debug && added === newState) {\r\n\t\t\tconsole.log(\"adding new DFA state: \" + newState);\r\n\t\t}\r\n\t\treturn added;\r\n\t}\r\n\r\n\t@NotNull\r\n\tprotected createDFAState(@NotNull dfa: DFA, @NotNull configs: ATNConfigSet): DFAState {\r\n\t\treturn new DFAState(configs);\r\n\t}\r\n\r\n\tprotected reportAttemptingFullContext(@NotNull dfa: DFA, conflictingAlts: BitSet | undefined, @NotNull conflictState: SimulatorState, startIndex: number, stopIndex: number): void {\r\n\t\tif (ParserATNSimulator.debug || ParserATNSimulator.retry_debug) {\r\n\t\t\tlet interval: Interval = Interval.of(startIndex, stopIndex);\r\n\t\t\tconsole.log(\"reportAttemptingFullContext decision=\" + dfa.decision + \":\" + conflictState.s0.configs +\r\n\t\t\t\t\", input=\" + this._parser.inputStream.getText(interval));\r\n\t\t}\r\n\t\tif (this._parser != null) {\r\n\t\t\tlet listener = this._parser.getErrorListenerDispatch();\r\n\t\t\tif (listener.reportAttemptingFullContext) {\r\n\t\t\t\tlistener.reportAttemptingFullContext(this._parser, dfa, startIndex, stopIndex, conflictingAlts, conflictState);\r\n\t\t\t}\r\n\t\t}\r\n\t}\r\n\r\n\tprotected reportContextSensitivity(@NotNull dfa: DFA, prediction: number, @NotNull acceptState: SimulatorState, startIndex: number, stopIndex: number): void {\r\n\t\tif (ParserATNSimulator.debug || ParserATNSimulator.retry_debug) 
{\r\n\t\t\tlet interval: Interval = Interval.of(startIndex, stopIndex);\r\n\t\t\tconsole.log(\"reportContextSensitivity decision=\" + dfa.decision + \":\" + acceptState.s0.configs +\r\n\t\t\t\t\", input=\" + this._parser.inputStream.getText(interval));\r\n\t\t}\r\n\t\tif (this._parser != null) {\r\n\t\t\tlet listener = this._parser.getErrorListenerDispatch();\r\n\t\t\tif (listener.reportContextSensitivity) {\r\n\t\t\t\tlistener.reportContextSensitivity(this._parser, dfa, startIndex, stopIndex, prediction, acceptState);\r\n\t\t\t}\r\n\t\t}\r\n\t}\r\n\r\n\t/** If context sensitive parsing, we know it's ambiguity not conflict */\r\n\tprotected reportAmbiguity(\r\n\t\t@NotNull dfa: DFA,\r\n\t\tD: DFAState, // the DFA state from execATN(): void that had SLL conflicts\r\n\t\tstartIndex: number,\r\n\t\tstopIndex: number,\r\n\t\texact: boolean,\r\n\t\t@NotNull ambigAlts: BitSet,\r\n\t\t@NotNull configs: ATNConfigSet) // configs that LL not SLL considered conflicting\r\n\t{\r\n\t\tif (ParserATNSimulator.debug || ParserATNSimulator.retry_debug) {\r\n\t\t\tlet interval: Interval = Interval.of(startIndex, stopIndex);\r\n\t\t\tconsole.log(\"reportAmbiguity \" +\r\n\t\t\t\tambigAlts + \":\" + configs +\r\n\t\t\t\t\", input=\" + this._parser.inputStream.getText(interval));\r\n\t\t}\r\n\t\tif (this._parser != null) {\r\n\t\t\tlet listener = this._parser.getErrorListenerDispatch();\r\n\t\t\tif (listener.reportAmbiguity) {\r\n\t\t\t\tlistener.reportAmbiguity(this._parser, dfa, startIndex, stopIndex, exact, ambigAlts, configs);\r\n\t\t\t}\r\n\t\t}\r\n\t}\r\n\r\n\tprotected getReturnState(context: RuleContext): number {\r\n\t\tif (context.isEmpty) {\r\n\t\t\treturn PredictionContext.EMPTY_FULL_STATE_KEY;\r\n\t\t}\r\n\r\n\t\tlet state: ATNState = this.atn.states[context.invokingState];\r\n\t\tlet transition: RuleTransition = state.transition(0) as RuleTransition;\r\n\t\treturn transition.followState.stateNumber;\r\n\t}\r\n\r\n\tprotected skipTailCalls(context: ParserRuleContext): 
ParserRuleContext {\r\n\t\tif (!this.optimize_tail_calls) {\r\n\t\t\treturn context;\r\n\t\t}\r\n\r\n\t\twhile (!context.isEmpty) {\r\n\t\t\tlet state: ATNState = this.atn.states[context.invokingState];\r\n\t\t\tassert(state.numberOfTransitions === 1 && state.transition(0).serializationType === TransitionType.RULE);\r\n\t\t\tlet transition: RuleTransition = state.transition(0) as RuleTransition;\r\n\t\t\tif (!transition.tailCall) {\r\n\t\t\t\tbreak;\r\n\t\t\t}\r\n\r\n\t\t\t// This method requires that the root ancestor of the ParserRuleContext be empty. If we make it to this\r\n\t\t\t// line, we know the current node is not empty, which means it does have a parent.\r\n\t\t\tcontext = context.parent as ParserRuleContext;\r\n\t\t}\r\n\r\n\t\treturn context;\r\n\t}\r\n\r\n\t/**\r\n\t * @since 4.3\r\n\t */\r\n\tget parser(): Parser {\r\n\t\treturn this._parser;\r\n\t}\r\n}\r\n", "/*!\r\n * Copyright 2016 The ANTLR Project. All rights reserved.\r\n * Licensed under the BSD-3-Clause license. See LICENSE file in the project root for license information.\r\n */\r\n\r\n// ConvertTo-TS run at 2016-10-04T11:26:34.9572142-07:00\r\n\r\nimport { ATNStateType } from \"./ATNStateType\";\r\nimport { BlockStartState } from \"./BlockStartState\";\r\nimport { Override } from \"../Decorators\";\r\nimport { PlusLoopbackState } from \"./PlusLoopbackState\";\r\n\r\n/** Start of `(A|B|...)+` loop. Technically a decision state, but\r\n * we don't use for code generation; somebody might need it, so I'm defining\r\n * it for completeness. In reality, the {@link PlusLoopbackState} node is the\r\n * real decision-making note for `A+`.\r\n */\r\nexport class PlusBlockStartState extends BlockStartState {\r\n\t// This is always set during ATN deserialization\r\n\tpublic loopBackState!: PlusLoopbackState;\r\n\r\n\t@Override\r\n\tget stateType(): ATNStateType {\r\n\t\treturn ATNStateType.PLUS_BLOCK_START;\r\n\t}\r\n}\r\n", "/*!\r\n * Copyright 2016 The ANTLR Project. 
All rights reserved.\r\n * Licensed under the BSD-3-Clause license. See LICENSE file in the project root for license information.\r\n */\r\n\r\n// ConvertTo-TS run at 2016-10-04T11:26:35.0257730-07:00\r\n\r\nimport { ATNStateType } from \"./ATNStateType\";\r\nimport { DecisionState } from \"./DecisionState\";\r\nimport { Override } from \"../Decorators\";\r\n\r\n/** Decision state for `A+` and `(A|B)+`. It has two transitions:\r\n * one to the loop back to start of the block and one to exit.\r\n */\r\nexport class PlusLoopbackState extends DecisionState {\r\n\r\n\t@Override\r\n\tget stateType(): ATNStateType {\r\n\t\treturn ATNStateType.PLUS_LOOP_BACK;\r\n\t}\r\n}\r\n", "/*!\r\n * Copyright 2016 The ANTLR Project. All rights reserved.\r\n * Licensed under the BSD-3-Clause license. See LICENSE file in the project root for license information.\r\n */\r\n\r\n// ConvertTo-TS run at 2016-10-04T11:26:35.0994191-07:00\r\n\r\nimport { AbstractPredicateTransition } from \"./AbstractPredicateTransition\";\r\nimport { ATNState } from \"./ATNState\";\r\nimport { NotNull, Override } from \"../Decorators\";\r\nimport { SemanticContext } from \"./SemanticContext\";\r\nimport { TransitionType } from \"./TransitionType\";\r\n\r\n/**\r\n *\r\n * @author Sam Harwell\r\n */\r\nexport class PrecedencePredicateTransition extends AbstractPredicateTransition {\r\n\tpublic precedence: number;\r\n\r\n\tconstructor( @NotNull target: ATNState, precedence: number) {\r\n\t\tsuper(target);\r\n\t\tthis.precedence = precedence;\r\n\t}\r\n\r\n\t@Override\r\n\tget serializationType(): TransitionType {\r\n\t\treturn TransitionType.PRECEDENCE;\r\n\t}\r\n\r\n\t@Override\r\n\tget isEpsilon(): boolean {\r\n\t\treturn true;\r\n\t}\r\n\r\n\t@Override\r\n\tpublic matches(symbol: number, minVocabSymbol: number, maxVocabSymbol: number): boolean {\r\n\t\treturn false;\r\n\t}\r\n\r\n\tget predicate(): SemanticContext.PrecedencePredicate {\r\n\t\treturn new 
SemanticContext.PrecedencePredicate(this.precedence);\r\n\t}\r\n\r\n\t@Override\r\n\tpublic toString(): string {\r\n\t\treturn this.precedence + \" >= _p\";\r\n\t}\r\n}\r\n", "/*!\r\n * Copyright 2016 The ANTLR Project. All rights reserved.\r\n * Licensed under the BSD-3-Clause license. See LICENSE file in the project root for license information.\r\n */\r\n\r\n// ConvertTo-TS run at 2016-10-04T11:26:36.5959980-07:00\r\n\r\nimport { ATNState } from \"./ATNState\";\r\nimport { IntervalSet } from \"../misc/IntervalSet\";\r\nimport { Override, NotNull } from \"../Decorators\";\r\nimport { Transition } from \"./Transition\";\r\nimport { TransitionType } from \"./TransitionType\";\r\n\r\nexport class RangeTransition extends Transition {\r\n\tpublic from: number;\r\n\tpublic to: number;\r\n\r\n\tconstructor(@NotNull target: ATNState, from: number, to: number) {\r\n\t\tsuper(target);\r\n\t\tthis.from = from;\r\n\t\tthis.to = to;\r\n\t}\r\n\r\n\t@Override\r\n\tget serializationType(): TransitionType {\r\n\t\treturn TransitionType.RANGE;\r\n\t}\r\n\r\n\t@Override\r\n\t@NotNull\r\n\tget label(): IntervalSet {\r\n\t\treturn IntervalSet.of(this.from, this.to);\r\n\t}\r\n\r\n\t@Override\r\n\tpublic matches(symbol: number, minVocabSymbol: number, maxVocabSymbol: number): boolean {\r\n\t\treturn symbol >= this.from && symbol <= this.to;\r\n\t}\r\n\r\n\t@Override\r\n\t@NotNull\r\n\tpublic toString(): string {\r\n\t\treturn \"'\" + String.fromCodePoint(this.from) + \"'..'\" + String.fromCodePoint(this.to) + \"'\";\r\n\t}\r\n}\r\n", "/*!\r\n * Copyright 2016 The ANTLR Project. All rights reserved.\r\n * Licensed under the BSD-3-Clause license. 
See LICENSE file in the project root for license information.\r\n */\r\n\r\n// ConvertTo-TS run at 2016-10-04T11:26:36.6806851-07:00\r\n\r\nimport { ATNState } from \"./ATNState\";\r\nimport { ATNStateType } from \"./ATNStateType\";\r\nimport { Override } from \"../Decorators\";\r\nimport { RuleStopState } from \"./RuleStopState\";\r\n\r\nexport class RuleStartState extends ATNState {\r\n\t// This is always set during ATN deserialization\r\n\tpublic stopState!: RuleStopState;\r\n\tpublic isPrecedenceRule: boolean = false;\r\n\tpublic leftFactored: boolean = false;\r\n\r\n\t@Override\r\n\tget stateType(): ATNStateType {\r\n\t\treturn ATNStateType.RULE_START;\r\n\t}\r\n}\r\n", "/*!\r\n * Copyright 2016 The ANTLR Project. All rights reserved.\r\n * Licensed under the BSD-3-Clause license. See LICENSE file in the project root for license information.\r\n */\r\n\r\n// ConvertTo-TS run at 2016-10-04T11:26:37.5657409-07:00\r\n\r\nimport { ATNState } from \"./ATNState\";\r\nimport { ATNStateType } from \"./ATNStateType\";\r\nimport { BlockStartState } from \"./BlockStartState\";\r\nimport { Override } from \"../Decorators\";\r\n\r\n/** The block that begins a closure loop. */\r\nexport class StarBlockStartState extends BlockStartState {\r\n\r\n\t@Override\r\n\tget stateType(): ATNStateType {\r\n\t\treturn ATNStateType.STAR_BLOCK_START;\r\n\t}\r\n}\r\n", "/*!\r\n * Copyright 2016 The ANTLR Project. All rights reserved.\r\n * Licensed under the BSD-3-Clause license. 
See LICENSE file in the project root for license information.\r\n */\r\n\r\n// ConvertTo-TS run at 2016-10-04T11:26:37.6368726-07:00\r\n\r\nimport { ATNState } from \"./ATNState\";\r\nimport { ATNStateType } from \"./ATNStateType\";\r\nimport { Override } from \"../Decorators\";\r\nimport { StarLoopEntryState } from \"./StarLoopEntryState\";\r\n\r\nexport class StarLoopbackState extends ATNState {\r\n\tget loopEntryState(): StarLoopEntryState {\r\n\t\treturn this.transition(0).target as StarLoopEntryState;\r\n\t}\r\n\r\n\t@Override\r\n\tget stateType(): ATNStateType {\r\n\t\treturn ATNStateType.STAR_LOOP_BACK;\r\n\t}\r\n}\r\n", "/*!\r\n * Copyright 2016 The ANTLR Project. All rights reserved.\r\n * Licensed under the BSD-3-Clause license. See LICENSE file in the project root for license information.\r\n */\r\n\r\n// ConvertTo-TS run at 2016-10-04T11:26:37.7814046-07:00\r\n\r\nimport { ATNStateType } from \"./ATNStateType\";\r\nimport { DecisionState } from \"./DecisionState\";\r\nimport { Override } from \"../Decorators\";\r\n\r\n/** The Tokens rule start state linking to each lexer rule start state */\r\nexport class TokensStartState extends DecisionState {\r\n\r\n\t@Override\r\n\tget stateType(): ATNStateType {\r\n\t\treturn ATNStateType.TOKEN_START;\r\n\t}\r\n}\r\n", "/*!\r\n * Copyright 2016 The ANTLR Project. All rights reserved.\r\n * Licensed under the BSD-3-Clause license. 
See LICENSE file in the project root for license information.\r\n */\r\n\r\nimport { Equatable } from \"./Stubs\";\r\nimport { MurmurHash } from \"./MurmurHash\";\r\n\r\nexport class UUID implements Equatable {\r\n\tprivate readonly data: Uint32Array;\r\n\r\n\tconstructor(mostSigBits: number, moreSigBits: number, lessSigBits: number, leastSigBits: number) {\r\n\t\tthis.data = new Uint32Array(4);\r\n\t\tthis.data[0] = mostSigBits;\r\n\t\tthis.data[1] = moreSigBits;\r\n\t\tthis.data[2] = lessSigBits;\r\n\t\tthis.data[3] = leastSigBits;\r\n\t}\r\n\r\n\tpublic static fromString(data: string): UUID {\r\n\t\tif (!/^[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}$/.test(data)) {\r\n\t\t\tthrow new Error(\"Incorrectly formatted UUID\");\r\n\t\t}\r\n\r\n\t\tlet segments = data.split(\"-\");\r\n\t\tlet mostSigBits = parseInt(segments[0], 16);\r\n\t\tlet moreSigBits = ((parseInt(segments[1], 16) << 16) >>> 0) + parseInt(segments[2], 16);\r\n\t\tlet lessSigBits = ((parseInt(segments[3], 16) << 16) >>> 0) + parseInt(segments[4].substr(0, 4), 16);\r\n\t\tlet leastSigBits = parseInt(segments[4].substr(-8), 16);\r\n\t\treturn new UUID(mostSigBits, moreSigBits, lessSigBits, leastSigBits);\r\n\t}\r\n\r\n\tpublic hashCode(): number {\r\n\t\treturn MurmurHash.hashCode([this.data[0], this.data[1], this.data[2], this.data[3]]);\r\n\t}\r\n\r\n\tpublic equals(obj: any): boolean {\r\n\t\tif (obj === this) {\r\n\t\t\treturn true;\r\n\t\t} else if (!(obj instanceof UUID)) {\r\n\t\t\treturn false;\r\n\t\t}\r\n\r\n\t\treturn this.data[0] === obj.data[0]\r\n\t\t\t&& this.data[1] === obj.data[1]\r\n\t\t\t&& this.data[2] === obj.data[2]\r\n\t\t\t&& this.data[3] === obj.data[3];\r\n\t}\r\n\r\n\tpublic toString(): string {\r\n\t\treturn (\"00000000\" + this.data[0].toString(16)).substr(-8)\r\n\t\t\t+ \"-\" + (\"0000\" + (this.data[1] >>> 16).toString(16)).substr(-4)\r\n\t\t\t+ \"-\" + (\"0000\" + this.data[1].toString(16)).substr(-4)\r\n\t\t\t+ \"-\" + (\"0000\" + 
(this.data[2] >>> 16).toString(16)).substr(-4)\r\n\t\t\t+ \"-\" + (\"0000\" + this.data[2].toString(16)).substr(-4)\r\n\t\t\t+ (\"00000000\" + this.data[3].toString(16)).substr(-8);\r\n\t}\r\n}\r\n", "/*!\r\n * Copyright 2016 The ANTLR Project. All rights reserved.\r\n * Licensed under the BSD-3-Clause license. See LICENSE file in the project root for license information.\r\n */\r\n\r\n// ConvertTo-TS run at 2016-10-04T11:26:25.9683447-07:00\r\n\r\nimport { ActionTransition } from \"./ActionTransition\";\r\nimport { Array2DHashSet } from \"../misc/Array2DHashSet\";\r\nimport { ATN } from \"./ATN\";\r\nimport { ATNDeserializationOptions } from \"./ATNDeserializationOptions\";\r\nimport { ATNState } from \"./ATNState\";\r\nimport { ATNStateType } from \"./ATNStateType\";\r\nimport { ATNType } from \"./ATNType\";\r\nimport { AtomTransition } from \"./AtomTransition\";\r\nimport { BasicBlockStartState } from \"./BasicBlockStartState\";\r\nimport { BasicState } from \"./BasicState\";\r\nimport { BitSet } from \"../misc/BitSet\";\r\nimport { BlockEndState } from \"./BlockEndState\";\r\nimport { BlockStartState } from \"./BlockStartState\";\r\nimport { DecisionState } from \"./DecisionState\";\r\nimport { DFA } from \"../dfa/DFA\";\r\nimport { EpsilonTransition } from \"./EpsilonTransition\";\r\nimport { Interval } from \"../misc/Interval\";\r\nimport { IntervalSet } from \"../misc/IntervalSet\";\r\nimport { InvalidState } from \"./InvalidState\";\r\nimport { LexerAction } from \"./LexerAction\";\r\nimport { LexerActionType } from \"./LexerActionType\";\r\nimport { LexerChannelAction } from \"./LexerChannelAction\";\r\nimport { LexerCustomAction } from \"./LexerCustomAction\";\r\nimport { LexerModeAction } from \"./LexerModeAction\";\r\nimport { LexerMoreAction } from \"./LexerMoreAction\";\r\nimport { LexerPopModeAction } from \"./LexerPopModeAction\";\r\nimport { LexerPushModeAction } from \"./LexerPushModeAction\";\r\nimport { LexerSkipAction } from 
\"./LexerSkipAction\";\r\nimport { LexerTypeAction } from \"./LexerTypeAction\";\r\nimport { LoopEndState } from \"./LoopEndState\";\r\nimport { NotNull } from \"../Decorators\";\r\nimport { NotSetTransition } from \"./NotSetTransition\";\r\nimport { ParserATNSimulator } from \"./ParserATNSimulator\";\r\nimport { PlusBlockStartState } from \"./PlusBlockStartState\";\r\nimport { PlusLoopbackState } from \"./PlusLoopbackState\";\r\nimport { PrecedencePredicateTransition } from \"./PrecedencePredicateTransition\";\r\nimport { PredicateTransition } from \"./PredicateTransition\";\r\nimport { RangeTransition } from \"./RangeTransition\";\r\nimport { RuleStartState } from \"./RuleStartState\";\r\nimport { RuleStopState } from \"./RuleStopState\";\r\nimport { RuleTransition } from \"./RuleTransition\";\r\nimport { SetTransition } from \"./SetTransition\";\r\nimport { StarBlockStartState } from \"./StarBlockStartState\";\r\nimport { StarLoopbackState } from \"./StarLoopbackState\";\r\nimport { StarLoopEntryState } from \"./StarLoopEntryState\";\r\nimport { Token } from \"../Token\";\r\nimport { TokensStartState } from \"./TokensStartState\";\r\nimport { Transition } from \"./Transition\";\r\nimport { TransitionType } from \"./TransitionType\";\r\nimport { UUID } from \"../misc/UUID\";\r\nimport { WildcardTransition } from \"./WildcardTransition\";\r\n\r\ninterface UnicodeDeserializer {\r\n\t// Wrapper for readInt() or readInt32()\r\n\treadUnicode(data: Uint16Array, p: number): number;\r\n\r\n\t// Work around Java not allowing mutation of captured variables\r\n\t// by returning amount by which to increment p after each read\r\n\treadonly size: number;\r\n}\r\n\r\nconst enum UnicodeDeserializingMode {\r\n\tUNICODE_BMP,\r\n\tUNICODE_SMP,\r\n}\r\n\r\n/**\r\n *\r\n * @author Sam Harwell\r\n */\r\nexport class ATNDeserializer {\r\n\tstatic get SERIALIZED_VERSION(): number {\r\n\t\t/* This value should never change. 
Updates following this version are\r\n\t\t * reflected as change in the unique ID SERIALIZED_UUID.\r\n\t\t */\r\n\t\treturn 3;\r\n\t}\r\n\r\n\t/* WARNING: DO NOT MERGE THESE LINES. If UUIDs differ during a merge,\r\n\t * resolve the conflict by generating a new ID!\r\n\t */\r\n\r\n\t/**\r\n\t * This is the earliest supported serialized UUID.\r\n\t */\r\n\tprivate static readonly BASE_SERIALIZED_UUID: UUID = UUID.fromString(\"E4178468-DF95-44D0-AD87-F22A5D5FB6D3\");\r\n\t/**\r\n\t * This UUID indicates an extension of {@link #ADDED_PRECEDENCE_TRANSITIONS}\r\n\t * for the addition of lexer actions encoded as a sequence of\r\n\t * {@link LexerAction} instances.\r\n\t */\r\n\tprivate static readonly ADDED_LEXER_ACTIONS: UUID = UUID.fromString(\"AB35191A-1603-487E-B75A-479B831EAF6D\");\r\n\t/**\r\n\t * This UUID indicates the serialized ATN contains two sets of\r\n\t * IntervalSets, where the second set's values are encoded as\r\n\t * 32-bit integers to support the full Unicode SMP range up to U+10FFFF.\r\n\t */\r\n\tprivate static readonly ADDED_UNICODE_SMP: UUID = UUID.fromString(\"C23FEA89-0605-4f51-AFB8-058BCAB8C91B\");\r\n\t/**\r\n\t * This list contains all of the currently supported UUIDs, ordered by when\r\n\t * the feature first appeared in this branch.\r\n\t */\r\n\tprivate static readonly SUPPORTED_UUIDS: UUID[] = [\r\n\t\tATNDeserializer.BASE_SERIALIZED_UUID,\r\n\t\tATNDeserializer.ADDED_LEXER_ACTIONS,\r\n\t\tATNDeserializer.ADDED_UNICODE_SMP,\r\n\t];\r\n\r\n\t/**\r\n\t * This is the current serialized UUID.\r\n\t */\r\n\tprivate static readonly SERIALIZED_UUID: UUID = ATNDeserializer.ADDED_UNICODE_SMP;\r\n\r\n\t@NotNull\r\n\tprivate readonly deserializationOptions: ATNDeserializationOptions;\r\n\r\n\tconstructor(deserializationOptions?: ATNDeserializationOptions) {\r\n\t\tif (deserializationOptions === undefined) {\r\n\t\t\tdeserializationOptions = ATNDeserializationOptions.defaultOptions;\r\n\t\t}\r\n\r\n\t\tthis.deserializationOptions = 
deserializationOptions;\r\n\t}\r\n\r\n\t/**\r\n\t * Determines if a particular serialized representation of an ATN supports\r\n\t * a particular feature, identified by the {@link UUID} used for serializing\r\n\t * the ATN at the time the feature was first introduced.\r\n\t *\r\n\t * @param feature The {@link UUID} marking the first time the feature was\r\n\t * supported in the serialized ATN.\r\n\t * @param actualUuid The {@link UUID} of the actual serialized ATN which is\r\n\t * currently being deserialized.\r\n\t * @returns `true` if the `actualUuid` value represents a\r\n\t * serialized ATN at or after the feature identified by `feature` was\r\n\t * introduced; otherwise, `false`.\r\n\t */\r\n\tprotected static isFeatureSupported(feature: UUID, actualUuid: UUID): boolean {\r\n\t\tlet featureIndex: number = ATNDeserializer.SUPPORTED_UUIDS.findIndex((e) => e.equals(feature));\r\n\t\tif (featureIndex < 0) {\r\n\t\t\treturn false;\r\n\t\t}\r\n\r\n\t\treturn ATNDeserializer.SUPPORTED_UUIDS.findIndex((e) => e.equals(actualUuid)) >= featureIndex;\r\n\t}\r\n\r\n\tprivate static getUnicodeDeserializer(mode: UnicodeDeserializingMode): UnicodeDeserializer {\r\n\t\tif (mode === UnicodeDeserializingMode.UNICODE_BMP) {\r\n\t\t\treturn {\r\n\t\t\t\treadUnicode: (data: Uint16Array, p: number): number => {\r\n\t\t\t\t\treturn ATNDeserializer.toInt(data[p]);\r\n\t\t\t\t},\r\n\t\t\t\tsize: 1,\r\n\t\t\t};\r\n\t\t} else {\r\n\t\t\treturn {\r\n\t\t\t\treadUnicode: (data: Uint16Array, p: number): number => {\r\n\t\t\t\t\treturn ATNDeserializer.toInt32(data, p);\r\n\t\t\t\t},\r\n\t\t\t\tsize: 2,\r\n\t\t\t};\r\n\t\t}\r\n\t}\r\n\r\n\tpublic deserialize(@NotNull data: Uint16Array): ATN {\r\n\t\tdata = data.slice(0);\r\n\r\n\t\t// Each Uint16 value in data is shifted by +2 at the entry to this method. This is an encoding optimization\r\n\t\t// targeting the serialized values 0 and -1 (serialized to 0xFFFF), each of which are very common in the\r\n\t\t// serialized form of the ATN. 
In the modified UTF-8 that Java uses for compiled string literals, these two\r\n\t\t// character values have multi-byte forms. By shifting each value by +2, they become characters 2 and 1 prior to\r\n\t\t// writing the string, each of which have single-byte representations. Since the shift occurs in the tool during\r\n\t\t// ATN serialization, each target is responsible for adjusting the values during deserialization.\r\n\t\t//\r\n\t\t// As a special case, note that the first element of data is not adjusted because it contains the major version\r\n\t\t// number of the serialized ATN, which was fixed at 3 at the time the value shifting was implemented.\r\n\t\tfor (let i = 1; i < data.length; i++) {\r\n\t\t\tdata[i] = (data[i] - 2) & 0xFFFF;\r\n\t\t}\r\n\r\n\t\tlet p: number = 0;\r\n\t\tlet version: number = ATNDeserializer.toInt(data[p++]);\r\n\t\tif (version !== ATNDeserializer.SERIALIZED_VERSION) {\r\n\t\t\tlet reason = `Could not deserialize ATN with version ${version} (expected ${ATNDeserializer.SERIALIZED_VERSION}).`;\r\n\t\t\tthrow new Error(reason);\r\n\t\t}\r\n\r\n\t\tlet uuid: UUID = ATNDeserializer.toUUID(data, p);\r\n\t\tp += 8;\r\n\t\tif (ATNDeserializer.SUPPORTED_UUIDS.findIndex((e) => e.equals(uuid)) < 0) {\r\n\t\t\tlet reason = `Could not deserialize ATN with UUID ${uuid} (expected ${ATNDeserializer.SERIALIZED_UUID} or a legacy UUID).`;\r\n\t\t\tthrow new Error(reason);\r\n\t\t}\r\n\r\n\t\tlet supportsLexerActions: boolean = ATNDeserializer.isFeatureSupported(ATNDeserializer.ADDED_LEXER_ACTIONS, uuid);\r\n\r\n\t\tlet grammarType: ATNType = ATNDeserializer.toInt(data[p++]);\r\n\t\tlet maxTokenType: number = ATNDeserializer.toInt(data[p++]);\r\n\t\tlet atn: ATN = new ATN(grammarType, maxTokenType);\r\n\r\n\t\t//\r\n\t\t// STATES\r\n\t\t//\r\n\t\tlet loopBackStateNumbers: Array<[LoopEndState, number]> = [];\r\n\t\tlet endStateNumbers: Array<[BlockStartState, number]> = [];\r\n\t\tlet nstates: number = ATNDeserializer.toInt(data[p++]);\r\n\t\tfor (let i = 
0; i < nstates; i++) {\r\n\t\t\tlet stype: ATNStateType = ATNDeserializer.toInt(data[p++]);\r\n\t\t\t// ignore bad type of states\r\n\t\t\tif (stype === ATNStateType.INVALID_TYPE) {\r\n\t\t\t\tatn.addState(new InvalidState());\r\n\t\t\t\tcontinue;\r\n\t\t\t}\r\n\r\n\t\t\tlet ruleIndex: number = ATNDeserializer.toInt(data[p++]);\r\n\t\t\tif (ruleIndex === 0xFFFF) {\r\n\t\t\t\truleIndex = -1;\r\n\t\t\t}\r\n\r\n\t\t\tlet s: ATNState = this.stateFactory(stype, ruleIndex);\r\n\t\t\tif (stype === ATNStateType.LOOP_END) { // special case\r\n\t\t\t\tlet loopBackStateNumber: number = ATNDeserializer.toInt(data[p++]);\r\n\t\t\t\tloopBackStateNumbers.push([s as LoopEndState, loopBackStateNumber]);\r\n\t\t\t}\r\n\t\t\telse if (s instanceof BlockStartState) {\r\n\t\t\t\tlet endStateNumber: number = ATNDeserializer.toInt(data[p++]);\r\n\t\t\t\tendStateNumbers.push([s, endStateNumber]);\r\n\t\t\t}\r\n\t\t\tatn.addState(s);\r\n\t\t}\r\n\r\n\t\t// delay the assignment of loop back and end states until we know all the state instances have been initialized\r\n\t\tfor (let pair of loopBackStateNumbers) {\r\n\t\t\tpair[0].loopBackState = atn.states[pair[1]];\r\n\t\t}\r\n\r\n\t\tfor (let pair of endStateNumbers) {\r\n\t\t\tpair[0].endState = atn.states[pair[1]] as BlockEndState;\r\n\t\t}\r\n\r\n\t\tlet numNonGreedyStates: number = ATNDeserializer.toInt(data[p++]);\r\n\t\tfor (let i = 0; i < numNonGreedyStates; i++) {\r\n\t\t\tlet stateNumber: number = ATNDeserializer.toInt(data[p++]);\r\n\t\t\t(atn.states[stateNumber] as DecisionState).nonGreedy = true;\r\n\t\t}\r\n\r\n\t\tlet numSllDecisions: number = ATNDeserializer.toInt(data[p++]);\r\n\t\tfor (let i = 0; i < numSllDecisions; i++) {\r\n\t\t\tlet stateNumber: number = ATNDeserializer.toInt(data[p++]);\r\n\t\t\t(atn.states[stateNumber] as DecisionState).sll = true;\r\n\t\t}\r\n\r\n\t\tlet numPrecedenceStates: number = ATNDeserializer.toInt(data[p++]);\r\n\t\tfor (let i = 0; i < numPrecedenceStates; i++) {\r\n\t\t\tlet stateNumber: 
number = ATNDeserializer.toInt(data[p++]);\r\n\t\t\t(atn.states[stateNumber] as RuleStartState).isPrecedenceRule = true;\r\n\t\t}\r\n\r\n\t\t//\r\n\t\t// RULES\r\n\t\t//\r\n\t\tlet nrules: number = ATNDeserializer.toInt(data[p++]);\r\n\t\tif (atn.grammarType === ATNType.LEXER) {\r\n\t\t\tatn.ruleToTokenType = new Int32Array(nrules);\r\n\t\t}\r\n\r\n\t\tatn.ruleToStartState = new Array(nrules);\r\n\t\tfor (let i = 0; i < nrules; i++) {\r\n\t\t\tlet s: number = ATNDeserializer.toInt(data[p++]);\r\n\t\t\tlet startState: RuleStartState = atn.states[s] as RuleStartState;\r\n\t\t\tstartState.leftFactored = ATNDeserializer.toInt(data[p++]) !== 0;\r\n\t\t\tatn.ruleToStartState[i] = startState;\r\n\t\t\tif (atn.grammarType === ATNType.LEXER) {\r\n\t\t\t\tlet tokenType: number = ATNDeserializer.toInt(data[p++]);\r\n\t\t\t\tif (tokenType === 0xFFFF) {\r\n\t\t\t\t\ttokenType = Token.EOF;\r\n\t\t\t\t}\r\n\r\n\t\t\t\tatn.ruleToTokenType[i] = tokenType;\r\n\r\n\t\t\t\tif (!ATNDeserializer.isFeatureSupported(ATNDeserializer.ADDED_LEXER_ACTIONS, uuid)) {\r\n\t\t\t\t\t// this piece of unused metadata was serialized prior to the\r\n\t\t\t\t\t// addition of LexerAction\r\n\t\t\t\t\tlet actionIndexIgnored: number = ATNDeserializer.toInt(data[p++]);\r\n\t\t\t\t\tif (actionIndexIgnored === 0xFFFF) {\r\n\t\t\t\t\t\tactionIndexIgnored = -1;\r\n\t\t\t\t\t}\r\n\t\t\t\t}\r\n\t\t\t}\r\n\t\t}\r\n\r\n\t\tatn.ruleToStopState = new Array(nrules);\r\n\t\tfor (let state of atn.states) {\r\n\t\t\tif (!(state instanceof RuleStopState)) {\r\n\t\t\t\tcontinue;\r\n\t\t\t}\r\n\r\n\t\t\tatn.ruleToStopState[state.ruleIndex] = state;\r\n\t\t\tatn.ruleToStartState[state.ruleIndex].stopState = state;\r\n\t\t}\r\n\r\n\t\t//\r\n\t\t// MODES\r\n\t\t//\r\n\t\tlet nmodes: number = ATNDeserializer.toInt(data[p++]);\r\n\t\tfor (let i = 0; i < nmodes; i++) {\r\n\t\t\tlet s: number = ATNDeserializer.toInt(data[p++]);\r\n\t\t\tatn.modeToStartState.push(atn.states[s] as 
TokensStartState);\r\n\t\t}\r\n\r\n\t\tatn.modeToDFA = new Array(nmodes);\r\n\t\tfor (let i = 0; i < nmodes; i++) {\r\n\t\t\tatn.modeToDFA[i] = new DFA(atn.modeToStartState[i]);\r\n\t\t}\r\n\r\n\t\t//\r\n\t\t// SETS\r\n\t\t//\r\n\t\tlet sets: IntervalSet[] = [];\r\n\r\n\t\t// First, read all sets with 16-bit Unicode code points <= U+FFFF.\r\n\t\tp = this.deserializeSets(data, p, sets, ATNDeserializer.getUnicodeDeserializer(UnicodeDeserializingMode.UNICODE_BMP));\r\n\r\n\t\t// Next, if the ATN was serialized with the Unicode SMP feature,\r\n\t\t// deserialize sets with 32-bit arguments <= U+10FFFF.\r\n\t\tif (ATNDeserializer.isFeatureSupported(ATNDeserializer.ADDED_UNICODE_SMP, uuid)) {\r\n\t\t\tp = this.deserializeSets(data, p, sets, ATNDeserializer.getUnicodeDeserializer(UnicodeDeserializingMode.UNICODE_SMP));\r\n\t\t}\r\n\r\n\t\t//\r\n\t\t// EDGES\r\n\t\t//\r\n\t\tlet nedges: number = ATNDeserializer.toInt(data[p++]);\r\n\t\tfor (let i = 0; i < nedges; i++) {\r\n\t\t\tlet src: number = ATNDeserializer.toInt(data[p]);\r\n\t\t\tlet trg: number = ATNDeserializer.toInt(data[p + 1]);\r\n\t\t\tlet ttype: number = ATNDeserializer.toInt(data[p + 2]);\r\n\t\t\tlet arg1: number = ATNDeserializer.toInt(data[p + 3]);\r\n\t\t\tlet arg2: number = ATNDeserializer.toInt(data[p + 4]);\r\n\t\t\tlet arg3: number = ATNDeserializer.toInt(data[p + 5]);\r\n\t\t\tlet trans: Transition = this.edgeFactory(atn, ttype, src, trg, arg1, arg2, arg3, sets);\r\n\t\t\t// console.log(`EDGE ${trans.constructor.name} ${src}->${trg} ${Transition.serializationNames[ttype]} ${arg1},${arg2},${arg3}`);\r\n\t\t\tlet srcState: ATNState = atn.states[src];\r\n\t\t\tsrcState.addTransition(trans);\r\n\t\t\tp += 6;\r\n\t\t}\r\n\r\n\t\t// edges for rule stop states can be derived, so they aren't serialized\r\n\t\tinterface T { stopState: number; returnState: number; outermostPrecedenceReturn: number; }\r\n\t\tlet returnTransitionsSet = new Array2DHashSet({\r\n\t\t\thashCode: (o: T) => o.stopState ^ o.returnState 
^ o.outermostPrecedenceReturn,\r\n\r\n\t\t\tequals: (a: T, b: T): boolean => {\r\n\t\t\t\treturn a.stopState === b.stopState\r\n\t\t\t\t\t&& a.returnState === b.returnState\r\n\t\t\t\t\t&& a.outermostPrecedenceReturn === b.outermostPrecedenceReturn;\r\n\t\t\t},\r\n\t\t});\r\n\t\tlet returnTransitions: T[] = [];\r\n\t\tfor (let state of atn.states) {\r\n\t\t\tlet returningToLeftFactored: boolean = state.ruleIndex >= 0 && atn.ruleToStartState[state.ruleIndex].leftFactored;\r\n\t\t\tfor (let i = 0; i < state.numberOfTransitions; i++) {\r\n\t\t\t\tlet t: Transition = state.transition(i);\r\n\t\t\t\tif (!(t instanceof RuleTransition)) {\r\n\t\t\t\t\tcontinue;\r\n\t\t\t\t}\r\n\r\n\t\t\t\tlet ruleTransition: RuleTransition = t;\r\n\t\t\t\tlet returningFromLeftFactored: boolean = atn.ruleToStartState[ruleTransition.target.ruleIndex].leftFactored;\r\n\t\t\t\tif (!returningFromLeftFactored && returningToLeftFactored) {\r\n\t\t\t\t\tcontinue;\r\n\t\t\t\t}\r\n\r\n\t\t\t\tlet outermostPrecedenceReturn: number = -1;\r\n\t\t\t\tif (atn.ruleToStartState[ruleTransition.target.ruleIndex].isPrecedenceRule) {\r\n\t\t\t\t\tif (ruleTransition.precedence === 0) {\r\n\t\t\t\t\t\toutermostPrecedenceReturn = ruleTransition.target.ruleIndex;\r\n\t\t\t\t\t}\r\n\t\t\t\t}\r\n\r\n\t\t\t\tlet current = { stopState: ruleTransition.target.ruleIndex, returnState: ruleTransition.followState.stateNumber, outermostPrecedenceReturn };\r\n\t\t\t\tif (returnTransitionsSet.add(current)) {\r\n\t\t\t\t\treturnTransitions.push(current);\r\n\t\t\t\t}\r\n\t\t\t}\r\n\t\t}\r\n\r\n\t\t// Add all elements from returnTransitions to the ATN\r\n\t\tfor (let returnTransition of returnTransitions) {\r\n\t\t\tlet transition = new EpsilonTransition(atn.states[returnTransition.returnState], returnTransition.outermostPrecedenceReturn);\r\n\t\t\tatn.ruleToStopState[returnTransition.stopState].addTransition(transition);\r\n\t\t}\r\n\r\n\t\tfor (let state of atn.states) {\r\n\t\t\tif (state instanceof BlockStartState) 
{\r\n\t\t\t\t// we need to know the end state to set its start state\r\n\t\t\t\tif (state.endState === undefined) {\r\n\t\t\t\t\tthrow new Error(\"IllegalStateException\");\r\n\t\t\t\t}\r\n\r\n\t\t\t\t// block end states can only be associated to a single block start state\r\n\t\t\t\tif (state.endState.startState !== undefined) {\r\n\t\t\t\t\tthrow new Error(\"IllegalStateException\");\r\n\t\t\t\t}\r\n\r\n\t\t\t\tstate.endState.startState = state;\r\n\t\t\t}\r\n\r\n\t\t\tif (state instanceof PlusLoopbackState) {\r\n\t\t\t\tlet loopbackState: PlusLoopbackState = state;\r\n\t\t\t\tfor (let i = 0; i < loopbackState.numberOfTransitions; i++) {\r\n\t\t\t\t\tlet target: ATNState = loopbackState.transition(i).target;\r\n\t\t\t\t\tif (target instanceof PlusBlockStartState) {\r\n\t\t\t\t\t\ttarget.loopBackState = loopbackState;\r\n\t\t\t\t\t}\r\n\t\t\t\t}\r\n\t\t\t}\r\n\t\t\telse if (state instanceof StarLoopbackState) {\r\n\t\t\t\tlet loopbackState: StarLoopbackState = state;\r\n\t\t\t\tfor (let i = 0; i < loopbackState.numberOfTransitions; i++) {\r\n\t\t\t\t\tlet target: ATNState = loopbackState.transition(i).target;\r\n\t\t\t\t\tif (target instanceof StarLoopEntryState) {\r\n\t\t\t\t\t\ttarget.loopBackState = loopbackState;\r\n\t\t\t\t\t}\r\n\t\t\t\t}\r\n\t\t\t}\r\n\t\t}\r\n\r\n\t\t//\r\n\t\t// DECISIONS\r\n\t\t//\r\n\t\tlet ndecisions: number = ATNDeserializer.toInt(data[p++]);\r\n\t\tfor (let i = 1; i <= ndecisions; i++) {\r\n\t\t\tlet s: number = ATNDeserializer.toInt(data[p++]);\r\n\t\t\tlet decState: DecisionState = atn.states[s] as DecisionState;\r\n\t\t\tatn.decisionToState.push(decState);\r\n\t\t\tdecState.decision = i - 1;\r\n\t\t}\r\n\r\n\t\t//\r\n\t\t// LEXER ACTIONS\r\n\t\t//\r\n\t\tif (atn.grammarType === ATNType.LEXER) {\r\n\t\t\tif (supportsLexerActions) {\r\n\t\t\t\tatn.lexerActions = new Array(ATNDeserializer.toInt(data[p++]));\r\n\t\t\t\tfor (let i = 0; i < atn.lexerActions.length; i++) {\r\n\t\t\t\t\tlet actionType: LexerActionType = 
ATNDeserializer.toInt(data[p++]);\r\n\t\t\t\t\tlet data1: number = ATNDeserializer.toInt(data[p++]);\r\n\t\t\t\t\tif (data1 === 0xFFFF) {\r\n\t\t\t\t\t\tdata1 = -1;\r\n\t\t\t\t\t}\r\n\r\n\t\t\t\t\tlet data2: number = ATNDeserializer.toInt(data[p++]);\r\n\t\t\t\t\tif (data2 === 0xFFFF) {\r\n\t\t\t\t\t\tdata2 = -1;\r\n\t\t\t\t\t}\r\n\r\n\t\t\t\t\tlet lexerAction: LexerAction = this.lexerActionFactory(actionType, data1, data2);\r\n\r\n\t\t\t\t\tatn.lexerActions[i] = lexerAction;\r\n\t\t\t\t}\r\n\t\t\t}\r\n\t\t\telse {\r\n\t\t\t\t// for compatibility with older serialized ATNs, convert the old\r\n\t\t\t\t// serialized action index for action transitions to the new\r\n\t\t\t\t// form, which is the index of a LexerCustomAction\r\n\t\t\t\tlet legacyLexerActions: LexerAction[] = [];\r\n\t\t\t\tfor (let state of atn.states) {\r\n\t\t\t\t\tfor (let i = 0; i < state.numberOfTransitions; i++) {\r\n\t\t\t\t\t\tlet transition: Transition = state.transition(i);\r\n\t\t\t\t\t\tif (!(transition instanceof ActionTransition)) {\r\n\t\t\t\t\t\t\tcontinue;\r\n\t\t\t\t\t\t}\r\n\r\n\t\t\t\t\t\tlet ruleIndex: number = transition.ruleIndex;\r\n\t\t\t\t\t\tlet actionIndex: number = transition.actionIndex;\r\n\t\t\t\t\t\tlet lexerAction: LexerCustomAction = new LexerCustomAction(ruleIndex, actionIndex);\r\n\t\t\t\t\t\tstate.setTransition(i, new ActionTransition(transition.target, ruleIndex, legacyLexerActions.length, false));\r\n\t\t\t\t\t\tlegacyLexerActions.push(lexerAction);\r\n\t\t\t\t\t}\r\n\t\t\t\t}\r\n\r\n\t\t\t\tatn.lexerActions = legacyLexerActions;\r\n\t\t\t}\r\n\t\t}\r\n\r\n\t\tthis.markPrecedenceDecisions(atn);\r\n\r\n\t\tatn.decisionToDFA = new Array(ndecisions);\r\n\t\tfor (let i = 0; i < ndecisions; i++) {\r\n\t\t\tatn.decisionToDFA[i] = new DFA(atn.decisionToState[i], i);\r\n\t\t}\r\n\r\n\t\tif (this.deserializationOptions.isVerifyATN) {\r\n\t\t\tthis.verifyATN(atn);\r\n\t\t}\r\n\r\n\t\tif (this.deserializationOptions.isGenerateRuleBypassTransitions && atn.grammarType === 
ATNType.PARSER) {\r\n\t\t\tatn.ruleToTokenType = new Int32Array(atn.ruleToStartState.length);\r\n\t\t\tfor (let i = 0; i < atn.ruleToStartState.length; i++) {\r\n\t\t\t\tatn.ruleToTokenType[i] = atn.maxTokenType + i + 1;\r\n\t\t\t}\r\n\r\n\t\t\tfor (let i = 0; i < atn.ruleToStartState.length; i++) {\r\n\t\t\t\tlet bypassStart: BasicBlockStartState = new BasicBlockStartState();\r\n\t\t\t\tbypassStart.ruleIndex = i;\r\n\t\t\t\tatn.addState(bypassStart);\r\n\r\n\t\t\t\tlet bypassStop: BlockEndState = new BlockEndState();\r\n\t\t\t\tbypassStop.ruleIndex = i;\r\n\t\t\t\tatn.addState(bypassStop);\r\n\r\n\t\t\t\tbypassStart.endState = bypassStop;\r\n\t\t\t\tatn.defineDecisionState(bypassStart);\r\n\r\n\t\t\t\tbypassStop.startState = bypassStart;\r\n\r\n\t\t\t\tlet endState: ATNState | undefined;\r\n\t\t\t\tlet excludeTransition: Transition | undefined;\r\n\t\t\t\tif (atn.ruleToStartState[i].isPrecedenceRule) {\r\n\t\t\t\t\t// wrap from the beginning of the rule to the StarLoopEntryState\r\n\t\t\t\t\tendState = undefined;\r\n\t\t\t\t\tfor (let state of atn.states) {\r\n\t\t\t\t\t\tif (state.ruleIndex !== i) {\r\n\t\t\t\t\t\t\tcontinue;\r\n\t\t\t\t\t\t}\r\n\r\n\t\t\t\t\t\tif (!(state instanceof StarLoopEntryState)) {\r\n\t\t\t\t\t\t\tcontinue;\r\n\t\t\t\t\t\t}\r\n\r\n\t\t\t\t\t\tlet maybeLoopEndState: ATNState = state.transition(state.numberOfTransitions - 1).target;\r\n\t\t\t\t\t\tif (!(maybeLoopEndState instanceof LoopEndState)) {\r\n\t\t\t\t\t\t\tcontinue;\r\n\t\t\t\t\t\t}\r\n\r\n\t\t\t\t\t\tif (maybeLoopEndState.epsilonOnlyTransitions && maybeLoopEndState.transition(0).target instanceof RuleStopState) {\r\n\t\t\t\t\t\t\tendState = state;\r\n\t\t\t\t\t\t\tbreak;\r\n\t\t\t\t\t\t}\r\n\t\t\t\t\t}\r\n\r\n\t\t\t\t\tif (!endState) {\r\n\t\t\t\t\t\tthrow new Error(\"Couldn't identify final state of the precedence rule prefix section.\");\r\n\t\t\t\t\t}\r\n\r\n\t\t\t\t\texcludeTransition = (endState as StarLoopEntryState).loopBackState.transition(0);\r\n\t\t\t\t}\r\n\t\t\t\telse 
{\r\n\t\t\t\t\tendState = atn.ruleToStopState[i];\r\n\t\t\t\t}\r\n\r\n\t\t\t\t// all non-excluded transitions that currently target end state need to target blockEnd instead\r\n\t\t\t\tfor (let state of atn.states) {\r\n\t\t\t\t\tfor (let i = 0; i < state.numberOfTransitions; i++) {\r\n\t\t\t\t\t\tlet transition = state.transition(i);\r\n\t\t\t\t\t\tif (transition === excludeTransition) {\r\n\t\t\t\t\t\t\tcontinue;\r\n\t\t\t\t\t\t}\r\n\r\n\t\t\t\t\t\tif (transition.target === endState) {\r\n\t\t\t\t\t\t\ttransition.target = bypassStop;\r\n\t\t\t\t\t\t}\r\n\t\t\t\t\t}\r\n\t\t\t\t}\r\n\r\n\t\t\t\t// all transitions leaving the rule start state need to leave blockStart instead\r\n\t\t\t\twhile (atn.ruleToStartState[i].numberOfTransitions > 0) {\r\n\t\t\t\t\tlet transition: Transition = atn.ruleToStartState[i].removeTransition(atn.ruleToStartState[i].numberOfTransitions - 1);\r\n\t\t\t\t\tbypassStart.addTransition(transition);\r\n\t\t\t\t}\r\n\r\n\t\t\t\t// link the new states\r\n\t\t\t\tatn.ruleToStartState[i].addTransition(new EpsilonTransition(bypassStart));\r\n\t\t\t\tbypassStop.addTransition(new EpsilonTransition(endState));\r\n\r\n\t\t\t\tlet matchState: ATNState = new BasicState();\r\n\t\t\t\tatn.addState(matchState);\r\n\t\t\t\tmatchState.addTransition(new AtomTransition(bypassStop, atn.ruleToTokenType[i]));\r\n\t\t\t\tbypassStart.addTransition(new EpsilonTransition(matchState));\r\n\t\t\t}\r\n\r\n\t\t\tif (this.deserializationOptions.isVerifyATN) {\r\n\t\t\t\t// reverify after modification\r\n\t\t\t\tthis.verifyATN(atn);\r\n\t\t\t}\r\n\t\t}\r\n\r\n\t\tif (this.deserializationOptions.isOptimize) {\r\n\t\t\twhile (true) {\r\n\t\t\t\tlet optimizationCount: number = 0;\r\n\t\t\t\toptimizationCount += ATNDeserializer.inlineSetRules(atn);\r\n\t\t\t\toptimizationCount += ATNDeserializer.combineChainedEpsilons(atn);\r\n\t\t\t\tlet preserveOrder: boolean = atn.grammarType === ATNType.LEXER;\r\n\t\t\t\toptimizationCount += ATNDeserializer.optimizeSets(atn, 
preserveOrder);\r\n\t\t\t\tif (optimizationCount === 0) {\r\n\t\t\t\t\tbreak;\r\n\t\t\t\t}\r\n\t\t\t}\r\n\r\n\t\t\tif (this.deserializationOptions.isVerifyATN) {\r\n\t\t\t\t// reverify after modification\r\n\t\t\t\tthis.verifyATN(atn);\r\n\t\t\t}\r\n\t\t}\r\n\r\n\t\tATNDeserializer.identifyTailCalls(atn);\r\n\r\n\t\treturn atn;\r\n\t}\r\n\r\n\tprivate deserializeSets(data: Uint16Array, p: number, sets: IntervalSet[], unicodeDeserializer: UnicodeDeserializer): number {\r\n\t\tlet nsets: number = ATNDeserializer.toInt(data[p++]);\r\n\t\tfor (let i = 0; i < nsets; i++) {\r\n\t\t\tlet nintervals: number = ATNDeserializer.toInt(data[p]);\r\n\t\t\tp++;\r\n\t\t\tlet set: IntervalSet = new IntervalSet();\r\n\t\t\tsets.push(set);\r\n\r\n\t\t\tlet containsEof: boolean = ATNDeserializer.toInt(data[p++]) !== 0;\r\n\t\t\tif (containsEof) {\r\n\t\t\t\tset.add(-1);\r\n\t\t\t}\r\n\r\n\t\t\tfor (let j: number = 0; j < nintervals; j++) {\r\n\t\t\t\tlet a: number = unicodeDeserializer.readUnicode(data, p);\r\n\t\t\t\tp += unicodeDeserializer.size;\r\n\t\t\t\tlet b: number = unicodeDeserializer.readUnicode(data, p);\r\n\t\t\t\tp += unicodeDeserializer.size;\r\n\t\t\t\tset.add(a, b);\r\n\t\t\t}\r\n\t\t}\r\n\r\n\t\treturn p;\r\n\t}\r\n\r\n\t/**\r\n\t * Analyze the {@link StarLoopEntryState} states in the specified ATN to set\r\n\t * the {@link StarLoopEntryState#precedenceRuleDecision} field to the\r\n\t * correct value.\r\n\t *\r\n\t * @param atn The ATN.\r\n\t */\r\n\tprotected markPrecedenceDecisions(@NotNull atn: ATN): void {\r\n\t\t// Map rule index -> precedence decision for that rule\r\n\t\tlet rulePrecedenceDecisions = new Map();\r\n\r\n\t\tfor (let state of atn.states) {\r\n\t\t\tif (!(state instanceof StarLoopEntryState)) {\r\n\t\t\t\tcontinue;\r\n\t\t\t}\r\n\r\n\t\t\t/* We analyze the ATN to determine if this ATN decision state is the\r\n\t\t\t * decision for the closure block that determines whether a\r\n\t\t\t * precedence rule should continue or complete.\r\n\t\t\t 
*/\r\n\t\t\tif (atn.ruleToStartState[state.ruleIndex].isPrecedenceRule) {\r\n\t\t\t\tlet maybeLoopEndState: ATNState = state.transition(state.numberOfTransitions - 1).target;\r\n\t\t\t\tif (maybeLoopEndState instanceof LoopEndState) {\r\n\t\t\t\t\tif (maybeLoopEndState.epsilonOnlyTransitions && maybeLoopEndState.transition(0).target instanceof RuleStopState) {\r\n\t\t\t\t\t\trulePrecedenceDecisions.set(state.ruleIndex, state);\r\n\t\t\t\t\t\tstate.precedenceRuleDecision = true;\r\n\t\t\t\t\t\tstate.precedenceLoopbackStates = new BitSet(atn.states.length);\r\n\t\t\t\t\t}\r\n\t\t\t\t}\r\n\t\t\t}\r\n\t\t}\r\n\r\n\t\t// After marking precedence decisions, we go back through and fill in\r\n\t\t// StarLoopEntryState.precedenceLoopbackStates.\r\n\t\tfor (let precedenceDecision of rulePrecedenceDecisions) {\r\n\t\t\tfor (let transition of atn.ruleToStopState[precedenceDecision[0]].getTransitions()) {\r\n\t\t\t\tif (transition.serializationType !== TransitionType.EPSILON) {\r\n\t\t\t\t\tcontinue;\r\n\t\t\t\t}\r\n\r\n\t\t\t\tlet epsilonTransition = transition as EpsilonTransition;\r\n\t\t\t\tif (epsilonTransition.outermostPrecedenceReturn !== -1) {\r\n\t\t\t\t\tcontinue;\r\n\t\t\t\t}\r\n\r\n\t\t\t\tprecedenceDecision[1].precedenceLoopbackStates.set(transition.target.stateNumber);\r\n\t\t\t}\r\n\t\t}\r\n\t}\r\n\r\n\tprotected verifyATN(atn: ATN): void {\r\n\t\t// verify assumptions\r\n\t\tfor (let state of atn.states) {\r\n\t\t\tthis.checkCondition(state !== undefined, \"ATN states should not be undefined.\");\r\n\t\t\tif (state.stateType === ATNStateType.INVALID_TYPE) {\r\n\t\t\t\tcontinue;\r\n\t\t\t}\r\n\r\n\t\t\tthis.checkCondition(state.onlyHasEpsilonTransitions || state.numberOfTransitions <= 1);\r\n\r\n\t\t\tif (state instanceof PlusBlockStartState) {\r\n\t\t\t\tthis.checkCondition(state.loopBackState !== undefined);\r\n\t\t\t}\r\n\r\n\t\t\tif (state instanceof StarLoopEntryState) {\r\n\t\t\t\tlet starLoopEntryState: StarLoopEntryState = 
state;\r\n\t\t\t\tthis.checkCondition(starLoopEntryState.loopBackState !== undefined);\r\n\t\t\t\tthis.checkCondition(starLoopEntryState.numberOfTransitions === 2);\r\n\r\n\t\t\t\tif (starLoopEntryState.transition(0).target instanceof StarBlockStartState) {\r\n\t\t\t\t\tthis.checkCondition(starLoopEntryState.transition(1).target instanceof LoopEndState);\r\n\t\t\t\t\tthis.checkCondition(!starLoopEntryState.nonGreedy);\r\n\t\t\t\t}\r\n\t\t\t\telse if (starLoopEntryState.transition(0).target instanceof LoopEndState) {\r\n\t\t\t\t\tthis.checkCondition(starLoopEntryState.transition(1).target instanceof StarBlockStartState);\r\n\t\t\t\t\tthis.checkCondition(starLoopEntryState.nonGreedy);\r\n\t\t\t\t}\r\n\t\t\t\telse {\r\n\t\t\t\t\tthrow new Error(\"IllegalStateException\");\r\n\t\t\t\t}\r\n\t\t\t}\r\n\r\n\t\t\tif (state instanceof StarLoopbackState) {\r\n\t\t\t\tthis.checkCondition(state.numberOfTransitions === 1);\r\n\t\t\t\tthis.checkCondition(state.transition(0).target instanceof StarLoopEntryState);\r\n\t\t\t}\r\n\r\n\t\t\tif (state instanceof LoopEndState) {\r\n\t\t\t\tthis.checkCondition(state.loopBackState !== undefined);\r\n\t\t\t}\r\n\r\n\t\t\tif (state instanceof RuleStartState) {\r\n\t\t\t\tthis.checkCondition(state.stopState !== undefined);\r\n\t\t\t}\r\n\r\n\t\t\tif (state instanceof BlockStartState) {\r\n\t\t\t\tthis.checkCondition(state.endState !== undefined);\r\n\t\t\t}\r\n\r\n\t\t\tif (state instanceof BlockEndState) {\r\n\t\t\t\tthis.checkCondition(state.startState !== undefined);\r\n\t\t\t}\r\n\r\n\t\t\tif (state instanceof DecisionState) {\r\n\t\t\t\tlet decisionState: DecisionState = state;\r\n\t\t\t\tthis.checkCondition(decisionState.numberOfTransitions <= 1 || decisionState.decision >= 0);\r\n\t\t\t}\r\n\t\t\telse {\r\n\t\t\t\tthis.checkCondition(state.numberOfTransitions <= 1 || state instanceof RuleStopState);\r\n\t\t\t}\r\n\t\t}\r\n\t}\r\n\r\n\tprotected checkCondition(condition: boolean, message?: string): void {\r\n\t\tif (!condition) 
{\r\n\t\t\tthrow new Error(\"IllegalStateException: \" + message);\r\n\t\t}\r\n\t}\r\n\r\n\tprivate static inlineSetRules(atn: ATN): number {\r\n\t\tlet inlinedCalls: number = 0;\r\n\r\n\t\tlet ruleToInlineTransition = new Array(atn.ruleToStartState.length);\r\n\t\tfor (let i = 0; i < atn.ruleToStartState.length; i++) {\r\n\t\t\tlet startState: RuleStartState = atn.ruleToStartState[i];\r\n\t\t\tlet middleState: ATNState = startState;\r\n\t\t\twhile (middleState.onlyHasEpsilonTransitions\r\n\t\t\t\t&& middleState.numberOfOptimizedTransitions === 1\r\n\t\t\t\t&& middleState.getOptimizedTransition(0).serializationType === TransitionType.EPSILON) {\r\n\t\t\t\tmiddleState = middleState.getOptimizedTransition(0).target;\r\n\t\t\t}\r\n\r\n\t\t\tif (middleState.numberOfOptimizedTransitions !== 1) {\r\n\t\t\t\tcontinue;\r\n\t\t\t}\r\n\r\n\t\t\tlet matchTransition: Transition = middleState.getOptimizedTransition(0);\r\n\t\t\tlet matchTarget: ATNState = matchTransition.target;\r\n\t\t\tif (matchTransition.isEpsilon\r\n\t\t\t\t|| !matchTarget.onlyHasEpsilonTransitions\r\n\t\t\t\t|| matchTarget.numberOfOptimizedTransitions !== 1\r\n\t\t\t\t|| !(matchTarget.getOptimizedTransition(0).target instanceof RuleStopState)) {\r\n\t\t\t\tcontinue;\r\n\t\t\t}\r\n\r\n\t\t\tswitch (matchTransition.serializationType) {\r\n\t\t\tcase TransitionType.ATOM:\r\n\t\t\tcase TransitionType.RANGE:\r\n\t\t\tcase TransitionType.SET:\r\n\t\t\t\truleToInlineTransition[i] = matchTransition;\r\n\t\t\t\tbreak;\r\n\r\n\t\t\tcase TransitionType.NOT_SET:\r\n\t\t\tcase TransitionType.WILDCARD:\r\n\t\t\t\t// not implemented yet\r\n\t\t\t\tcontinue;\r\n\r\n\t\t\tdefault:\r\n\t\t\t\tcontinue;\r\n\t\t\t}\r\n\t\t}\r\n\r\n\t\tfor (let state of atn.states) {\r\n\t\t\tif (state.ruleIndex < 0) {\r\n\t\t\t\tcontinue;\r\n\t\t\t}\r\n\r\n\t\t\tlet optimizedTransitions: Transition[] | undefined;\r\n\t\t\tfor (let i = 0; i < state.numberOfOptimizedTransitions; i++) {\r\n\t\t\t\tlet transition: Transition = 
state.getOptimizedTransition(i);\r\n\t\t\t\tif (!(transition instanceof RuleTransition)) {\r\n\t\t\t\t\tif (optimizedTransitions !== undefined) {\r\n\t\t\t\t\t\toptimizedTransitions.push(transition);\r\n\t\t\t\t\t}\r\n\r\n\t\t\t\t\tcontinue;\r\n\t\t\t\t}\r\n\r\n\t\t\t\tlet ruleTransition: RuleTransition = transition;\r\n\t\t\t\tlet effective: Transition | undefined = ruleToInlineTransition[ruleTransition.target.ruleIndex];\r\n\t\t\t\tif (effective === undefined) {\r\n\t\t\t\t\tif (optimizedTransitions !== undefined) {\r\n\t\t\t\t\t\toptimizedTransitions.push(transition);\r\n\t\t\t\t\t}\r\n\r\n\t\t\t\t\tcontinue;\r\n\t\t\t\t}\r\n\r\n\t\t\t\tif (optimizedTransitions === undefined) {\r\n\t\t\t\t\toptimizedTransitions = [];\r\n\t\t\t\t\tfor (let j = 0; j < i; j++) {\r\n\t\t\t\t\t\toptimizedTransitions.push(state.getOptimizedTransition(i));\r\n\t\t\t\t\t}\r\n\t\t\t\t}\r\n\r\n\t\t\t\tinlinedCalls++;\r\n\t\t\t\tlet target: ATNState = ruleTransition.followState;\r\n\t\t\t\tlet intermediateState: ATNState = new BasicState();\r\n\t\t\t\tintermediateState.setRuleIndex(target.ruleIndex);\r\n\t\t\t\tatn.addState(intermediateState);\r\n\t\t\t\toptimizedTransitions.push(new EpsilonTransition(intermediateState));\r\n\r\n\t\t\t\tswitch (effective.serializationType) {\r\n\t\t\t\tcase TransitionType.ATOM:\r\n\t\t\t\t\tintermediateState.addTransition(new AtomTransition(target, (effective as AtomTransition)._label));\r\n\t\t\t\t\tbreak;\r\n\r\n\t\t\t\tcase TransitionType.RANGE:\r\n\t\t\t\t\tintermediateState.addTransition(new RangeTransition(target, (effective as RangeTransition).from, (effective as RangeTransition).to));\r\n\t\t\t\t\tbreak;\r\n\r\n\t\t\t\tcase TransitionType.SET:\r\n\t\t\t\t\tintermediateState.addTransition(new SetTransition(target, (effective as SetTransition).label));\r\n\t\t\t\t\tbreak;\r\n\r\n\t\t\t\tdefault:\r\n\t\t\t\t\tthrow new Error(\"UnsupportedOperationException\");\r\n\t\t\t\t}\r\n\t\t\t}\r\n\r\n\t\t\tif (optimizedTransitions !== undefined) {\r\n\t\t\t\tif 
(state.isOptimized) {\r\n\t\t\t\t\twhile (state.numberOfOptimizedTransitions > 0) {\r\n\t\t\t\t\t\tstate.removeOptimizedTransition(state.numberOfOptimizedTransitions - 1);\r\n\t\t\t\t\t}\r\n\t\t\t\t}\r\n\r\n\t\t\t\tfor (let transition of optimizedTransitions) {\r\n\t\t\t\t\tstate.addOptimizedTransition(transition);\r\n\t\t\t\t}\r\n\t\t\t}\r\n\t\t}\r\n\r\n\t\tif (ParserATNSimulator.debug) {\r\n\t\t\tconsole.log(\"ATN runtime optimizer removed \" + inlinedCalls + \" rule invocations by inlining sets.\");\r\n\t\t}\r\n\r\n\t\treturn inlinedCalls;\r\n\t}\r\n\r\n\tprivate static combineChainedEpsilons(atn: ATN): number {\r\n\t\tlet removedEdges: number = 0;\r\n\r\n\t\tfor (let state of atn.states) {\r\n\t\t\tif (!state.onlyHasEpsilonTransitions || state instanceof RuleStopState) {\r\n\t\t\t\tcontinue;\r\n\t\t\t}\r\n\r\n\t\t\tlet optimizedTransitions: Transition[] | undefined;\r\n\t\t\tnextTransition:\r\n\t\t\tfor (let i = 0; i < state.numberOfOptimizedTransitions; i++) {\r\n\t\t\t\tlet transition: Transition = state.getOptimizedTransition(i);\r\n\t\t\t\tlet intermediate: ATNState = transition.target;\r\n\t\t\t\tif (transition.serializationType !== TransitionType.EPSILON\r\n\t\t\t\t\t|| (transition as EpsilonTransition).outermostPrecedenceReturn !== -1\r\n\t\t\t\t\t|| intermediate.stateType !== ATNStateType.BASIC\r\n\t\t\t\t\t|| !intermediate.onlyHasEpsilonTransitions) {\r\n\t\t\t\t\tif (optimizedTransitions !== undefined) {\r\n\t\t\t\t\t\toptimizedTransitions.push(transition);\r\n\t\t\t\t\t}\r\n\r\n\t\t\t\t\tcontinue nextTransition;\r\n\t\t\t\t}\r\n\r\n\t\t\t\tfor (let j = 0; j < intermediate.numberOfOptimizedTransitions; j++) {\r\n\t\t\t\t\tif (intermediate.getOptimizedTransition(j).serializationType !== TransitionType.EPSILON\r\n\t\t\t\t\t\t|| (intermediate.getOptimizedTransition(j) as EpsilonTransition).outermostPrecedenceReturn !== -1) {\r\n\t\t\t\t\t\tif (optimizedTransitions !== undefined) 
{\r\n\t\t\t\t\t\t\toptimizedTransitions.push(transition);\r\n\t\t\t\t\t\t}\r\n\r\n\t\t\t\t\t\tcontinue nextTransition;\r\n\t\t\t\t\t}\r\n\t\t\t\t}\r\n\r\n\t\t\t\tremovedEdges++;\r\n\t\t\t\tif (optimizedTransitions === undefined) {\r\n\t\t\t\t\toptimizedTransitions = [];\r\n\t\t\t\t\tfor (let j = 0; j < i; j++) {\r\n\t\t\t\t\t\toptimizedTransitions.push(state.getOptimizedTransition(j));\r\n\t\t\t\t\t}\r\n\t\t\t\t}\r\n\r\n\t\t\t\tfor (let j = 0; j < intermediate.numberOfOptimizedTransitions; j++) {\r\n\t\t\t\t\tlet target: ATNState = intermediate.getOptimizedTransition(j).target;\r\n\t\t\t\t\toptimizedTransitions.push(new EpsilonTransition(target));\r\n\t\t\t\t}\r\n\t\t\t}\r\n\r\n\t\t\tif (optimizedTransitions !== undefined) {\r\n\t\t\t\tif (state.isOptimized) {\r\n\t\t\t\t\twhile (state.numberOfOptimizedTransitions > 0) {\r\n\t\t\t\t\t\tstate.removeOptimizedTransition(state.numberOfOptimizedTransitions - 1);\r\n\t\t\t\t\t}\r\n\t\t\t\t}\r\n\r\n\t\t\t\tfor (let transition of optimizedTransitions) {\r\n\t\t\t\t\tstate.addOptimizedTransition(transition);\r\n\t\t\t\t}\r\n\t\t\t}\r\n\t\t}\r\n\r\n\t\tif (ParserATNSimulator.debug) {\r\n\t\t\tconsole.log(\"ATN runtime optimizer removed \" + removedEdges + \" transitions by combining chained epsilon transitions.\");\r\n\t\t}\r\n\r\n\t\treturn removedEdges;\r\n\t}\r\n\r\n\tprivate static optimizeSets(atn: ATN, preserveOrder: boolean): number {\r\n\t\tif (preserveOrder) {\r\n\t\t\t// this optimization currently doesn't preserve edge order.\r\n\t\t\treturn 0;\r\n\t\t}\r\n\r\n\t\tlet removedPaths: number = 0;\r\n\t\tlet decisions: DecisionState[] = atn.decisionToState;\r\n\t\tfor (let decision of decisions) {\r\n\t\t\tlet setTransitions: IntervalSet = new IntervalSet();\r\n\t\t\tfor (let i = 0; i < decision.numberOfOptimizedTransitions; i++) {\r\n\t\t\t\tlet epsTransition: Transition = decision.getOptimizedTransition(i);\r\n\t\t\t\tif (!(epsTransition instanceof EpsilonTransition)) 
{\r\n\t\t\t\t\tcontinue;\r\n\t\t\t\t}\r\n\r\n\t\t\t\tif (epsTransition.target.numberOfOptimizedTransitions !== 1) {\r\n\t\t\t\t\tcontinue;\r\n\t\t\t\t}\r\n\r\n\t\t\t\tlet transition: Transition = epsTransition.target.getOptimizedTransition(0);\r\n\t\t\t\tif (!(transition.target instanceof BlockEndState)) {\r\n\t\t\t\t\tcontinue;\r\n\t\t\t\t}\r\n\r\n\t\t\t\tif (transition instanceof NotSetTransition) {\r\n\t\t\t\t\t// TODO: not yet implemented\r\n\t\t\t\t\tcontinue;\r\n\t\t\t\t}\r\n\r\n\t\t\t\tif (transition instanceof AtomTransition\r\n\t\t\t\t\t|| transition instanceof RangeTransition\r\n\t\t\t\t\t|| transition instanceof SetTransition) {\r\n\t\t\t\t\tsetTransitions.add(i);\r\n\t\t\t\t}\r\n\t\t\t}\r\n\r\n\t\t\tif (setTransitions.size <= 1) {\r\n\t\t\t\tcontinue;\r\n\t\t\t}\r\n\r\n\t\t\tlet optimizedTransitions: Transition[] = [];\r\n\t\t\tfor (let i = 0; i < decision.numberOfOptimizedTransitions; i++) {\r\n\t\t\t\tif (!setTransitions.contains(i)) {\r\n\t\t\t\t\toptimizedTransitions.push(decision.getOptimizedTransition(i));\r\n\t\t\t\t}\r\n\t\t\t}\r\n\r\n\t\t\tlet blockEndState: ATNState = decision.getOptimizedTransition(setTransitions.minElement).target.getOptimizedTransition(0).target;\r\n\t\t\tlet matchSet: IntervalSet = new IntervalSet();\r\n\t\t\tfor (let interval of setTransitions.intervals) {\r\n\t\t\t\tfor (let j = interval.a; j <= interval.b; j++) {\r\n\t\t\t\t\tlet matchTransition: Transition = decision.getOptimizedTransition(j).target.getOptimizedTransition(0);\r\n\t\t\t\t\tif (matchTransition instanceof NotSetTransition) {\r\n\t\t\t\t\t\tthrow new Error(\"Not yet implemented.\");\r\n\t\t\t\t\t} else {\r\n\t\t\t\t\t\tmatchSet.addAll(matchTransition.label as IntervalSet);\r\n\t\t\t\t\t}\r\n\t\t\t\t}\r\n\t\t\t}\r\n\r\n\t\t\tlet newTransition: Transition;\r\n\t\t\tif (matchSet.intervals.length === 1) {\r\n\t\t\t\tif (matchSet.size === 1) {\r\n\t\t\t\t\tnewTransition = new AtomTransition(blockEndState, matchSet.minElement);\r\n\t\t\t\t} else 
{\r\n\t\t\t\t\tlet matchInterval: Interval = matchSet.intervals[0];\r\n\t\t\t\t\tnewTransition = new RangeTransition(blockEndState, matchInterval.a, matchInterval.b);\r\n\t\t\t\t}\r\n\t\t\t} else {\r\n\t\t\t\tnewTransition = new SetTransition(blockEndState, matchSet);\r\n\t\t\t}\r\n\r\n\t\t\tlet setOptimizedState: ATNState = new BasicState();\r\n\t\t\tsetOptimizedState.setRuleIndex(decision.ruleIndex);\r\n\t\t\tatn.addState(setOptimizedState);\r\n\r\n\t\t\tsetOptimizedState.addTransition(newTransition);\r\n\t\t\toptimizedTransitions.push(new EpsilonTransition(setOptimizedState));\r\n\r\n\t\t\tremovedPaths += decision.numberOfOptimizedTransitions - optimizedTransitions.length;\r\n\r\n\t\t\tif (decision.isOptimized) {\r\n\t\t\t\twhile (decision.numberOfOptimizedTransitions > 0) {\r\n\t\t\t\t\tdecision.removeOptimizedTransition(decision.numberOfOptimizedTransitions - 1);\r\n\t\t\t\t}\r\n\t\t\t}\r\n\r\n\t\t\tfor (let transition of optimizedTransitions) {\r\n\t\t\t\tdecision.addOptimizedTransition(transition);\r\n\t\t\t}\r\n\t\t}\r\n\r\n\t\tif (ParserATNSimulator.debug) {\r\n\t\t\tconsole.log(\"ATN runtime optimizer removed \" + removedPaths + \" paths by collapsing sets.\");\r\n\t\t}\r\n\r\n\t\treturn removedPaths;\r\n\t}\r\n\r\n\tprivate static identifyTailCalls(atn: ATN): void {\r\n\t\tfor (let state of atn.states) {\r\n\t\t\tfor (let i = 0; i < state.numberOfTransitions; i++) {\r\n\t\t\t\tlet transition = state.transition(i);\r\n\t\t\t\tif (!(transition instanceof RuleTransition)) {\r\n\t\t\t\t\tcontinue;\r\n\t\t\t\t}\r\n\r\n\t\t\t\ttransition.tailCall = this.testTailCall(atn, transition, false);\r\n\t\t\t\ttransition.optimizedTailCall = this.testTailCall(atn, transition, true);\r\n\t\t\t}\r\n\r\n\t\t\tif (!state.isOptimized) {\r\n\t\t\t\tcontinue;\r\n\t\t\t}\r\n\r\n\t\t\tfor (let i = 0; i < state.numberOfOptimizedTransitions; i++) {\r\n\t\t\t\tlet transition = state.getOptimizedTransition(i);\r\n\t\t\t\tif (!(transition instanceof RuleTransition)) 
{\r\n\t\t\t\t\tcontinue;\r\n\t\t\t\t}\r\n\r\n\t\t\t\ttransition.tailCall = this.testTailCall(atn, transition, false);\r\n\t\t\t\ttransition.optimizedTailCall = this.testTailCall(atn, transition, true);\r\n\t\t\t}\r\n\t\t}\r\n\t}\r\n\r\n\tprivate static testTailCall(atn: ATN, transition: RuleTransition, optimizedPath: boolean): boolean {\r\n\t\tif (!optimizedPath && transition.tailCall) {\r\n\t\t\treturn true;\r\n\t\t}\r\n\t\tif (optimizedPath && transition.optimizedTailCall) {\r\n\t\t\treturn true;\r\n\t\t}\r\n\r\n\t\tlet reachable: BitSet = new BitSet(atn.states.length);\r\n\t\tlet worklist: ATNState[] = [];\r\n\t\tworklist.push(transition.followState);\r\n\t\twhile (true) {\r\n\t\t\tlet state = worklist.pop();\r\n\t\t\tif (!state) {\r\n\t\t\t\tbreak;\r\n\t\t\t}\r\n\r\n\t\t\tif (reachable.get(state.stateNumber)) {\r\n\t\t\t\tcontinue;\r\n\t\t\t}\r\n\r\n\t\t\tif (state instanceof RuleStopState) {\r\n\t\t\t\tcontinue;\r\n\t\t\t}\r\n\r\n\t\t\tif (!state.onlyHasEpsilonTransitions) {\r\n\t\t\t\treturn false;\r\n\t\t\t}\r\n\r\n\t\t\tlet transitionCount = optimizedPath ? state.numberOfOptimizedTransitions : state.numberOfTransitions;\r\n\t\t\tfor (let i = 0; i < transitionCount; i++) {\r\n\t\t\t\tlet t = optimizedPath ? 
state.getOptimizedTransition(i) : state.transition(i);\r\n\t\t\t\tif (t.serializationType !== TransitionType.EPSILON) {\r\n\t\t\t\t\treturn false;\r\n\t\t\t\t}\r\n\r\n\t\t\t\tworklist.push(t.target);\r\n\t\t\t}\r\n\t\t}\r\n\r\n\t\treturn true;\r\n\t}\r\n\r\n\tprotected static toInt(c: number): number {\r\n\t\treturn c;\r\n\t}\r\n\r\n\tprotected static toInt32(data: Uint16Array, offset: number): number {\r\n\t\treturn (data[offset] | (data[offset + 1] << 16)) >>> 0;\r\n\t}\r\n\r\n\tprotected static toUUID(data: Uint16Array, offset: number): UUID {\r\n\t\tlet leastSigBits: number = ATNDeserializer.toInt32(data, offset);\r\n\t\tlet lessSigBits: number = ATNDeserializer.toInt32(data, offset + 2);\r\n\t\tlet moreSigBits: number = ATNDeserializer.toInt32(data, offset + 4);\r\n\t\tlet mostSigBits: number = ATNDeserializer.toInt32(data, offset + 6);\r\n\t\treturn new UUID(mostSigBits, moreSigBits, lessSigBits, leastSigBits);\r\n\t}\r\n\r\n\t@NotNull\r\n\tprotected edgeFactory(\r\n\t\t@NotNull atn: ATN,\r\n\t\ttype: TransitionType, src: number, trg: number,\r\n\t\targ1: number, arg2: number, arg3: number,\r\n\t\tsets: IntervalSet[]): Transition {\r\n\t\tlet target: ATNState = atn.states[trg];\r\n\t\tswitch (type) {\r\n\t\t\tcase TransitionType.EPSILON: return new EpsilonTransition(target);\r\n\t\t\tcase TransitionType.RANGE:\r\n\t\t\t\tif (arg3 !== 0) {\r\n\t\t\t\t\treturn new RangeTransition(target, Token.EOF, arg2);\r\n\t\t\t\t}\r\n\t\t\t\telse {\r\n\t\t\t\t\treturn new RangeTransition(target, arg1, arg2);\r\n\t\t\t\t}\r\n\t\t\tcase TransitionType.RULE:\r\n\t\t\t\tlet rt: RuleTransition = new RuleTransition(atn.states[arg1] as RuleStartState, arg2, arg3, target);\r\n\t\t\t\treturn rt;\r\n\t\t\tcase TransitionType.PREDICATE:\r\n\t\t\t\tlet pt: PredicateTransition = new PredicateTransition(target, arg1, arg2, arg3 !== 0);\r\n\t\t\t\treturn pt;\r\n\t\t\tcase TransitionType.PRECEDENCE:\r\n\t\t\t\treturn new PrecedencePredicateTransition(target, arg1);\r\n\t\t\tcase 
TransitionType.ATOM:\r\n\t\t\t\tif (arg3 !== 0) {\r\n\t\t\t\t\treturn new AtomTransition(target, Token.EOF);\r\n\t\t\t\t}\r\n\t\t\t\telse {\r\n\t\t\t\t\treturn new AtomTransition(target, arg1);\r\n\t\t\t\t}\r\n\t\t\tcase TransitionType.ACTION:\r\n\t\t\t\tlet a: ActionTransition = new ActionTransition(target, arg1, arg2, arg3 !== 0);\r\n\t\t\t\treturn a;\r\n\t\t\tcase TransitionType.SET: return new SetTransition(target, sets[arg1]);\r\n\t\t\tcase TransitionType.NOT_SET: return new NotSetTransition(target, sets[arg1]);\r\n\t\t\tcase TransitionType.WILDCARD: return new WildcardTransition(target);\r\n\t\t}\r\n\r\n\t\tthrow new Error(\"The specified transition type is not valid.\");\r\n\t}\r\n\r\n\tprotected stateFactory(type: ATNStateType, ruleIndex: number): ATNState {\r\n\t\tlet s: ATNState;\r\n\t\tswitch (type) {\r\n\t\t\tcase ATNStateType.INVALID_TYPE: return new InvalidState();\r\n\t\t\tcase ATNStateType.BASIC: s = new BasicState(); break;\r\n\t\t\tcase ATNStateType.RULE_START: s = new RuleStartState(); break;\r\n\t\t\tcase ATNStateType.BLOCK_START: s = new BasicBlockStartState(); break;\r\n\t\t\tcase ATNStateType.PLUS_BLOCK_START: s = new PlusBlockStartState(); break;\r\n\t\t\tcase ATNStateType.STAR_BLOCK_START: s = new StarBlockStartState(); break;\r\n\t\t\tcase ATNStateType.TOKEN_START: s = new TokensStartState(); break;\r\n\t\t\tcase ATNStateType.RULE_STOP: s = new RuleStopState(); break;\r\n\t\t\tcase ATNStateType.BLOCK_END: s = new BlockEndState(); break;\r\n\t\t\tcase ATNStateType.STAR_LOOP_BACK: s = new StarLoopbackState(); break;\r\n\t\t\tcase ATNStateType.STAR_LOOP_ENTRY: s = new StarLoopEntryState(); break;\r\n\t\t\tcase ATNStateType.PLUS_LOOP_BACK: s = new PlusLoopbackState(); break;\r\n\t\t\tcase ATNStateType.LOOP_END: s = new LoopEndState(); break;\r\n\t\t\tdefault:\r\n\t\t\t\tlet message: string = `The specified state type ${type} is not valid.`;\r\n\t\t\t\tthrow new Error(message);\r\n\t\t}\r\n\r\n\t\ts.ruleIndex = ruleIndex;\r\n\t\treturn 
s;\r\n\t}\r\n\r\n\tprotected lexerActionFactory(type: LexerActionType, data1: number, data2: number): LexerAction {\r\n\t\tswitch (type) {\r\n\t\tcase LexerActionType.CHANNEL:\r\n\t\t\treturn new LexerChannelAction(data1);\r\n\r\n\t\tcase LexerActionType.CUSTOM:\r\n\t\t\treturn new LexerCustomAction(data1, data2);\r\n\r\n\t\tcase LexerActionType.MODE:\r\n\t\t\treturn new LexerModeAction(data1);\r\n\r\n\t\tcase LexerActionType.MORE:\r\n\t\t\treturn LexerMoreAction.INSTANCE;\r\n\r\n\t\tcase LexerActionType.POP_MODE:\r\n\t\t\treturn LexerPopModeAction.INSTANCE;\r\n\r\n\t\tcase LexerActionType.PUSH_MODE:\r\n\t\t\treturn new LexerPushModeAction(data1);\r\n\r\n\t\tcase LexerActionType.SKIP:\r\n\t\t\treturn LexerSkipAction.INSTANCE;\r\n\r\n\t\tcase LexerActionType.TYPE:\r\n\t\t\treturn new LexerTypeAction(data1);\r\n\r\n\t\tdefault:\r\n\t\t\tlet message: string = `The specified lexer action type ${type} is not valid.`;\r\n\t\t\tthrow new Error(message);\r\n\t\t}\r\n\t}\r\n}\r\n", "/*!\r\n * Copyright 2016 The ANTLR Project. All rights reserved.\r\n * Licensed under the BSD-3-Clause license. 
See LICENSE file in the project root for license information.\r\n */\r\n\r\n// ConvertTo-TS run at 2016-10-04T11:26:31.0349605-07:00\r\n\r\nimport { DecisionInfo } from \"./DecisionInfo\";\r\nimport { DFA } from \"../dfa/DFA\";\r\nimport { NotNull } from \"../Decorators\";\r\nimport { ProfilingATNSimulator } from \"./ProfilingATNSimulator\";\r\n\r\n/**\r\n * This class provides access to specific and aggregate statistics gathered\r\n * during profiling of a parser.\r\n *\r\n * @since 4.3\r\n */\r\nexport class ParseInfo {\r\n\tprotected atnSimulator: ProfilingATNSimulator;\r\n\r\n\tconstructor(@NotNull atnSimulator: ProfilingATNSimulator) {\r\n\t\tthis.atnSimulator = atnSimulator;\r\n\t}\r\n\r\n\t/**\r\n\t * Gets an array of {@link DecisionInfo} instances containing the profiling\r\n\t * information gathered for each decision in the ATN.\r\n\t *\r\n\t * @returns An array of {@link DecisionInfo} instances, indexed by decision\r\n\t * number.\r\n\t */\r\n\t@NotNull\r\n\tpublic getDecisionInfo(): DecisionInfo[] {\r\n\t\treturn this.atnSimulator.getDecisionInfo();\r\n\t}\r\n\r\n\t/**\r\n\t * Gets the decision numbers for decisions that required one or more\r\n\t * full-context predictions during parsing. These are decisions for which\r\n\t * {@link DecisionInfo#LL_Fallback} is non-zero.\r\n\t *\r\n\t * @returns A list of decision numbers which required one or more\r\n\t * full-context predictions during parsing.\r\n\t */\r\n\t@NotNull\r\n\tpublic getLLDecisions(): number[] {\r\n\t\tlet decisions: DecisionInfo[] = this.atnSimulator.getDecisionInfo();\r\n\t\tlet LL: number[] = [];\r\n\t\tfor (let i = 0; i < decisions.length; i++) {\r\n\t\t\tlet fallBack: number = decisions[i].LL_Fallback;\r\n\t\t\tif (fallBack > 0) {\r\n\t\t\t\tLL.push(i);\r\n\t\t\t}\r\n\t\t}\r\n\r\n\t\treturn LL;\r\n\t}\r\n\r\n\t/**\r\n\t * Gets the total time spent during prediction across all decisions made\r\n\t * during parsing. 
This value is the sum of\r\n\t * {@link DecisionInfo#timeInPrediction} for all decisions.\r\n\t */\r\n\tpublic getTotalTimeInPrediction(): number {\r\n\t\tlet decisions: DecisionInfo[] = this.atnSimulator.getDecisionInfo();\r\n\t\tlet t: number = 0;\r\n\t\tfor (let decision of decisions) {\r\n\t\t\tt += decision.timeInPrediction;\r\n\t\t}\r\n\r\n\t\treturn t;\r\n\t}\r\n\r\n\t/**\r\n\t * Gets the total number of SLL lookahead operations across all decisions\r\n\t * made during parsing. This value is the sum of\r\n\t * {@link DecisionInfo#SLL_TotalLook} for all decisions.\r\n\t */\r\n\tpublic getTotalSLLLookaheadOps(): number {\r\n\t\tlet decisions: DecisionInfo[] = this.atnSimulator.getDecisionInfo();\r\n\t\tlet k: number = 0;\r\n\t\tfor (let decision of decisions) {\r\n\t\t\tk += decision.SLL_TotalLook;\r\n\t\t}\r\n\r\n\t\treturn k;\r\n\t}\r\n\r\n\t/**\r\n\t * Gets the total number of LL lookahead operations across all decisions\r\n\t * made during parsing. This value is the sum of\r\n\t * {@link DecisionInfo#LL_TotalLook} for all decisions.\r\n\t */\r\n\tpublic getTotalLLLookaheadOps(): number {\r\n\t\tlet decisions: DecisionInfo[] = this.atnSimulator.getDecisionInfo();\r\n\t\tlet k: number = 0;\r\n\t\tfor (let decision of decisions) {\r\n\t\t\tk += decision.LL_TotalLook;\r\n\t\t}\r\n\r\n\t\treturn k;\r\n\t}\r\n\r\n\t/**\r\n\t * Gets the total number of ATN lookahead operations for SLL prediction\r\n\t * across all decisions made during parsing.\r\n\t */\r\n\tpublic getTotalSLLATNLookaheadOps(): number {\r\n\t\tlet decisions: DecisionInfo[] = this.atnSimulator.getDecisionInfo();\r\n\t\tlet k: number = 0;\r\n\t\tfor (let decision of decisions) {\r\n\t\t\tk += decision.SLL_ATNTransitions;\r\n\t\t}\r\n\r\n\t\treturn k;\r\n\t}\r\n\r\n\t/**\r\n\t * Gets the total number of ATN lookahead operations for LL prediction\r\n\t * across all decisions made during parsing.\r\n\t */\r\n\tpublic getTotalLLATNLookaheadOps(): number {\r\n\t\tlet decisions: DecisionInfo[] = 
this.atnSimulator.getDecisionInfo();\r\n\t\tlet k: number = 0;\r\n\t\tfor (let decision of decisions) {\r\n\t\t\tk += decision.LL_ATNTransitions;\r\n\t\t}\r\n\r\n\t\treturn k;\r\n\t}\r\n\r\n\t/**\r\n\t * Gets the total number of ATN lookahead operations for SLL and LL\r\n\t * prediction across all decisions made during parsing.\r\n\t *\r\n\t * This value is the sum of {@link #getTotalSLLATNLookaheadOps} and\r\n\t * {@link #getTotalLLATNLookaheadOps}.\r\n\t */\r\n\tpublic getTotalATNLookaheadOps(): number {\r\n\t\tlet decisions: DecisionInfo[] = this.atnSimulator.getDecisionInfo();\r\n\t\tlet k: number = 0;\r\n\t\tfor (let decision of decisions) {\r\n\t\t\tk += decision.SLL_ATNTransitions;\r\n\t\t\tk += decision.LL_ATNTransitions;\r\n\t\t}\r\n\r\n\t\treturn k;\r\n\t}\r\n\r\n\t/**\r\n\t * Gets the total number of DFA states stored in the DFA cache for all\r\n\t * decisions in the ATN.\r\n\t */\r\n\tpublic getDFASize(): number;\r\n\r\n\t/**\r\n\t * Gets the total number of DFA states stored in the DFA cache for a\r\n\t * particular decision.\r\n\t */\r\n\tpublic getDFASize(decision: number): number;\r\n\r\n\tpublic getDFASize(decision?: number): number {\r\n\t\tif (decision) {\r\n\t\t\tlet decisionToDFA: DFA = this.atnSimulator.atn.decisionToDFA[decision];\r\n\t\t\treturn decisionToDFA.states.size;\r\n\t\t} else {\r\n\t\t\tlet n: number = 0;\r\n\t\t\tlet decisionToDFA: DFA[] = this.atnSimulator.atn.decisionToDFA;\r\n\t\t\tfor (let i = 0; i < decisionToDFA.length; i++) {\r\n\t\t\t\tn += this.getDFASize(i);\r\n\t\t\t}\r\n\r\n\t\t\treturn n;\r\n\t\t}\r\n\t}\r\n}\r\n", "/*!\r\n * Copyright 2016 The ANTLR Project. All rights reserved.\r\n * Licensed under the BSD-3-Clause license. 
See LICENSE file in the project root for license information.\r\n */\r\n\r\n// ConvertTo-TS run at 2016-10-04T11:26:56.9812284-07:00\r\nimport { ANTLRErrorListener } from \"./ANTLRErrorListener\";\r\nimport { ATNConfigSet } from \"./atn/ATNConfigSet\";\r\nimport { BitSet } from \"./misc/BitSet\";\r\nimport { DFA } from \"./dfa/DFA\";\r\nimport { Parser } from \"./Parser\";\r\nimport { RecognitionException } from \"./RecognitionException\";\r\nimport { Recognizer } from \"./Recognizer\";\r\nimport { ProxyErrorListener } from \"./ProxyErrorListener\";\r\nimport { ParserErrorListener } from \"./ParserErrorListener\";\r\nimport { SimulatorState } from \"./atn/SimulatorState\";\r\nimport { Token } from \"./Token\";\r\nimport { Override } from \"./Decorators\";\r\n\r\n/**\r\n * @author Sam Harwell\r\n */\r\nexport class ProxyParserErrorListener extends ProxyErrorListener\r\n\timplements ParserErrorListener {\r\n\r\n\tconstructor(delegates: ParserErrorListener[]) {\r\n\t\tsuper(delegates);\r\n\t}\r\n\r\n\t@Override\r\n\tpublic reportAmbiguity(\r\n\t\trecognizer: Parser,\r\n\t\tdfa: DFA,\r\n\t\tstartIndex: number,\r\n\t\tstopIndex: number,\r\n\t\texact: boolean,\r\n\t\tambigAlts: BitSet | undefined,\r\n\t\tconfigs: ATNConfigSet): void {\r\n\t\tthis.getDelegates()\r\n\t\t\t.forEach((listener) => {\r\n\t\t\t\tif (listener.reportAmbiguity) {\r\n\t\t\t\t\tlistener.reportAmbiguity(\r\n\t\t\t\t\t\trecognizer,\r\n\t\t\t\t\t\tdfa,\r\n\t\t\t\t\t\tstartIndex,\r\n\t\t\t\t\t\tstopIndex,\r\n\t\t\t\t\t\texact,\r\n\t\t\t\t\t\tambigAlts,\r\n\t\t\t\t\t\tconfigs);\r\n\t\t\t\t}\r\n\r\n\t\t\t});\r\n\t}\r\n\r\n\t@Override\r\n\tpublic reportAttemptingFullContext(\r\n\t\trecognizer: Parser,\r\n\t\tdfa: DFA,\r\n\t\tstartIndex: number,\r\n\t\tstopIndex: number,\r\n\t\tconflictingAlts: BitSet | undefined,\r\n\t\tconflictState: SimulatorState): void {\r\n\t\tthis.getDelegates()\r\n\t\t\t.forEach((listener) => {\r\n\t\t\t\tif (listener.reportAttemptingFullContext) 
{\r\n\t\t\t\t\tlistener.reportAttemptingFullContext(\r\n\t\t\t\t\t\trecognizer,\r\n\t\t\t\t\t\tdfa,\r\n\t\t\t\t\t\tstartIndex,\r\n\t\t\t\t\t\tstopIndex,\r\n\t\t\t\t\t\tconflictingAlts,\r\n\t\t\t\t\t\tconflictState);\r\n\t\t\t\t}\r\n\t\t\t});\r\n\t}\r\n\r\n\t@Override\r\n\tpublic reportContextSensitivity(\r\n\t\trecognizer: Parser,\r\n\t\tdfa: DFA,\r\n\t\tstartIndex: number,\r\n\t\tstopIndex: number,\r\n\t\tprediction: number,\r\n\t\tacceptState: SimulatorState): void {\r\n\t\tthis.getDelegates()\r\n\t\t\t.forEach((listener) => {\r\n\t\t\t\tif (listener.reportContextSensitivity) {\r\n\t\t\t\t\tlistener.reportContextSensitivity(\r\n\t\t\t\t\t\trecognizer,\r\n\t\t\t\t\t\tdfa,\r\n\t\t\t\t\t\tstartIndex,\r\n\t\t\t\t\t\tstopIndex,\r\n\t\t\t\t\t\tprediction,\r\n\t\t\t\t\t\tacceptState);\r\n\t\t\t\t}\r\n\t\t\t});\r\n\t}\r\n}\r\n", "/*!\r\n * Copyright 2016 The ANTLR Project. All rights reserved.\r\n * Licensed under the BSD-3-Clause license. See LICENSE file in the project root for license information.\r\n */\r\n\r\nexport function isHighSurrogate(ch: number): boolean {\r\n\treturn ch >= 0xD800 && ch <= 0xDBFF;\r\n}\r\n\r\nexport function isLowSurrogate(ch: number): boolean {\r\n\treturn ch >= 0xDC00 && ch <= 0xDFFF;\r\n}\r\n\r\nexport function isSupplementaryCodePoint(ch: number): boolean {\r\n\treturn ch >= 0x10000;\r\n}\r\n", "/*!\r\n * Copyright 2016 The ANTLR Project. All rights reserved.\r\n * Licensed under the BSD-3-Clause license. 
See LICENSE file in the project root for license information.\r\n */\r\n\r\nimport * as assert from \"assert\";\r\nimport * as Character from \"./misc/Character\";\r\n\r\n/**\r\n * Wrapper for `Uint8Array` / `Uint16Array` / `Int32Array`.\r\n */\r\nexport class CodePointBuffer {\r\n\tprivate readonly buffer: Uint8Array | Uint16Array | Int32Array;\r\n\tprivate _position: number;\r\n\tprivate _size: number;\r\n\r\n\tconstructor(buffer: Uint8Array | Uint16Array | Int32Array, size: number) {\r\n\t\tthis.buffer = buffer;\r\n\t\tthis._position = 0;\r\n\t\tthis._size = size;\r\n\t}\r\n\r\n\tpublic static withArray(buffer: Uint8Array | Uint16Array | Int32Array): CodePointBuffer {\r\n\t\treturn new CodePointBuffer(buffer, buffer.length);\r\n\t}\r\n\r\n\tpublic get position(): number {\r\n\t\treturn this._position;\r\n\t}\r\n\r\n\tpublic set position(newPosition: number) {\r\n\t\tif (newPosition < 0 || newPosition > this._size) {\r\n\t\t\tthrow new RangeError();\r\n\t\t}\r\n\r\n\t\tthis._position = newPosition;\r\n\t}\r\n\r\n\tpublic get remaining(): number {\r\n\t\treturn this._size - this.position;\r\n\t}\r\n\r\n\tpublic get(offset: number): number {\r\n\t\treturn this.buffer[offset];\r\n\t}\r\n\r\n\tpublic array(): Uint8Array | Uint16Array | Int32Array {\r\n\t\treturn this.buffer.slice(0, this._size);\r\n\t}\r\n\r\n\tpublic static builder(initialBufferSize: number): CodePointBuffer.Builder {\r\n\t\treturn new CodePointBuffer.Builder(initialBufferSize);\r\n\t}\r\n}\r\n\r\nexport namespace CodePointBuffer {\r\n\tconst enum Type {\r\n\t\tBYTE,\r\n\t\tCHAR,\r\n\t\tINT,\r\n\t}\r\n\r\n\texport class Builder {\r\n\t\tprivate type: Type;\r\n\t\tprivate buffer: Uint8Array | Uint16Array | Int32Array;\r\n\t\tprivate prevHighSurrogate: number;\r\n\t\tprivate position: number;\r\n\r\n\t\tconstructor(initialBufferSize: number) {\r\n\t\t\tthis.type = Type.BYTE;\r\n\t\t\tthis.buffer = new Uint8Array(initialBufferSize);\r\n\t\t\tthis.prevHighSurrogate = -1;\r\n\t\t\tthis.position = 
0;\r\n\t\t}\r\n\r\n\t\tpublic build(): CodePointBuffer {\r\n\t\t\treturn new CodePointBuffer(this.buffer, this.position);\r\n\t\t}\r\n\r\n\t\tprivate static roundUpToNextPowerOfTwo(i: number): number {\r\n\t\t\tlet nextPowerOfTwo: number = 32 - Math.clz32(i - 1);\r\n\t\t\treturn Math.pow(2, nextPowerOfTwo);\r\n\t\t}\r\n\r\n\t\tpublic ensureRemaining(remainingNeeded: number): void {\r\n\t\t\tswitch (this.type) {\r\n\t\t\t\tcase Type.BYTE:\r\n\t\t\t\t\tif (this.buffer.length - this.position < remainingNeeded) {\r\n\t\t\t\t\t\tlet newCapacity: number = Builder.roundUpToNextPowerOfTwo(this.buffer.length + remainingNeeded);\r\n\t\t\t\t\t\tlet newBuffer: Uint8Array = new Uint8Array(newCapacity);\r\n\t\t\t\t\t\tnewBuffer.set(this.buffer.subarray(0, this.position), 0);\r\n\t\t\t\t\t\tthis.buffer = newBuffer;\r\n\t\t\t\t\t}\r\n\t\t\t\t\tbreak;\r\n\t\t\t\tcase Type.CHAR:\r\n\t\t\t\t\tif (this.buffer.length - this.position < remainingNeeded) {\r\n\t\t\t\t\t\tlet newCapacity: number = Builder.roundUpToNextPowerOfTwo(this.buffer.length + remainingNeeded);\r\n\t\t\t\t\t\tlet newBuffer: Uint16Array = new Uint16Array(newCapacity);\r\n\t\t\t\t\t\tnewBuffer.set(this.buffer.subarray(0, this.position), 0);\r\n\t\t\t\t\t\tthis.buffer = newBuffer;\r\n\t\t\t\t\t}\r\n\t\t\t\t\tbreak;\r\n\t\t\t\tcase Type.INT:\r\n\t\t\t\t\tif (this.buffer.length - this.position < remainingNeeded) {\r\n\t\t\t\t\t\tlet newCapacity: number = Builder.roundUpToNextPowerOfTwo(this.buffer.length + remainingNeeded);\r\n\t\t\t\t\t\tlet newBuffer: Int32Array = new Int32Array(newCapacity);\r\n\t\t\t\t\t\tnewBuffer.set(this.buffer.subarray(0, this.position), 0);\r\n\t\t\t\t\t\tthis.buffer = newBuffer;\r\n\t\t\t\t\t}\r\n\t\t\t\t\tbreak;\r\n\t\t\t}\r\n\t\t}\r\n\r\n\t\tpublic append(utf16In: Uint16Array): void {\r\n\t\t\tthis.ensureRemaining(utf16In.length);\r\n\t\t\tthis.appendArray(utf16In);\r\n\t\t}\r\n\r\n\t\tprivate appendArray(utf16In: Uint16Array): void {\r\n\t\t\tswitch (this.type) {\r\n\t\t\t\tcase 
Type.BYTE:\r\n\t\t\t\t\tthis.appendArrayByte(utf16In);\r\n\t\t\t\t\tbreak;\r\n\t\t\t\tcase Type.CHAR:\r\n\t\t\t\t\tthis.appendArrayChar(utf16In);\r\n\t\t\t\t\tbreak;\r\n\t\t\t\tcase Type.INT:\r\n\t\t\t\t\tthis.appendArrayInt(utf16In);\r\n\t\t\t\t\tbreak;\r\n\t\t\t}\r\n\t\t}\r\n\r\n\t\tprivate appendArrayByte(utf16In: Uint16Array): void {\r\n\t\t\tassert(this.prevHighSurrogate === -1);\r\n\r\n\t\t\tlet input: Uint16Array = utf16In;\r\n\t\t\tlet inOffset: number = 0;\r\n\t\t\tlet inLimit: number = utf16In.length;\r\n\r\n\t\t\tlet outByte = this.buffer;\r\n\t\t\tlet outOffset: number = this.position;\r\n\r\n\t\t\twhile (inOffset < inLimit) {\r\n\t\t\t\tlet c: number = input[inOffset];\r\n\t\t\t\tif (c <= 0xFF) {\r\n\t\t\t\t\toutByte[outOffset] = c;\r\n\t\t\t\t} else {\r\n\t\t\t\t\tutf16In = utf16In.subarray(inOffset, inLimit);\r\n\t\t\t\t\tthis.position = outOffset;\r\n\t\t\t\t\tif (!Character.isHighSurrogate(c)) {\r\n\t\t\t\t\t\tthis.byteToCharBuffer(utf16In.length);\r\n\t\t\t\t\t\tthis.appendArrayChar(utf16In);\r\n\t\t\t\t\t\treturn;\r\n\t\t\t\t\t} else {\r\n\t\t\t\t\t\tthis.byteToIntBuffer(utf16In.length);\r\n\t\t\t\t\t\tthis.appendArrayInt(utf16In);\r\n\t\t\t\t\t\treturn;\r\n\t\t\t\t\t}\r\n\t\t\t\t}\r\n\r\n\t\t\t\tinOffset++;\r\n\t\t\t\toutOffset++;\r\n\t\t\t}\r\n\r\n\t\t\tthis.position = outOffset;\r\n\t\t}\r\n\r\n\t\tprivate appendArrayChar(utf16In: Uint16Array): void {\r\n\t\t\tassert(this.prevHighSurrogate === -1);\r\n\r\n\t\t\tlet input: Uint16Array = utf16In;\r\n\t\t\tlet inOffset: number = 0;\r\n\t\t\tlet inLimit: number = utf16In.length;\r\n\r\n\t\t\tlet outChar = this.buffer;\r\n\t\t\tlet outOffset: number = this.position;\r\n\r\n\t\t\twhile (inOffset < inLimit) {\r\n\t\t\t\tlet c: number = input[inOffset];\r\n\t\t\t\tif (!Character.isHighSurrogate(c)) {\r\n\t\t\t\t\toutChar[outOffset] = c;\r\n\t\t\t\t} else {\r\n\t\t\t\t\tutf16In = utf16In.subarray(inOffset, inLimit);\r\n\t\t\t\t\tthis.position = 
outOffset;\r\n\t\t\t\t\tthis.charToIntBuffer(utf16In.length);\r\n\t\t\t\t\tthis.appendArrayInt(utf16In);\r\n\t\t\t\t\treturn;\r\n\t\t\t\t}\r\n\r\n\t\t\t\tinOffset++;\r\n\t\t\t\toutOffset++;\r\n\t\t\t}\r\n\r\n\t\t\tthis.position = outOffset;\r\n\t\t}\r\n\r\n\t\tprivate appendArrayInt(utf16In: Uint16Array): void {\r\n\t\t\tlet input: Uint16Array = utf16In;\r\n\t\t\tlet inOffset: number = 0;\r\n\t\t\tlet inLimit: number = utf16In.length;\r\n\r\n\t\t\tlet outInt = this.buffer;\r\n\t\t\tlet outOffset = this.position;\r\n\r\n\t\t\twhile (inOffset < inLimit) {\r\n\t\t\t\tlet c: number = input[inOffset];\r\n\t\t\t\tinOffset++;\r\n\t\t\t\tif (this.prevHighSurrogate !== -1) {\r\n\t\t\t\t\tif (Character.isLowSurrogate(c)) {\r\n\t\t\t\t\t\toutInt[outOffset] = String.fromCharCode(this.prevHighSurrogate, c).codePointAt(0)!;\r\n\t\t\t\t\t\toutOffset++;\r\n\t\t\t\t\t\tthis.prevHighSurrogate = -1;\r\n\t\t\t\t\t} else {\r\n\t\t\t\t\t\t// Dangling high surrogate\r\n\t\t\t\t\t\toutInt[outOffset] = this.prevHighSurrogate;\r\n\t\t\t\t\t\toutOffset++;\r\n\t\t\t\t\t\tif (Character.isHighSurrogate(c)) {\r\n\t\t\t\t\t\t\tthis.prevHighSurrogate = c;\r\n\t\t\t\t\t\t} else {\r\n\t\t\t\t\t\t\toutInt[outOffset] = c;\r\n\t\t\t\t\t\t\toutOffset++;\r\n\t\t\t\t\t\t\tthis.prevHighSurrogate = -1;\r\n\t\t\t\t\t\t}\r\n\t\t\t\t\t}\r\n\t\t\t\t} else if (Character.isHighSurrogate(c)) {\r\n\t\t\t\t\tthis.prevHighSurrogate = c;\r\n\t\t\t\t} else {\r\n\t\t\t\t\toutInt[outOffset] = c;\r\n\t\t\t\t\toutOffset++;\r\n\t\t\t\t}\r\n\t\t\t}\r\n\r\n\t\t\tif (this.prevHighSurrogate !== -1) {\r\n\t\t\t\t// Dangling high surrogate\r\n\t\t\t\toutInt[outOffset] = this.prevHighSurrogate;\r\n\t\t\t\toutOffset++;\r\n\t\t\t}\r\n\r\n\t\t\tthis.position = outOffset;\r\n\t\t}\r\n\r\n\t\tprivate byteToCharBuffer(toAppend: number): void {\r\n\t\t\t// CharBuffers hold twice as much per unit as ByteBuffers, so start with half the capacity.\r\n\t\t\tlet newBuffer: Uint16Array = new Uint16Array(Math.max(this.position + toAppend, 
this.buffer.length >> 1));\r\n\t\t\tnewBuffer.set(this.buffer.subarray(0, this.position), 0);\r\n\r\n\t\t\tthis.type = Type.CHAR;\r\n\t\t\tthis.buffer = newBuffer;\r\n\t\t}\r\n\r\n\t\tprivate byteToIntBuffer(toAppend: number): void {\r\n\t\t\t// IntBuffers hold four times as much per unit as ByteBuffers, so start with one quarter the capacity.\r\n\t\t\tlet newBuffer: Int32Array = new Int32Array(Math.max(this.position + toAppend, this.buffer.length >> 2));\r\n\t\t\tnewBuffer.set(this.buffer.subarray(0, this.position), 0);\r\n\r\n\t\t\tthis.type = Type.INT;\r\n\t\t\tthis.buffer = newBuffer;\r\n\t\t}\r\n\r\n\t\tprivate charToIntBuffer(toAppend: number): void {\r\n\t\t\t// IntBuffers hold two times as much per unit as ByteBuffers, so start with one half the capacity.\r\n\t\t\tlet newBuffer: Int32Array = new Int32Array(Math.max(this.position + toAppend, this.buffer.length >> 1));\r\n\t\t\tnewBuffer.set(this.buffer.subarray(0, this.position), 0);\r\n\r\n\t\t\tthis.type = Type.INT;\r\n\t\t\tthis.buffer = newBuffer;\r\n\t\t}\r\n\t}\r\n}\r\n", "/*!\r\n * Copyright 2016 The ANTLR Project. All rights reserved.\r\n * Licensed under the BSD-3-Clause license. 
See LICENSE file in the project root for license information.\r\n */\r\n\r\nimport * as assert from \"assert\";\r\nimport { CharStream } from \"./CharStream\";\r\nimport { CodePointBuffer } from \"./CodePointBuffer\";\r\nimport { IntStream } from \"./IntStream\";\r\nimport { Interval } from \"./misc/Interval\";\r\nimport { Override } from \"./Decorators\";\r\n\r\n/**\r\n * Alternative to {@link ANTLRInputStream} which treats the input\r\n * as a series of Unicode code points, instead of a series of UTF-16\r\n * code units.\r\n *\r\n * Use this if you need to parse input which potentially contains\r\n * Unicode values > U+FFFF.\r\n */\r\nexport class CodePointCharStream implements CharStream {\r\n\tprivate readonly _array: Uint8Array | Uint16Array | Int32Array;\r\n\tprivate readonly _size: number;\r\n\tprivate readonly _name: string;\r\n\r\n\tprivate _position: number;\r\n\r\n\t// Use the factory method {@link #fromBuffer(CodePointBuffer)} to\r\n\t// construct instances of this type.\r\n\tprotected constructor(array: Uint8Array | Uint16Array | Int32Array, position: number, remaining: number, name: string) {\r\n\t\t// TODO\r\n\t\tassert(position === 0);\r\n\t\tthis._array = array;\r\n\t\tthis._size = remaining;\r\n\t\tthis._name = name;\r\n\t\tthis._position = 0;\r\n\t}\r\n\r\n\tpublic get internalStorage(): Uint8Array | Uint16Array | Int32Array {\r\n\t\treturn this._array;\r\n\t}\r\n\r\n\t/**\r\n\t * Constructs a {@link CodePointCharStream} which provides access\r\n\t * to the Unicode code points stored in {@code codePointBuffer}.\r\n\t */\r\n\tpublic static fromBuffer(codePointBuffer: CodePointBuffer): CodePointCharStream;\r\n\r\n\t/**\r\n\t * Constructs a named {@link CodePointCharStream} which provides access\r\n\t * to the Unicode code points stored in {@code codePointBuffer}.\r\n\t */\r\n\tpublic static fromBuffer(codePointBuffer: CodePointBuffer, name: string): CodePointCharStream;\r\n\tpublic static fromBuffer(codePointBuffer: CodePointBuffer, name?: string): 
CodePointCharStream {\r\n\t\tif (name === undefined || name.length === 0) {\r\n\t\t\tname = IntStream.UNKNOWN_SOURCE_NAME;\r\n\t\t}\r\n\r\n\t\t// Java lacks generics on primitive types.\r\n\t\t//\r\n\t\t// To avoid lots of calls to virtual methods in the\r\n\t\t// very hot codepath of LA() below, we construct one\r\n\t\t// of three concrete subclasses.\r\n\t\t//\r\n\t\t// The concrete subclasses directly access the code\r\n\t\t// points stored in the underlying array (byte[],\r\n\t\t// char[], or int[]), so we can avoid lots of virtual\r\n\t\t// method calls to ByteBuffer.get(offset).\r\n\t\treturn new CodePointCharStream(\r\n\t\t\tcodePointBuffer.array(),\r\n\t\t\tcodePointBuffer.position,\r\n\t\t\tcodePointBuffer.remaining,\r\n\t\t\tname);\r\n\t}\r\n\r\n\t@Override\r\n\tpublic consume(): void {\r\n\t\tif (this._size - this._position === 0) {\r\n\t\t\tassert(this.LA(1) === IntStream.EOF);\r\n\t\t\tthrow new RangeError(\"cannot consume EOF\");\r\n\t\t}\r\n\r\n\t\tthis._position++;\r\n\t}\r\n\r\n\t@Override\r\n\tpublic get index(): number {\r\n\t\treturn this._position;\r\n\t}\r\n\r\n\t@Override\r\n\tpublic get size(): number {\r\n\t\treturn this._size;\r\n\t}\r\n\r\n\t/** mark/release do nothing; we have entire buffer */\r\n\t@Override\r\n\tpublic mark(): number {\r\n\t\treturn -1;\r\n\t}\r\n\r\n\t@Override\r\n\tpublic release(marker: number): void {\r\n\t\t// No default implementation since this stream buffers the entire input\r\n\t}\r\n\r\n\t@Override\r\n\tpublic seek(index: number): void {\r\n\t\tthis._position = index;\r\n\t}\r\n\r\n\t@Override\r\n\tpublic get sourceName(): string {\r\n\t\treturn this._name;\r\n\t}\r\n\r\n\t@Override\r\n\tpublic toString(): string {\r\n\t\treturn this.getText(Interval.of(0, this.size - 1));\r\n\t}\r\n\r\n\t@Override\r\n\tpublic LA(i: number): number {\r\n\t\tlet offset: number;\r\n\t\tswitch (Math.sign(i)) {\r\n\t\t\tcase -1:\r\n\t\t\t\toffset = this.index + i;\r\n\t\t\t\tif (offset < 0) {\r\n\t\t\t\t\treturn 
IntStream.EOF;\r\n\t\t\t\t}\r\n\r\n\t\t\t\treturn this._array[offset];\r\n\r\n\t\t\tcase 0:\r\n\t\t\t\t// Undefined\r\n\t\t\t\treturn 0;\r\n\r\n\t\t\tcase 1:\r\n\t\t\t\toffset = this.index + i - 1;\r\n\t\t\t\tif (offset >= this.size) {\r\n\t\t\t\t\treturn IntStream.EOF;\r\n\t\t\t\t}\r\n\r\n\t\t\t\treturn this._array[offset];\r\n\t\t}\r\n\r\n\t\tthrow new RangeError(\"Not reached\");\r\n\t}\r\n\r\n\t/** Return the UTF-16 encoded string for the given interval */\r\n\t@Override\r\n\tpublic getText(interval: Interval): string {\r\n\t\tconst startIdx: number = Math.min(interval.a, this.size);\r\n\t\tconst len: number = Math.min(interval.b - interval.a + 1, this.size - startIdx);\r\n\r\n\t\tif (this._array instanceof Int32Array) {\r\n\t\t\treturn String.fromCodePoint(...Array.from(this._array.subarray(startIdx, startIdx + len)));\r\n\t\t} else {\r\n\t\t\treturn String.fromCharCode(...Array.from(this._array.subarray(startIdx, startIdx + len)));\r\n\t\t}\r\n\t}\r\n}\r\n", "/*!\r\n * Copyright 2016 The ANTLR Project. All rights reserved.\r\n * Licensed under the BSD-3-Clause license. See LICENSE file in the project root for license information.\r\n */\r\n\r\nimport { CodePointBuffer } from \"./CodePointBuffer\";\r\nimport { CodePointCharStream } from \"./CodePointCharStream\";\r\nimport { IntStream } from \"./IntStream\";\r\n\r\n// const DEFAULT_BUFFER_SIZE: number = 4096;\r\n\r\n/** This class represents the primary interface for creating {@link CharStream}s\r\n * from a variety of sources as of 4.7. The motivation was to support\r\n * Unicode code points > U+FFFF. {@link ANTLRInputStream} and\r\n * {@link ANTLRFileStream} are now deprecated in favor of the streams created\r\n * by this interface.\r\n *\r\n * DEPRECATED: {@code new ANTLRFileStream(\"myinputfile\")}\r\n * NEW: {@code CharStreams.fromFileName(\"myinputfile\")}\r\n *\r\n * WARNING: If you use both the deprecated and the new streams, you will see\r\n * a nontrivial performance degradation. 
This speed hit is because the\r\n * {@link Lexer}'s internal code goes from a monomorphic to megamorphic\r\n * dynamic dispatch to get characters from the input stream. Java's\r\n * on-the-fly compiler (JIT) is unable to perform the same optimizations\r\n * so stick with either the old or the new streams, if performance is\r\n * a primary concern. See the extreme debugging and spelunking\r\n * needed to identify this issue in our timing rig:\r\n *\r\n * https://github.com/antlr/antlr4/pull/1781\r\n *\r\n * The ANTLR character streams still buffer all the input when you create\r\n * the stream, as they have done for ~20 years. If you need unbuffered\r\n * access, please note that it becomes challenging to create\r\n * parse trees. The parse tree has to point to tokens which will either\r\n * point into a stale location in an unbuffered stream or you have to copy\r\n * the characters out of the buffer into the token. That defeats the purpose\r\n * of unbuffered input. Per the ANTLR book, unbuffered streams are primarily\r\n * useful for processing infinite streams *during the parse.*\r\n *\r\n * The new streams also use 8-bit buffers when possible so this new\r\n * interface supports character streams that use half as much memory\r\n * as the old {@link ANTLRFileStream}, which assumed 16-bit characters.\r\n *\r\n * A big shout out to Ben Hamilton (github bhamiltoncx) for his superhuman\r\n * efforts across all targets to get true Unicode 3.1 support for U+10FFFF.\r\n *\r\n * @since 4.7\r\n */\r\nexport namespace CharStreams {\r\n\t// /**\r\n\t// * Creates a {@link CharStream} given a path to a UTF-8\r\n\t// * encoded file on disk.\r\n\t// *\r\n\t// * Reads the entire contents of the file into the result before returning.\r\n\t// */\r\n\t// export function fromFile(file: File): CharStream;\r\n\t// export function fromFile(file: File, charset: Charset): CharStream;\r\n\t// export function fromFile(file: File, charset?: Charset): CharStream {\r\n\t// \tif (charset === 
undefined) {\r\n\t// \t\tcharset = Charset.forName(\"UTF-8\");\r\n\t// \t}\r\n\r\n\t// \tlet size: number = file.length();\r\n\t// \treturn fromStream(new FileInputStream(file), charset, file.toString(), size);\r\n\t// }\r\n\r\n\t// /**\r\n\t// * Creates a {@link CharStream} given a string containing a\r\n\t// * path to a UTF-8 file on disk.\r\n\t// *\r\n\t// * Reads the entire contents of the file into the result before returning.\r\n\t// */\r\n\t// export function fromFileName(fileName: string): CharStream;\r\n\r\n\t// /**\r\n\t// * Creates a {@link CharStream} given a string containing a\r\n\t// * path to a file on disk and the charset of the bytes\r\n\t// * contained in the file.\r\n\t// *\r\n\t// * Reads the entire contents of the file into the result before returning.\r\n\t// */\r\n\t// export function fromFileName(fileName: string, charset: Charset): CharStream;\r\n\t// export function fromFileName(fileName: string, charset?: Charset): CharStream {\r\n\t// \tif (charset === undefined) {\r\n\t// \t\tcharset = Charset.forName(\"UTF-8\");\r\n\t// \t}\r\n\r\n\t// \treturn fromFile(new File(fileName), charset);\r\n\t// }\r\n\r\n\t// /**\r\n\t// * Creates a {@link CharStream} given an opened {@link InputStream}\r\n\t// * containing UTF-8 bytes.\r\n\t// *\r\n\t// * Reads the entire contents of the {@code InputStream} into\r\n\t// * the result before returning, then closes the {@code InputStream}.\r\n\t// */\r\n\t// export function fromStream(is: InputStream): CharStream;\r\n\r\n\t// /**\r\n\t// * Creates a {@link CharStream} given an opened {@link InputStream} and the\r\n\t// * charset of the bytes contained in the stream.\r\n\t// *\r\n\t// * Reads the entire contents of the {@code InputStream} into\r\n\t// * the result before returning, then closes the {@code InputStream}.\r\n\t// */\r\n\t// export function fromStream(is: InputStream, charset: Charset): CharStream;\r\n\r\n\t// export function fromStream(is: InputStream, charset: Charset, sourceName: string, 
inputSize: number): CharStream;\r\n\t// export function fromStream(is: InputStream, charset?: Charset, sourceName?: string, inputSize?: number): CharStream {\r\n\t// \tif (charset === undefined) {\r\n\t// \t\tcharset = Charset.forName(\"UTF-8\");\r\n\t// \t}\r\n\r\n\t// \tif (sourceName === undefined) {\r\n\t// \t\tsourceName = IntStream.UNKNOWN_SOURCE_NAME;\r\n\t// \t}\r\n\r\n\t// \tif (inputSize === undefined) {\r\n\t// \t\tinputSize = -1;\r\n\t// \t}\r\n\r\n\t// \treturn fromChannel(\r\n\t// \t\tChannels.newChannel(is),\r\n\t// \t\tcharset,\r\n\t// \t\tDEFAULT_BUFFER_SIZE,\r\n\t// \t\tCodingErrorAction.REPLACE,\r\n\t// \t\tsourceName,\r\n\t// \t\tinputSize);\r\n\t// }\r\n\r\n\t// /**\r\n\t// * Creates a {@link CharStream} given an opened {@link ReadableByteChannel}\r\n\t// * containing UTF-8 bytes.\r\n\t// *\r\n\t// * Reads the entire contents of the {@code channel} into\r\n\t// * the result before returning, then closes the {@code channel}.\r\n\t// */\r\n\t// export function fromChannel(channel: ReadableByteChannel): CharStream;\r\n\r\n\t// /**\r\n\t// * Creates a {@link CharStream} given an opened {@link ReadableByteChannel} and the\r\n\t// * charset of the bytes contained in the channel.\r\n\t// *\r\n\t// * Reads the entire contents of the {@code channel} into\r\n\t// * the result before returning, then closes the {@code channel}.\r\n\t// */\r\n\t// export function fromChannel(channel: ReadableByteChannel, charset: Charset): CharStream;\r\n\r\n\t// /**\r\n\t// * Creates a {@link CharStream} given an opened {@link ReadableByteChannel}\r\n\t// * containing UTF-8 bytes.\r\n\t// *\r\n\t// * Reads the entire contents of the {@code channel} into\r\n\t// * the result before returning, then closes the {@code channel}.\r\n\t// */\r\n\t// export function fromChannel(\r\n\t// \tchannel: ReadableByteChannel,\r\n\t// \tcharset: Charset,\r\n\t// \tbufferSize: number,\r\n\t// \tdecodingErrorAction: CodingErrorAction,\r\n\t// \tsourceName: string): 
CodePointCharStream;\r\n\r\n\t// export function fromChannel(\r\n\t// \tchannel: ReadableByteChannel,\r\n\t// \tcharset: Charset,\r\n\t// \tbufferSize: number,\r\n\t// \tdecodingErrorAction: CodingErrorAction,\r\n\t// \tsourceName: string,\r\n\t// \tinputSize: number): CodePointCharStream;\r\n\t// export function fromChannel(\r\n\t// \tchannel: ReadableByteChannel,\r\n\t// \tcharset?: Charset,\r\n\t// \tbufferSize?: number,\r\n\t// \tdecodingErrorAction?: CodingErrorAction,\r\n\t// \tsourceName?: string,\r\n\t// \tinputSize?: number): CodePointCharStream\r\n\t// {\r\n\t// \tif (charset === undefined) {\r\n\t// \t\tcharset = Charset.forName(\"UTF-8\");\r\n\t// \t}\r\n\r\n\t// \tif (bufferSize === undefined) {\r\n\t// \t\tbufferSize = DEFAULT_BUFFER_SIZE;\r\n\t// \t}\r\n\r\n\t// \tif (decodingErrorAction === undefined) {\r\n\t// \t\tdecodingErrorAction = CodingErrorAction.REPLACE;\r\n\t// \t}\r\n\r\n\t// \tif (sourceName === undefined || sourceName.length === 0) {\r\n\t// \t\tsourceName = IntStream.UNKNOWN_SOURCE_NAME;\r\n\t// \t}\r\n\r\n\t// \tif (inputSize === undefined) {\r\n\t// \t\tinputSize = -1;\r\n\t// \t}\r\n\r\n\t// \tlet codePointBuffer: CodePointBuffer = bufferFromChannel(channel, charset, bufferSize, decodingErrorAction, inputSize);\r\n\t// \treturn CodePointCharStream.fromBuffer(codePointBuffer, sourceName);\r\n\t// }\r\n\r\n\t// /**\r\n\t// * Creates a {@link CharStream} given a {@link Reader}. Closes\r\n\t// * the reader before returning.\r\n\t// */\r\n\t// export function fromReader(r: Reader): CodePointCharStream;\r\n\r\n\t// /**\r\n\t// * Creates a {@link CharStream} given a {@link Reader} and its\r\n\t// * source name. 
Closes the reader before returning.\r\n\t// */\r\n\t// export function fromReader(r: Reader, sourceName: string): CodePointCharStream;\r\n\t// export function fromReader(r: Reader, sourceName?: string): CodePointCharStream {\r\n\t// \tif (sourceName === undefined) {\r\n\t// \t\tsourceName = IntStream.UNKNOWN_SOURCE_NAME;\r\n\t// \t}\r\n\r\n\t// \ttry {\r\n\t// \t\tlet codePointBufferBuilder: CodePointBuffer.Builder = CodePointBuffer.builder(DEFAULT_BUFFER_SIZE);\r\n\t// \t\tlet charBuffer: CharBuffer = CharBuffer.allocate(DEFAULT_BUFFER_SIZE);\r\n\t// \t\twhile ((r.read(charBuffer)) !== -1) {\r\n\t// \t\t\tcharBuffer.flip();\r\n\t// \t\t\tcodePointBufferBuilder.append(charBuffer);\r\n\t// \t\t\tcharBuffer.compact();\r\n\t// \t\t}\r\n\r\n\t// \t\treturn CodePointCharStream.fromBuffer(codePointBufferBuilder.build(), sourceName);\r\n\t// \t} finally {\r\n\t// \t\tr.close();\r\n\t// \t}\r\n\t// }\r\n\r\n\t/**\r\n\t * Creates a {@link CharStream} given a {@link String}.\r\n\t */\r\n\texport function fromString(s: string): CodePointCharStream;\r\n\r\n\t/**\r\n\t * Creates a {@link CharStream} given a {@link String} and the {@code sourceName}\r\n\t * from which it came.\r\n\t */\r\n\texport function fromString(s: string, sourceName: string): CodePointCharStream;\r\n\texport function fromString(s: string, sourceName?: string): CodePointCharStream {\r\n\t\tif (sourceName === undefined || sourceName.length === 0) {\r\n\t\t\tsourceName = IntStream.UNKNOWN_SOURCE_NAME;\r\n\t\t}\r\n\r\n\t\t// Initial guess assumes no code points > U+FFFF: one code\r\n\t\t// point for each code unit in the string\r\n\t\tlet codePointBufferBuilder: CodePointBuffer.Builder = CodePointBuffer.builder(s.length);\r\n\r\n\t\t// TODO: CharBuffer.wrap(String) rightfully returns a read-only buffer\r\n\t\t// which doesn't expose its array, so we make a copy.\r\n\t\tlet cb: Uint16Array = new Uint16Array(s.length);\r\n\t\tfor (let i = 0; i < s.length; i++) {\r\n\t\t\tcb[i] = 
s.charCodeAt(i);\r\n\t\t}\r\n\r\n\t\tcodePointBufferBuilder.append(cb);\r\n\t\treturn CodePointCharStream.fromBuffer(codePointBufferBuilder.build(), sourceName);\r\n\t}\r\n\r\n\t// export function bufferFromChannel(\r\n\t// \tchannel: ReadableByteChannel,\r\n\t// \tcharset: Charset,\r\n\t// \tbufferSize: number,\r\n\t// \tdecodingErrorAction: CodingErrorAction,\r\n\t// \tinputSize: number): CodePointBuffer {\r\n\t// \ttry {\r\n\t// \t\tlet utf8BytesIn: Uint8Array = new Uint8Array(bufferSize);\r\n\t// \t\tlet utf16CodeUnitsOut: Uint16Array = new Uint16Array(bufferSize);\r\n\t// \t\tif (inputSize === -1) {\r\n\t// \t\t\tinputSize = bufferSize;\r\n\t// \t\t} else if (inputSize > Integer.MAX_VALUE) {\r\n\t// \t\t\t// ByteBuffer et al don't support long sizes\r\n\t// \t\t\tthrow new RangeError(`inputSize ${inputSize} larger than max ${Integer.MAX_VALUE}`);\r\n\t// \t\t}\r\n\r\n\t// \t\tlet codePointBufferBuilder: CodePointBuffer.Builder = CodePointBuffer.builder(inputSize);\r\n\t// \t\tlet decoder: CharsetDecoder = charset\r\n\t// \t\t\t\t.newDecoder()\r\n\t// \t\t\t\t.onMalformedInput(decodingErrorAction)\r\n\t// \t\t\t\t.onUnmappableCharacter(decodingErrorAction);\r\n\r\n\t// \t\tlet endOfInput: boolean = false;\r\n\t// \t\twhile (!endOfInput) {\r\n\t// \t\t\tlet bytesRead: number = channel.read(utf8BytesIn);\r\n\t// \t\t\tendOfInput = (bytesRead === -1);\r\n\t// \t\t\tutf8BytesIn.flip();\r\n\t// \t\t\tlet result: CoderResult = decoder.decode(\r\n\t// \t\t\t\tutf8BytesIn,\r\n\t// \t\t\t\tutf16CodeUnitsOut,\r\n\t// \t\t\t\tendOfInput);\r\n\t// \t\t\tif (result.isError() && decodingErrorAction === CodingErrorAction.REPORT) {\r\n\t// \t\t\t\tresult.throwException();\r\n\t// \t\t\t}\r\n\r\n\t// \t\t\tutf16CodeUnitsOut.flip();\r\n\t// \t\t\tcodePointBufferBuilder.append(utf16CodeUnitsOut);\r\n\t// \t\t\tutf8BytesIn.compact();\r\n\t// \t\t\tutf16CodeUnitsOut.compact();\r\n\t// \t\t}\r\n\t// \t\t// Handle any bytes at the end of the file which need to\r\n\t// \t\t// be 
represented as errors or substitution characters.\r\n\t// \t\tlet flushResult: CoderResult = decoder.flush(utf16CodeUnitsOut);\r\n\t// \t\tif (flushResult.isError() && decodingErrorAction === CodingErrorAction.REPORT) {\r\n\t// \t\t\tflushResult.throwException();\r\n\t// \t\t}\r\n\r\n\t// \t\tutf16CodeUnitsOut.flip();\r\n\t// \t\tcodePointBufferBuilder.append(utf16CodeUnitsOut);\r\n\r\n\t// \t\treturn codePointBufferBuilder.build();\r\n\t// \t}\r\n\t// \tfinally {\r\n\t// \t\tchannel.close();\r\n\t// \t}\r\n\t// }\r\n}\r\n", "/*!\r\n * Copyright 2016 The ANTLR Project. All rights reserved.\r\n * Licensed under the BSD-3-Clause license. See LICENSE file in the project root for license information.\r\n */\r\n\r\n// ConvertTo-TS run at 2016-10-04T11:26:49.6074365-07:00\r\n\r\nimport * as assert from \"assert\";\r\nimport { CommonToken } from \"./CommonToken\";\r\nimport { Interval } from \"./misc/Interval\";\r\nimport { Lexer } from \"./Lexer\";\r\nimport { NotNull, Override } from \"./Decorators\";\r\nimport { RuleContext } from \"./RuleContext\";\r\nimport { Token } from \"./Token\";\r\nimport { TokenSource } from \"./TokenSource\";\r\nimport { TokenStream } from \"./TokenStream\";\r\nimport { WritableToken } from \"./WritableToken\";\r\n\r\n/**\r\n * This implementation of {@link TokenStream} loads tokens from a\r\n * {@link TokenSource} on-demand, and places the tokens in a buffer to provide\r\n * access to any previous token by index.\r\n *\r\n * This token stream ignores the value of {@link Token#getChannel}. 
If your\r\n * parser requires the token stream filter tokens to only those on a particular\r\n * channel, such as {@link Token#DEFAULT_CHANNEL} or\r\n * {@link Token#HIDDEN_CHANNEL}, use a filtering token stream such a\r\n * {@link CommonTokenStream}.\r\n */\r\nexport class BufferedTokenStream implements TokenStream {\r\n\t/**\r\n\t * The {@link TokenSource} from which tokens for this stream are fetched.\r\n\t */\r\n\t@NotNull\r\n\tprivate _tokenSource: TokenSource;\r\n\r\n\t/**\r\n\t * A collection of all tokens fetched from the token source. The list is\r\n\t * considered a complete view of the input once {@link #fetchedEOF} is set\r\n\t * to `true`.\r\n\t */\r\n\tprotected tokens: Token[] = [];\r\n\r\n\t/**\r\n\t * The index into {@link #tokens} of the current token (next token to\r\n\t * {@link #consume}). {@link #tokens}`[`{@link #p}`]` should be\r\n\t * {@link #LT LT(1)}.\r\n\t *\r\n\t * This field is set to -1 when the stream is first constructed or when\r\n\t * {@link #setTokenSource} is called, indicating that the first token has\r\n\t * not yet been fetched from the token source. For additional information,\r\n\t * see the documentation of {@link IntStream} for a description of\r\n\t * Initializing Methods.\r\n\t */\r\n\tprotected p: number = -1;\r\n\r\n\t/**\r\n\t * Indicates whether the {@link Token#EOF} token has been fetched from\r\n\t * {@link #tokenSource} and added to {@link #tokens}. 
This field improves\r\n\t * performance for the following cases:\r\n\t *\r\n\t * * {@link #consume}: The lookahead check in {@link #consume} to prevent\r\n\t * consuming the EOF symbol is optimized by checking the values of\r\n\t * {@link #fetchedEOF} and {@link #p} instead of calling {@link #LA}.\r\n\t * * {@link #fetch}: The check to prevent adding multiple EOF symbols into\r\n\t * {@link #tokens} is trivial with this field.\r\n\t */\r\n\tprotected fetchedEOF: boolean = false;\r\n\r\n\tconstructor(@NotNull tokenSource: TokenSource) {\r\n\t\tif (tokenSource == null) {\r\n\t\t\tthrow new Error(\"tokenSource cannot be null\");\r\n\t\t}\r\n\r\n\t\tthis._tokenSource = tokenSource;\r\n\t}\r\n\r\n\t@Override\r\n\tget tokenSource(): TokenSource {\r\n\t\treturn this._tokenSource;\r\n\t}\r\n\r\n\t/** Reset this token stream by setting its token source. */\r\n\tset tokenSource(tokenSource: TokenSource) {\r\n\t\tthis._tokenSource = tokenSource;\r\n\t\tthis.tokens.length = 0;\r\n\t\tthis.p = -1;\r\n\t\tthis.fetchedEOF = false;\r\n\t}\r\n\r\n\t@Override\r\n\tget index(): number {\r\n\t\treturn this.p;\r\n\t}\r\n\r\n\t@Override\r\n\tpublic mark(): number {\r\n\t\treturn 0;\r\n\t}\r\n\r\n\t@Override\r\n\tpublic release(marker: number): void {\r\n\t\t// no resources to release\r\n\t}\r\n\r\n\t@Override\r\n\tpublic seek(index: number): void {\r\n\t\tthis.lazyInit();\r\n\t\tthis.p = this.adjustSeekIndex(index);\r\n\t}\r\n\r\n\t@Override\r\n\tget size(): number {\r\n\t\treturn this.tokens.length;\r\n\t}\r\n\r\n\t@Override\r\n\tpublic consume(): void {\r\n\t\tlet skipEofCheck: boolean;\r\n\t\tif (this.p >= 0) {\r\n\t\t\tif (this.fetchedEOF) {\r\n\t\t\t\t// the last token in tokens is EOF. skip check if p indexes any\r\n\t\t\t\t// fetched token except the last.\r\n\t\t\t\tskipEofCheck = this.p < this.tokens.length - 1;\r\n\t\t\t} else {\r\n\t\t\t\t// no EOF token in tokens. 
skip check if p indexes a fetched token.\r\n\t\t\t\tskipEofCheck = this.p < this.tokens.length;\r\n\t\t\t}\r\n\t\t} else {\r\n\t\t\t// not yet initialized\r\n\t\t\tskipEofCheck = false;\r\n\t\t}\r\n\r\n\t\tif (!skipEofCheck && this.LA(1) === Token.EOF) {\r\n\t\t\tthrow new Error(\"cannot consume EOF\");\r\n\t\t}\r\n\r\n\t\tif (this.sync(this.p + 1)) {\r\n\t\t\tthis.p = this.adjustSeekIndex(this.p + 1);\r\n\t\t}\r\n\t}\r\n\r\n\t/** Make sure index `i` in tokens has a token.\r\n\t *\r\n\t * @returns `true` if a token is located at index `i`, otherwise\r\n\t * `false`.\r\n\t * @see #get(int i)\r\n\t */\r\n\tprotected sync(i: number): boolean {\r\n\t\tassert(i >= 0);\r\n\t\tlet n: number = i - this.tokens.length + 1; // how many more elements we need?\r\n\t\t//System.out.println(\"sync(\"+i+\") needs \"+n);\r\n\t\tif (n > 0) {\r\n\t\t\tlet fetched: number = this.fetch(n);\r\n\t\t\treturn fetched >= n;\r\n\t\t}\r\n\r\n\t\treturn true;\r\n\t}\r\n\r\n\t/** Add `n` elements to buffer.\r\n\t *\r\n\t * @returns The actual number of elements added to the buffer.\r\n\t */\r\n\tprotected fetch(n: number): number {\r\n\t\tif (this.fetchedEOF) {\r\n\t\t\treturn 0;\r\n\t\t}\r\n\r\n\t\tfor (let i = 0; i < n; i++) {\r\n\t\t\tlet t: Token = this.tokenSource.nextToken();\r\n\t\t\tif (this.isWritableToken(t)) {\r\n\t\t\t\tt.tokenIndex = this.tokens.length;\r\n\t\t\t}\r\n\r\n\t\t\tthis.tokens.push(t);\r\n\t\t\tif (t.type === Token.EOF) {\r\n\t\t\t\tthis.fetchedEOF = true;\r\n\t\t\t\treturn i + 1;\r\n\t\t\t}\r\n\t\t}\r\n\r\n\t\treturn n;\r\n\t}\r\n\r\n\t@Override\r\n\tpublic get(i: number): Token {\r\n\t\tif (i < 0 || i >= this.tokens.length) {\r\n\t\t\tthrow new RangeError(\"token index \" + i + \" out of range 0..\" + (this.tokens.length - 1));\r\n\t\t}\r\n\r\n\t\treturn this.tokens[i];\r\n\t}\r\n\r\n\t/** Get all tokens from start..stop inclusively. 
*/\r\n\tpublic getRange(start: number, stop: number): Token[] {\r\n\t\tif (start < 0 || stop < 0) {\r\n\t\t\treturn [];\r\n\t\t}\r\n\r\n\t\tthis.lazyInit();\r\n\t\tlet subset: Token[] = new Array();\r\n\t\tif (stop >= this.tokens.length) {\r\n\t\t\tstop = this.tokens.length - 1;\r\n\t\t}\r\n\r\n\t\tfor (let i = start; i <= stop; i++) {\r\n\t\t\tlet t: Token = this.tokens[i];\r\n\t\t\tif (t.type === Token.EOF) {\r\n\t\t\t\tbreak;\r\n\t\t\t}\r\n\r\n\t\t\tsubset.push(t);\r\n\t\t}\r\n\r\n\t\treturn subset;\r\n\t}\r\n\r\n\t@Override\r\n\tpublic LA(i: number): number {\r\n\t\tlet token = this.LT(i);\r\n\t\tif (!token) {\r\n\t\t\treturn Token.INVALID_TYPE;\r\n\t\t}\r\n\r\n\t\treturn token.type;\r\n\t}\r\n\r\n\tprotected tryLB(k: number): Token | undefined {\r\n\t\tif ((this.p - k) < 0) {\r\n\t\t\treturn undefined;\r\n\t\t}\r\n\r\n\t\treturn this.tokens[this.p - k];\r\n\t}\r\n\r\n\t@NotNull\r\n\t@Override\r\n\tpublic LT(k: number): Token {\r\n\t\tlet result = this.tryLT(k);\r\n\t\tif (result === undefined) {\r\n\t\t\tthrow new RangeError(\"requested lookback index out of range\");\r\n\t\t}\r\n\r\n\t\treturn result;\r\n\t}\r\n\r\n\tpublic tryLT(k: number): Token | undefined {\r\n\t\tthis.lazyInit();\r\n\t\tif (k === 0) {\r\n\t\t\tthrow new RangeError(\"0 is not a valid lookahead index\");\r\n\t\t}\r\n\r\n\t\tif (k < 0) {\r\n\t\t\treturn this.tryLB(-k);\r\n\t\t}\r\n\r\n\t\tlet i: number = this.p + k - 1;\r\n\t\tthis.sync(i);\r\n\t\tif (i >= this.tokens.length) {\r\n\t\t\t// return EOF token\r\n\t\t\t// EOF must be last token\r\n\t\t\treturn this.tokens[this.tokens.length - 1];\r\n\t\t}\r\n\r\n\t\t//\t\tif ( i>range ) range = i;\r\n\t\treturn this.tokens[i];\r\n\t}\r\n\r\n\t/**\r\n\t * Allowed derived classes to modify the behavior of operations which change\r\n\t * the current stream position by adjusting the target token index of a seek\r\n\t * operation. The default implementation simply returns `i`. 
If an\r\n\t * exception is thrown in this method, the current stream index should not be\r\n\t * changed.\r\n\t *\r\n\t * For example, {@link CommonTokenStream} overrides this method to ensure that\r\n\t * the seek target is always an on-channel token.\r\n\t *\r\n\t * @param i The target token index.\r\n\t * @returns The adjusted target token index.\r\n\t */\r\n\tprotected adjustSeekIndex(i: number): number {\r\n\t\treturn i;\r\n\t}\r\n\r\n\tprotected lazyInit(): void {\r\n\t\tif (this.p === -1) {\r\n\t\t\tthis.setup();\r\n\t\t}\r\n\t}\r\n\r\n\tprotected setup(): void {\r\n\t\tthis.sync(0);\r\n\t\tthis.p = this.adjustSeekIndex(0);\r\n\t}\r\n\r\n\tpublic getTokens(): Token[];\r\n\r\n\tpublic getTokens(start: number, stop: number): Token[];\r\n\r\n\tpublic getTokens(start: number, stop: number, types: Set): Token[];\r\n\r\n\tpublic getTokens(start: number, stop: number, ttype: number): Token[];\r\n\r\n\t/** Given a start and stop index, return a `List` of all tokens in\r\n\t * the token type `BitSet`. Return an empty array if no tokens were found. 
This\r\n\t * method looks at both on and off channel tokens.\r\n\t */\r\n\tpublic getTokens(start?: number, stop?: number, types?: Set | number): Token[] {\r\n\t\tthis.lazyInit();\r\n\r\n\t\tif (start === undefined) {\r\n\t\t\tassert(stop === undefined && types === undefined);\r\n\t\t\treturn this.tokens;\r\n\t\t} else if (stop === undefined) {\r\n\t\t\tstop = this.tokens.length - 1;\r\n\t\t}\r\n\r\n\t\tif (start < 0 || stop >= this.tokens.length || stop < 0 || start >= this.tokens.length) {\r\n\t\t\tthrow new RangeError(\"start \" + start + \" or stop \" + stop + \" not in 0..\" + (this.tokens.length - 1));\r\n\t\t}\r\n\r\n\t\tif (start > stop) {\r\n\t\t\treturn [];\r\n\t\t}\r\n\r\n\t\tif (types === undefined) {\r\n\t\t\treturn this.tokens.slice(start, stop + 1);\r\n\t\t} else if (typeof types === \"number\") {\r\n\t\t\ttypes = new Set().add(types);\r\n\t\t}\r\n\r\n\t\tlet typesSet = types;\r\n\r\n\t\t// list = tokens[start:stop]:{T t, t.type in types}\r\n\t\tlet filteredTokens: Token[] = this.tokens.slice(start, stop + 1);\r\n\t\tfilteredTokens = filteredTokens.filter((value) => typesSet.has(value.type));\r\n\r\n\t\treturn filteredTokens;\r\n\t}\r\n\r\n\t/**\r\n\t * Given a starting index, return the index of the next token on channel.\r\n\t * Return `i` if `tokens[i]` is on channel. Return the index of\r\n\t * the EOF token if there are no tokens on channel between `i` and\r\n\t * EOF.\r\n\t */\r\n\tprotected nextTokenOnChannel(i: number, channel: number): number {\r\n\t\tthis.sync(i);\r\n\t\tif (i >= this.size) {\r\n\t\t\treturn this.size - 1;\r\n\t\t}\r\n\r\n\t\tlet token: Token = this.tokens[i];\r\n\t\twhile (token.channel !== channel) {\r\n\t\t\tif (token.type === Token.EOF) {\r\n\t\t\t\treturn i;\r\n\t\t\t}\r\n\r\n\t\t\ti++;\r\n\t\t\tthis.sync(i);\r\n\t\t\ttoken = this.tokens[i];\r\n\t\t}\r\n\r\n\t\treturn i;\r\n\t}\r\n\r\n\t/**\r\n\t * Given a starting index, return the index of the previous token on\r\n\t * channel. 
Return `i` if `tokens[i]` is on channel. Return -1\r\n\t * if there are no tokens on channel between `i` and 0.\r\n\t *\r\n\t * If `i` specifies an index at or after the EOF token, the EOF token\r\n\t * index is returned. This is due to the fact that the EOF token is treated\r\n\t * as though it were on every channel.\r\n\t */\r\n\tprotected previousTokenOnChannel(i: number, channel: number): number {\r\n\t\tthis.sync(i);\r\n\t\tif (i >= this.size) {\r\n\t\t\t// the EOF token is on every channel\r\n\t\t\treturn this.size - 1;\r\n\t\t}\r\n\r\n\t\twhile (i >= 0) {\r\n\t\t\tlet token: Token = this.tokens[i];\r\n\t\t\tif (token.type === Token.EOF || token.channel === channel) {\r\n\t\t\t\treturn i;\r\n\t\t\t}\r\n\r\n\t\t\ti--;\r\n\t\t}\r\n\r\n\t\treturn i;\r\n\t}\r\n\r\n\t/** Collect all tokens on specified channel to the right of\r\n\t * the current token up until we see a token on {@link Lexer#DEFAULT_TOKEN_CHANNEL} or\r\n\t * EOF. If `channel` is `-1`, find any non default channel token.\r\n\t */\r\n\tpublic getHiddenTokensToRight(tokenIndex: number, channel: number = -1): Token[] {\r\n\t\tthis.lazyInit();\r\n\t\tif (tokenIndex < 0 || tokenIndex >= this.tokens.length) {\r\n\t\t\tthrow new RangeError(tokenIndex + \" not in 0..\" + (this.tokens.length - 1));\r\n\t\t}\r\n\r\n\t\tlet nextOnChannel: number = this.nextTokenOnChannel(tokenIndex + 1, Lexer.DEFAULT_TOKEN_CHANNEL);\r\n\t\tlet to: number;\r\n\t\tlet from: number = tokenIndex + 1;\r\n\t\t// if none onchannel to right, nextOnChannel=-1 so set to = last token\r\n\t\tif (nextOnChannel === -1) {\r\n\t\t\tto = this.size - 1;\r\n\t\t} else {\r\n\t\t\tto = nextOnChannel;\r\n\t\t}\r\n\r\n\t\treturn this.filterForChannel(from, to, channel);\r\n\t}\r\n\r\n\t/** Collect all tokens on specified channel to the left of\r\n\t * the current token up until we see a token on {@link Lexer#DEFAULT_TOKEN_CHANNEL}.\r\n\t * If `channel` is `-1`, find any non default channel token.\r\n\t */\r\n\tpublic 
getHiddenTokensToLeft(tokenIndex: number, channel: number = -1): Token[] {\r\n\t\tthis.lazyInit();\r\n\t\tif (tokenIndex < 0 || tokenIndex >= this.tokens.length) {\r\n\t\t\tthrow new RangeError(tokenIndex + \" not in 0..\" + (this.tokens.length - 1));\r\n\t\t}\r\n\r\n\t\tif (tokenIndex === 0) {\r\n\t\t\t// obviously no tokens can appear before the first token\r\n\t\t\treturn [];\r\n\t\t}\r\n\r\n\t\tlet prevOnChannel: number = this.previousTokenOnChannel(tokenIndex - 1, Lexer.DEFAULT_TOKEN_CHANNEL);\r\n\t\tif (prevOnChannel === tokenIndex - 1) {\r\n\t\t\treturn [];\r\n\t\t}\r\n\r\n\t\t// if none onchannel to left, prevOnChannel=-1 then from=0\r\n\t\tlet from: number = prevOnChannel + 1;\r\n\t\tlet to: number = tokenIndex - 1;\r\n\r\n\t\treturn this.filterForChannel(from, to, channel);\r\n\t}\r\n\r\n\tprotected filterForChannel(from: number, to: number, channel: number): Token[] {\r\n\t\tlet hidden: Token[] = new Array();\r\n\t\tfor (let i = from; i <= to; i++) {\r\n\t\t\tlet t: Token = this.tokens[i];\r\n\t\t\tif (channel === -1) {\r\n\t\t\t\tif (t.channel !== Lexer.DEFAULT_TOKEN_CHANNEL) {\r\n\t\t\t\t\thidden.push(t);\r\n\t\t\t\t}\r\n\t\t\t} else {\r\n\t\t\t\tif (t.channel === channel) {\r\n\t\t\t\t\thidden.push(t);\r\n\t\t\t\t}\r\n\t\t\t}\r\n\t\t}\r\n\r\n\t\treturn hidden;\r\n\t}\r\n\r\n\t@Override\r\n\tget sourceName(): string {\r\n\t\treturn this.tokenSource.sourceName;\r\n\t}\r\n\r\n\t/** Get the text of all tokens in this buffer. 
*/\r\n\tpublic getText(): string;\r\n\tpublic getText(interval: Interval): string;\r\n\tpublic getText(context: RuleContext): string;\r\n\t@NotNull\r\n\t@Override\r\n\tpublic getText(interval?: Interval | RuleContext): string {\r\n\t\tif (interval === undefined) {\r\n\t\t\tinterval = Interval.of(0, this.size - 1);\r\n\t\t} else if (!(interval instanceof Interval)) {\r\n\t\t\t// Note: the more obvious check for 'instanceof RuleContext' results in a circular dependency problem\r\n\t\t\tinterval = interval.sourceInterval;\r\n\t\t}\r\n\r\n\t\tlet start: number = interval.a;\r\n\t\tlet stop: number = interval.b;\r\n\t\tif (start < 0 || stop < 0) {\r\n\t\t\treturn \"\";\r\n\t\t}\r\n\r\n\t\tthis.fill();\r\n\t\tif (stop >= this.tokens.length) {\r\n\t\t\tstop = this.tokens.length - 1;\r\n\t\t}\r\n\r\n\t\tlet buf: string = \"\";\r\n\t\tfor (let i = start; i <= stop; i++) {\r\n\t\t\tlet t: Token = this.tokens[i];\r\n\t\t\tif (t.type === Token.EOF) {\r\n\t\t\t\tbreak;\r\n\t\t\t}\r\n\r\n\t\t\tbuf += t.text;\r\n\t\t}\r\n\r\n\t\treturn buf.toString();\r\n\t}\r\n\r\n\t@NotNull\r\n\t@Override\r\n\tpublic getTextFromRange(start: any, stop: any): string {\r\n\t\tif (this.isToken(start) && this.isToken(stop)) {\r\n\t\t\treturn this.getText(Interval.of(start.tokenIndex, stop.tokenIndex));\r\n\t\t}\r\n\r\n\t\treturn \"\";\r\n\t}\r\n\r\n\t/** Get all tokens from lexer until EOF. 
*/\r\n\tpublic fill(): void {\r\n\t\tthis.lazyInit();\r\n\t\tconst blockSize: number = 1000;\r\n\t\twhile (true) {\r\n\t\t\tlet fetched: number = this.fetch(blockSize);\r\n\t\t\tif (fetched < blockSize) {\r\n\t\t\t\treturn;\r\n\t\t\t}\r\n\t\t}\r\n\t}\r\n\r\n\t// TODO: Figure out a way to make this more flexible?\r\n\tprivate isWritableToken(t: Token): t is WritableToken {\r\n\t\treturn t instanceof CommonToken;\r\n\t}\r\n\r\n\t// TODO: Figure out a way to make this more flexible?\r\n\tprivate isToken(t: any): t is Token {\r\n\t\treturn t instanceof CommonToken;\r\n\t}\r\n}\r\n", "/*!\r\n * Copyright 2016 The ANTLR Project. All rights reserved.\r\n * Licensed under the BSD-3-Clause license. See LICENSE file in the project root for license information.\r\n */\r\n\r\n// ConvertTo-TS run at 2016-10-04T11:26:50.3953157-07:00\r\n\r\nimport { BufferedTokenStream } from \"./BufferedTokenStream\";\r\nimport { NotNull, Override } from \"./Decorators\";\r\nimport { Token } from \"./Token\";\r\nimport { TokenSource } from \"./TokenSource\";\r\n\r\n/**\r\n * This class extends {@link BufferedTokenStream} with functionality to filter\r\n * token streams to tokens on a particular channel (tokens where\r\n * {@link Token#getChannel} returns a particular value).\r\n *\r\n * This token stream provides access to all tokens by index or when calling\r\n * methods like {@link #getText}. 
The channel filtering is only used for code\r\n * accessing tokens via the lookahead methods {@link #LA}, {@link #LT}, and\r\n * {@link #LB}.\r\n *\r\n * By default, tokens are placed on the default channel\r\n * ({@link Token#DEFAULT_CHANNEL}), but may be reassigned by using the\r\n * `->channel(HIDDEN)` lexer command, or by using an embedded action to\r\n * call {@link Lexer#setChannel}.\r\n *\r\n * Note: lexer rules which use the `->skip` lexer command or call\r\n * {@link Lexer#skip} do not produce tokens at all, so input text matched by\r\n * such a rule will not be available as part of the token stream, regardless of\r\n * channel.\r\n */\r\nexport class CommonTokenStream extends BufferedTokenStream {\r\n\t/**\r\n\t * Specifies the channel to use for filtering tokens.\r\n\t *\r\n\t * The default value is {@link Token#DEFAULT_CHANNEL}, which matches the\r\n\t * default channel assigned to tokens created by the lexer.\r\n\t */\r\n\tprotected channel: number;\r\n\r\n\t/**\r\n\t * Constructs a new {@link CommonTokenStream} using the specified token\r\n\t * source and filtering tokens to the specified channel. 
Only tokens whose\r\n\t * {@link Token#getChannel} matches `channel` or have the\r\n\t * `Token.type` equal to {@link Token#EOF} will be returned by the\r\n\t * token stream lookahead methods.\r\n\t *\r\n\t * @param tokenSource The token source.\r\n\t * @param channel The channel to use for filtering tokens.\r\n\t */\r\n\tconstructor(@NotNull tokenSource: TokenSource, channel: number = Token.DEFAULT_CHANNEL) {\r\n\t\tsuper(tokenSource);\r\n\t\tthis.channel = channel;\r\n\t}\r\n\r\n\t@Override\r\n\tprotected adjustSeekIndex(i: number): number {\r\n\t\treturn this.nextTokenOnChannel(i, this.channel);\r\n\t}\r\n\r\n\t@Override\r\n\tprotected tryLB(k: number): Token | undefined {\r\n\t\tif ((this.p - k) < 0) {\r\n\t\t\treturn undefined;\r\n\t\t}\r\n\r\n\t\tlet i: number = this.p;\r\n\t\tlet n: number = 1;\r\n\t\t// find k good tokens looking backwards\r\n\t\twhile (n <= k && i > 0) {\r\n\t\t\t// skip off-channel tokens\r\n\t\t\ti = this.previousTokenOnChannel(i - 1, this.channel);\r\n\t\t\tn++;\r\n\t\t}\r\n\r\n\t\tif (i < 0) {\r\n\t\t\treturn undefined;\r\n\t\t}\r\n\r\n\t\treturn this.tokens[i];\r\n\t}\r\n\r\n\t@Override\r\n\tpublic tryLT(k: number): Token | undefined {\r\n\t\t//System.out.println(\"enter LT(\"+k+\")\");\r\n\t\tthis.lazyInit();\r\n\t\tif (k === 0) {\r\n\t\t\tthrow new RangeError(\"0 is not a valid lookahead index\");\r\n\t\t}\r\n\r\n\t\tif (k < 0) {\r\n\t\t\treturn this.tryLB(-k);\r\n\t\t}\r\n\r\n\t\tlet i: number = this.p;\r\n\t\tlet n: number = 1; // we know tokens[p] is a good one\r\n\t\t// find k good tokens\r\n\t\twhile (n < k) {\r\n\t\t\t// skip off-channel tokens, but make sure to not look past EOF\r\n\t\t\tif (this.sync(i + 1)) {\r\n\t\t\t\ti = this.nextTokenOnChannel(i + 1, this.channel);\r\n\t\t\t}\r\n\t\t\tn++;\r\n\t\t}\r\n\r\n\t\t//\t\tif ( i>range ) range = i;\r\n\t\treturn this.tokens[i];\r\n\t}\r\n\r\n\t/** Count EOF just once. 
*/\r\n\tpublic getNumberOfOnChannelTokens(): number {\r\n\t\tlet n: number = 0;\r\n\t\tthis.fill();\r\n\t\tfor (let t of this.tokens) {\r\n\t\t\tif (t.channel === this.channel) {\r\n\t\t\t\tn++;\r\n\t\t\t}\r\n\r\n\t\t\tif (t.type === Token.EOF) {\r\n\t\t\t\tbreak;\r\n\t\t\t}\r\n\t\t}\r\n\r\n\t\treturn n;\r\n\t}\r\n}\r\n", "/*!\r\n * Copyright 2016 The ANTLR Project. All rights reserved.\r\n * Licensed under the BSD-3-Clause license. See LICENSE file in the project root for license information.\r\n */\r\n\r\n// ConvertTo-TS run at 2016-10-04T11:26:52.1916955-07:00\r\n\r\nimport { CharStream } from \"./CharStream\";\r\nimport { CommonTokenFactory } from \"./CommonTokenFactory\";\r\nimport { NotNull, Override } from \"./Decorators\";\r\nimport { Token } from \"./Token\";\r\nimport { TokenFactory } from \"./TokenFactory\";\r\nimport { TokenSource } from \"./TokenSource\";\r\n\r\n/**\r\n * Provides an implementation of {@link TokenSource} as a wrapper around a list\r\n * of {@link Token} objects.\r\n *\r\n * If the final token in the list is an {@link Token#EOF} token, it will be used\r\n * as the EOF token for every call to {@link #nextToken} after the end of the\r\n * list is reached. Otherwise, an EOF token will be created.\r\n */\r\nexport class ListTokenSource implements TokenSource {\r\n\t/**\r\n\t * The wrapped collection of {@link Token} objects to return.\r\n\t */\r\n\tprotected tokens: Token[];\r\n\r\n\t/**\r\n\t * The name of the input source. If this value is `undefined`, a call to\r\n\t * {@link #getSourceName} should return the source name used to create the\r\n\t * the next token in {@link #tokens} (or the previous token if the end of\r\n\t * the input has been reached).\r\n\t */\r\n\tprivate _sourceName?: string;\r\n\r\n\t/**\r\n\t * The index into {@link #tokens} of token to return by the next call to\r\n\t * {@link #nextToken}. 
The end of the input is indicated by this value\r\n\t * being greater than or equal to the number of items in {@link #tokens}.\r\n\t */\r\n\tprotected i: number = 0;\r\n\r\n\t/**\r\n\t * This field caches the EOF token for the token source.\r\n\t */\r\n\tprotected eofToken?: Token;\r\n\r\n\t/**\r\n\t * This is the backing field for {@link #getTokenFactory} and\r\n\t * {@link setTokenFactory}.\r\n\t */\r\n\tprivate _factory: TokenFactory = CommonTokenFactory.DEFAULT;\r\n\r\n\t/**\r\n\t * Constructs a new {@link ListTokenSource} instance from the specified\r\n\t * collection of {@link Token} objects and source name.\r\n\t *\r\n\t * @param tokens The collection of {@link Token} objects to provide as a\r\n\t * {@link TokenSource}.\r\n\t * @param sourceName The name of the {@link TokenSource}. If this value is\r\n\t * `undefined`, {@link #getSourceName} will attempt to infer the name from\r\n\t * the next {@link Token} (or the previous token if the end of the input has\r\n\t * been reached).\r\n\t *\r\n\t * @exception NullPointerException if `tokens` is `undefined`\r\n\t */\r\n\tconstructor(@NotNull tokens: Token[], sourceName?: string) {\r\n\t\tif (tokens == null) {\r\n\t\t\tthrow new Error(\"tokens cannot be null\");\r\n\t\t}\r\n\r\n\t\tthis.tokens = tokens;\r\n\t\tthis._sourceName = sourceName;\r\n\t}\r\n\r\n\t/**\r\n\t * {@inheritDoc}\r\n\t */\r\n\t@Override\r\n\tget charPositionInLine(): number {\r\n\t\tif (this.i < this.tokens.length) {\r\n\t\t\treturn this.tokens[this.i].charPositionInLine;\r\n\t\t} else if (this.eofToken != null) {\r\n\t\t\treturn this.eofToken.charPositionInLine;\r\n\t\t} else if (this.tokens.length > 0) {\r\n\t\t\t// have to calculate the result from the line/column of the previous\r\n\t\t\t// token, along with the text of the token.\r\n\t\t\tlet lastToken: Token = this.tokens[this.tokens.length - 1];\r\n\t\t\tlet tokenText: string | undefined = lastToken.text;\r\n\t\t\tif (tokenText != null) {\r\n\t\t\t\tlet lastNewLine: number = 
tokenText.lastIndexOf(\"\\n\");\r\n\t\t\t\tif (lastNewLine >= 0) {\r\n\t\t\t\t\treturn tokenText.length - lastNewLine - 1;\r\n\t\t\t\t}\r\n\t\t\t}\r\n\r\n\t\t\treturn lastToken.charPositionInLine + lastToken.stopIndex - lastToken.startIndex + 1;\r\n\t\t}\r\n\r\n\t\t// only reach this if tokens is empty, meaning EOF occurs at the first\r\n\t\t// position in the input\r\n\t\treturn 0;\r\n\t}\r\n\r\n\t/**\r\n\t * {@inheritDoc}\r\n\t */\r\n\t@Override\r\n\tpublic nextToken(): Token {\r\n\t\tif (this.i >= this.tokens.length) {\r\n\t\t\tif (this.eofToken == null) {\r\n\t\t\t\tlet start: number = -1;\r\n\t\t\t\tif (this.tokens.length > 0) {\r\n\t\t\t\t\tlet previousStop: number = this.tokens[this.tokens.length - 1].stopIndex;\r\n\t\t\t\t\tif (previousStop !== -1) {\r\n\t\t\t\t\t\tstart = previousStop + 1;\r\n\t\t\t\t\t}\r\n\t\t\t\t}\r\n\r\n\t\t\t\tlet stop: number = Math.max(-1, start - 1);\r\n\t\t\t\tthis.eofToken = this._factory.create({ source: this, stream: this.inputStream }, Token.EOF, \"EOF\", Token.DEFAULT_CHANNEL, start, stop, this.line, this.charPositionInLine);\r\n\t\t\t}\r\n\r\n\t\t\treturn this.eofToken;\r\n\t\t}\r\n\r\n\t\tlet t: Token = this.tokens[this.i];\r\n\t\tif (this.i === this.tokens.length - 1 && t.type === Token.EOF) {\r\n\t\t\tthis.eofToken = t;\r\n\t\t}\r\n\r\n\t\tthis.i++;\r\n\t\treturn t;\r\n\t}\r\n\r\n\t/**\r\n\t * {@inheritDoc}\r\n\t */\r\n\t@Override\r\n\tget line(): number {\r\n\t\tif (this.i < this.tokens.length) {\r\n\t\t\treturn this.tokens[this.i].line;\r\n\t\t} else if (this.eofToken != null) {\r\n\t\t\treturn this.eofToken.line;\r\n\t\t} else if (this.tokens.length > 0) {\r\n\t\t\t// have to calculate the result from the line/column of the previous\r\n\t\t\t// token, along with the text of the token.\r\n\t\t\tlet lastToken: Token = this.tokens[this.tokens.length - 1];\r\n\t\t\tlet line: number = lastToken.line;\r\n\r\n\t\t\tlet tokenText: string | undefined = lastToken.text;\r\n\t\t\tif (tokenText != null) {\r\n\t\t\t\tfor (let i = 0; 
i < tokenText.length; i++) {\r\n\t\t\t\t\tif (tokenText.charAt(i) === \"\\n\") {\r\n\t\t\t\t\t\tline++;\r\n\t\t\t\t\t}\r\n\t\t\t\t}\r\n\t\t\t}\r\n\r\n\t\t\t// if no text is available, assume the token did not contain any newline characters.\r\n\t\t\treturn line;\r\n\t\t}\r\n\r\n\t\t// only reach this if tokens is empty, meaning EOF occurs at the first\r\n\t\t// position in the input\r\n\t\treturn 1;\r\n\t}\r\n\r\n\t/**\r\n\t * {@inheritDoc}\r\n\t */\r\n\t@Override\r\n\tget inputStream(): CharStream | undefined {\r\n\t\tif (this.i < this.tokens.length) {\r\n\t\t\treturn this.tokens[this.i].inputStream;\r\n\t\t} else if (this.eofToken != null) {\r\n\t\t\treturn this.eofToken.inputStream;\r\n\t\t} else if (this.tokens.length > 0) {\r\n\t\t\treturn this.tokens[this.tokens.length - 1].inputStream;\r\n\t\t}\r\n\r\n\t\t// no input stream information is available\r\n\t\treturn undefined;\r\n\t}\r\n\r\n\t/**\r\n\t * {@inheritDoc}\r\n\t */\r\n\t@Override\r\n\tget sourceName(): string {\r\n\t\tif (this._sourceName) {\r\n\t\t\treturn this._sourceName;\r\n\t\t}\r\n\r\n\t\tlet inputStream: CharStream | undefined = this.inputStream;\r\n\t\tif (inputStream != null) {\r\n\t\t\treturn inputStream.sourceName;\r\n\t\t}\r\n\r\n\t\treturn \"List\";\r\n\t}\r\n\r\n\t/**\r\n\t * {@inheritDoc}\r\n\t */\r\n\t// @Override\r\n\tset tokenFactory(@NotNull factory: TokenFactory) {\r\n\t\tthis._factory = factory;\r\n\t}\r\n\r\n\t/**\r\n\t * {@inheritDoc}\r\n\t */\r\n\t@Override\r\n\t@NotNull\r\n\tget tokenFactory(): TokenFactory {\r\n\t\treturn this._factory;\r\n\t}\r\n}\r\n", "/*!\r\n * Copyright 2016 The ANTLR Project. All rights reserved.\r\n * Licensed under the BSD-3-Clause license. 
See LICENSE file in the project root for license information.\r\n */\r\n\r\n// ConvertTo-TS run at 2016-10-04T11:26:42.1346951-07:00\r\n\r\nexport class MultiMap extends Map {\r\n\tconstructor() {\r\n\t\tsuper();\r\n\t}\r\n\r\n\tpublic map(key: K, value: V): void {\r\n\t\tlet elementsForKey = super.get(key);\r\n\t\tif (!elementsForKey) {\r\n\t\t\telementsForKey = [] as V[];\r\n\t\t\tsuper.set(key, elementsForKey);\r\n\t\t}\r\n\t\telementsForKey.push(value);\r\n\t}\r\n\r\n\tpublic getPairs(): Array<[K, V]> {\r\n\t\tlet pairs: Array<[K, V]> = [];\r\n\t\tthis.forEach((values: V[], key: K) => {\r\n\t\t\tvalues.forEach((v) => {\r\n\t\t\t\tpairs.push([key, v]);\r\n\t\t\t});\r\n\t\t});\r\n\t\treturn pairs;\r\n\t}\r\n}\r\n", "/*!\r\n * Copyright 2016 The ANTLR Project. All rights reserved.\r\n * Licensed under the BSD-3-Clause license. See LICENSE file in the project root for license information.\r\n */\r\n\r\n// ConvertTo-TS run at 2016-10-04T11:26:42.5447085-07:00\r\n\r\n/**\r\n * This exception is thrown to cancel a parsing operation. This exception does\r\n * not extend {@link RecognitionException}, allowing it to bypass the standard\r\n * error recovery mechanisms. {@link BailErrorStrategy} throws this exception in\r\n * response to a parse error.\r\n *\r\n * @author Sam Harwell\r\n */\r\nexport class ParseCancellationException extends Error {\r\n\t// private static serialVersionUID: number = -3529552099366979683L;\r\n\tpublic readonly stack?: string;\r\n\r\n\tconstructor(public cause: Error) {\r\n\t\tsuper(cause.message);\r\n\t\tthis.stack = cause.stack;\r\n\t}\r\n\r\n\tpublic getCause(): Error {\r\n\t\treturn this.cause;\r\n\t}\r\n}\r\n", "/*!\r\n * Copyright 2016 The ANTLR Project. All rights reserved.\r\n * Licensed under the BSD-3-Clause license. 
See LICENSE file in the project root for license information.\r\n */\r\n\r\n// ConvertTo-TS run at 2016-10-04T11:26:51.5898546-07:00\r\n\r\nimport { Override } from \"./Decorators\";\r\nimport { ParserRuleContext } from \"./ParserRuleContext\";\r\n\r\n/**\r\n * This class extends {@link ParserRuleContext} by allowing the value of\r\n * {@link #getRuleIndex} to be explicitly set for the context.\r\n *\r\n * {@link ParserRuleContext} does not include field storage for the rule index\r\n * since the context classes created by the code generator override the\r\n * {@link #getRuleIndex} method to return the correct value for that context.\r\n * Since the parser interpreter does not use the context classes generated for a\r\n * parser, this class (with slightly more memory overhead per node) is used to\r\n * provide equivalent functionality.\r\n */\r\nexport class InterpreterRuleContext extends ParserRuleContext {\r\n\t/**\r\n\t * This is the backing field for {@link #getRuleIndex}.\r\n\t */\r\n\tprivate _ruleIndex: number;\r\n\r\n\tconstructor(ruleIndex: number);\r\n\r\n\t/**\r\n\t * Constructs a new {@link InterpreterRuleContext} with the specified\r\n\t * parent, invoking state, and rule index.\r\n\t *\r\n\t * @param ruleIndex The rule index for the current context.\r\n\t * @param parent The parent context.\r\n\t * @param invokingStateNumber The invoking state number.\r\n\t */\r\n\tconstructor(ruleIndex: number, parent: ParserRuleContext | undefined, invokingStateNumber: number);\r\n\r\n\tconstructor(ruleIndex: number, parent?: ParserRuleContext, invokingStateNumber?: number) {\r\n\t\tif (invokingStateNumber !== undefined) {\r\n\t\t\tsuper(parent, invokingStateNumber);\r\n\t\t} else {\r\n\t\t\tsuper();\r\n\t\t}\r\n\r\n\t\tthis._ruleIndex = ruleIndex;\r\n\t}\r\n\r\n\t@Override\r\n\tget ruleIndex(): number {\r\n\t\treturn this._ruleIndex;\r\n\t}\r\n}\r\n", "/*!\r\n * Copyright 2016 The ANTLR Project. All rights reserved.\r\n * Licensed under the BSD-3-Clause license. 
See LICENSE file in the project root for license information.\r\n */\r\n\r\n// ConvertTo-TS run at 2016-10-04T11:26:53.1043451-07:00\r\n\r\nimport { ActionTransition } from \"./atn/ActionTransition\";\r\nimport { ATN } from \"./atn/ATN\";\r\nimport { ATNState } from \"./atn/ATNState\";\r\nimport { ATNStateType } from \"./atn/ATNStateType\";\r\nimport { AtomTransition } from \"./atn/AtomTransition\";\r\nimport { BitSet } from \"./misc/BitSet\";\r\nimport { DecisionState } from \"./atn/DecisionState\";\r\nimport { FailedPredicateException } from \"./FailedPredicateException\";\r\nimport { InputMismatchException } from \"./InputMismatchException\";\r\nimport { InterpreterRuleContext } from \"./InterpreterRuleContext\";\r\nimport { LoopEndState } from \"./atn/LoopEndState\";\r\nimport { NotNull } from \"./Decorators\";\r\nimport { Override } from \"./Decorators\";\r\nimport { Parser } from \"./Parser\";\r\nimport { ParserATNSimulator } from \"./atn/ParserATNSimulator\";\r\nimport { ParserRuleContext } from \"./ParserRuleContext\";\r\nimport { PrecedencePredicateTransition } from \"./atn/PrecedencePredicateTransition\";\r\nimport { PredicateTransition } from \"./atn/PredicateTransition\";\r\nimport { RecognitionException } from \"./RecognitionException\";\r\nimport { RuleStartState } from \"./atn/RuleStartState\";\r\nimport { RuleTransition } from \"./atn/RuleTransition\";\r\nimport { StarLoopEntryState } from \"./atn/StarLoopEntryState\";\r\nimport { Token } from \"./Token\";\r\nimport { TokenStream } from \"./TokenStream\";\r\nimport { Transition } from \"./atn/Transition\";\r\nimport { TransitionType } from \"./atn/TransitionType\";\r\nimport { Vocabulary } from \"./Vocabulary\";\r\n\r\n/** A parser simulator that mimics what ANTLR's generated\r\n * parser code does. A ParserATNSimulator is used to make\r\n * predictions via adaptivePredict but this class moves a pointer through the\r\n * ATN to simulate parsing. 
ParserATNSimulator just\r\n * makes us efficient rather than having to backtrack, for example.\r\n *\r\n * This properly creates parse trees even for left recursive rules.\r\n *\r\n * We rely on the left recursive rule invocation and special predicate\r\n * transitions to make left recursive rules work.\r\n *\r\n * See TestParserInterpreter for examples.\r\n */\r\nexport class ParserInterpreter extends Parser {\r\n\tprotected _grammarFileName: string;\r\n\tprotected _atn: ATN;\r\n\t/** This identifies StarLoopEntryState's that begin the (...)*\r\n\t * precedence loops of left recursive rules.\r\n\t */\r\n\tprotected pushRecursionContextStates: BitSet;\r\n\r\n\tprotected _ruleNames: string[];\r\n\t@NotNull\r\n\tprivate _vocabulary: Vocabulary;\r\n\r\n\t/** This stack corresponds to the _parentctx, _parentState pair of locals\r\n\t * that would exist on call stack frames with a recursive descent parser;\r\n\t * in the generated function for a left-recursive rule you'd see:\r\n\t *\r\n\t * private EContext e(int _p) {\r\n\t * ParserRuleContext _parentctx = _ctx; // Pair.a\r\n\t * int _parentState = state; // Pair.b\r\n\t * ...\r\n\t * }\r\n\t *\r\n\t * Those values are used to create new recursive rule invocation contexts\r\n\t * associated with left operand of an alt like \"expr '*' expr\".\r\n\t */\r\n\tprotected readonly _parentContextStack: Array<[ParserRuleContext, number]> = [];\r\n\r\n\t/** We need a map from (decision,inputIndex)->forced alt for computing ambiguous\r\n\t * parse trees. For now, we allow exactly one override.\r\n\t */\r\n\tprotected overrideDecision: number = -1;\r\n\tprotected overrideDecisionInputIndex: number = -1;\r\n\tprotected overrideDecisionAlt: number = -1;\r\n\tprotected overrideDecisionReached: boolean = false; // latch and only override once; error might trigger infinite loop\r\n\r\n\t/** What is the current context when we override a decisions? 
This tells\r\n\t * us what the root of the parse tree is when using override\r\n\t * for an ambiguity/lookahead check.\r\n\t */\r\n\tprotected _overrideDecisionRoot?: InterpreterRuleContext = undefined;\r\n\r\n\tprotected _rootContext!: InterpreterRuleContext;\r\n\r\n\t/** A copy constructor that creates a new parser interpreter by reusing\r\n\t * the fields of a previous interpreter.\r\n\t *\r\n\t * @param old The interpreter to copy\r\n\t *\r\n\t * @since 4.5\r\n\t */\r\n\tconstructor(/*@NotNull*/ old: ParserInterpreter);\r\n\tconstructor(\r\n\t\tgrammarFileName: string, /*@NotNull*/ vocabulary: Vocabulary,\r\n\t\truleNames: string[], atn: ATN, input: TokenStream);\r\n\tconstructor(\r\n\t\tgrammarFileName: ParserInterpreter | string, @NotNull vocabulary?: Vocabulary,\r\n\t\truleNames?: string[], atn?: ATN, input?: TokenStream) {\r\n\t\tsuper(grammarFileName instanceof ParserInterpreter ? grammarFileName.inputStream : input!);\r\n\t\tif (grammarFileName instanceof ParserInterpreter) {\r\n\t\t\tlet old: ParserInterpreter = grammarFileName;\r\n\t\t\tthis._grammarFileName = old._grammarFileName;\r\n\t\t\tthis._atn = old._atn;\r\n\t\t\tthis.pushRecursionContextStates = old.pushRecursionContextStates;\r\n\t\t\tthis._ruleNames = old._ruleNames;\r\n\t\t\tthis._vocabulary = old._vocabulary;\r\n\t\t\tthis.interpreter = new ParserATNSimulator(this._atn, this);\r\n\t\t} else {\r\n\t\t\t// The second constructor requires non-null arguments\r\n\t\t\tvocabulary = vocabulary!;\r\n\t\t\truleNames = ruleNames!;\r\n\t\t\tatn = atn!;\r\n\r\n\t\t\tthis._grammarFileName = grammarFileName;\r\n\t\t\tthis._atn = atn;\r\n\t\t\tthis._ruleNames = ruleNames.slice(0);\r\n\t\t\tthis._vocabulary = vocabulary;\r\n\r\n\t\t\t// identify the ATN states where pushNewRecursionContext() must be called\r\n\t\t\tthis.pushRecursionContextStates = new BitSet(atn.states.length);\r\n\t\t\tfor (let state of atn.states) {\r\n\t\t\t\tif (!(state instanceof StarLoopEntryState)) 
{\r\n\t\t\t\t\tcontinue;\r\n\t\t\t\t}\r\n\r\n\t\t\t\tif (state.precedenceRuleDecision) {\r\n\t\t\t\t\tthis.pushRecursionContextStates.set(state.stateNumber);\r\n\t\t\t\t}\r\n\t\t\t}\r\n\r\n\t\t\t// get atn simulator that knows how to do predictions\r\n\t\t\tthis.interpreter = new ParserATNSimulator(atn, this);\r\n\t\t}\r\n\t}\r\n\r\n\t@Override\r\n\tpublic reset(resetInput?: boolean): void {\r\n\t\tif (resetInput === undefined) {\r\n\t\t\tsuper.reset();\r\n\t\t} else {\r\n\t\t\tsuper.reset(resetInput);\r\n\t\t}\r\n\r\n\t\tthis.overrideDecisionReached = false;\r\n\t\tthis._overrideDecisionRoot = undefined;\r\n\t}\r\n\r\n\t@Override\r\n\tget atn(): ATN {\r\n\t\treturn this._atn;\r\n\t}\r\n\r\n\t@Override\r\n\tget vocabulary(): Vocabulary {\r\n\t\treturn this._vocabulary;\r\n\t}\r\n\r\n\t@Override\r\n\tget ruleNames(): string[] {\r\n\t\treturn this._ruleNames;\r\n\t}\r\n\r\n\t@Override\r\n\tget grammarFileName(): string {\r\n\t\treturn this._grammarFileName;\r\n\t}\r\n\r\n\t/** Begin parsing at startRuleIndex */\r\n\tpublic parse(startRuleIndex: number): ParserRuleContext {\r\n\t\tlet startRuleStartState: RuleStartState = this._atn.ruleToStartState[startRuleIndex];\r\n\r\n\t\tthis._rootContext = this.createInterpreterRuleContext(undefined, ATNState.INVALID_STATE_NUMBER, startRuleIndex);\r\n\t\tif (startRuleStartState.isPrecedenceRule) {\r\n\t\t\tthis.enterRecursionRule(this._rootContext, startRuleStartState.stateNumber, startRuleIndex, 0);\r\n\t\t}\r\n\t\telse {\r\n\t\t\tthis.enterRule(this._rootContext, startRuleStartState.stateNumber, startRuleIndex);\r\n\t\t}\r\n\r\n\t\twhile (true) {\r\n\t\t\tlet p: ATNState = this.atnState;\r\n\t\t\tswitch (p.stateType) {\r\n\t\t\tcase ATNStateType.RULE_STOP:\r\n\t\t\t\t// pop; return from rule\r\n\t\t\t\tif (this._ctx.isEmpty) {\r\n\t\t\t\t\tif (startRuleStartState.isPrecedenceRule) {\r\n\t\t\t\t\t\tlet result: ParserRuleContext = this._ctx;\r\n\t\t\t\t\t\tlet parentContext: [ParserRuleContext, number] = 
this._parentContextStack.pop() !;\r\n\t\t\t\t\t\tthis.unrollRecursionContexts(parentContext[0]);\r\n\t\t\t\t\t\treturn result;\r\n\t\t\t\t\t}\r\n\t\t\t\t\telse {\r\n\t\t\t\t\t\tthis.exitRule();\r\n\t\t\t\t\t\treturn this._rootContext;\r\n\t\t\t\t\t}\r\n\t\t\t\t}\r\n\r\n\t\t\t\tthis.visitRuleStopState(p);\r\n\t\t\t\tbreak;\r\n\r\n\t\t\tdefault:\r\n\t\t\t\ttry {\r\n\t\t\t\t\tthis.visitState(p);\r\n\t\t\t\t}\r\n\t\t\t\tcatch (e) {\r\n\t\t\t\t\tif (e instanceof RecognitionException) {\r\n\t\t\t\t\t\tthis.state = this._atn.ruleToStopState[p.ruleIndex].stateNumber;\r\n\t\t\t\t\t\tthis.context.exception = e;\r\n\t\t\t\t\t\tthis.errorHandler.reportError(this, e);\r\n\t\t\t\t\t\tthis.recover(e);\r\n\t\t\t\t\t} else {\r\n\t\t\t\t\t\tthrow e;\r\n\t\t\t\t\t}\r\n\t\t\t\t}\r\n\r\n\t\t\t\tbreak;\r\n\t\t\t}\r\n\t\t}\r\n\t}\r\n\r\n\t@Override\r\n\tpublic enterRecursionRule(localctx: ParserRuleContext, state: number, ruleIndex: number, precedence: number): void {\r\n\t\tthis._parentContextStack.push([this._ctx, localctx.invokingState]);\r\n\t\tsuper.enterRecursionRule(localctx, state, ruleIndex, precedence);\r\n\t}\r\n\r\n\tprotected get atnState(): ATNState {\r\n\t\treturn this._atn.states[this.state];\r\n\t}\r\n\r\n\tprotected visitState(p: ATNState): void {\r\n\t\tlet predictedAlt: number = 1;\r\n\t\tif (p.numberOfTransitions > 1) {\r\n\t\t\tpredictedAlt = this.visitDecisionState(p as DecisionState);\r\n\t\t}\r\n\r\n\t\tlet transition: Transition = p.transition(predictedAlt - 1);\r\n\t\tswitch (transition.serializationType) {\r\n\t\tcase TransitionType.EPSILON:\r\n\t\t\tif (this.pushRecursionContextStates.get(p.stateNumber) &&\r\n\t\t\t\t!(transition.target instanceof LoopEndState)) {\r\n\t\t\t\t// We are at the start of a left recursive rule's (...)* loop\r\n\t\t\t\t// and we're not taking the exit branch of loop.\r\n\t\t\t\tlet parentContext = this._parentContextStack[this._parentContextStack.length - 1];\r\n\t\t\t\tlet localctx: InterpreterRuleContext 
=\r\n\t\t\t\t\tthis.createInterpreterRuleContext(parentContext[0], parentContext[1], this._ctx.ruleIndex);\r\n\t\t\t\tthis.pushNewRecursionContext(localctx,\r\n\t\t\t\t\tthis._atn.ruleToStartState[p.ruleIndex].stateNumber,\r\n\t\t\t\t\tthis._ctx.ruleIndex);\r\n\t\t\t}\r\n\t\t\tbreak;\r\n\r\n\t\tcase TransitionType.ATOM:\r\n\t\t\tthis.match((transition as AtomTransition)._label);\r\n\t\t\tbreak;\r\n\r\n\t\tcase TransitionType.RANGE:\r\n\t\tcase TransitionType.SET:\r\n\t\tcase TransitionType.NOT_SET:\r\n\t\t\tif (!transition.matches(this._input.LA(1), Token.MIN_USER_TOKEN_TYPE, 65535)) {\r\n\t\t\t\tthis.recoverInline();\r\n\t\t\t}\r\n\t\t\tthis.matchWildcard();\r\n\t\t\tbreak;\r\n\r\n\t\tcase TransitionType.WILDCARD:\r\n\t\t\tthis.matchWildcard();\r\n\t\t\tbreak;\r\n\r\n\t\tcase TransitionType.RULE:\r\n\t\t\tlet ruleStartState: RuleStartState = transition.target as RuleStartState;\r\n\t\t\tlet ruleIndex: number = ruleStartState.ruleIndex;\r\n\t\t\tlet newctx: InterpreterRuleContext = this.createInterpreterRuleContext(this._ctx, p.stateNumber, ruleIndex);\r\n\t\t\tif (ruleStartState.isPrecedenceRule) {\r\n\t\t\t\tthis.enterRecursionRule(newctx, ruleStartState.stateNumber, ruleIndex, (transition as RuleTransition).precedence);\r\n\t\t\t}\r\n\t\t\telse {\r\n\t\t\t\tthis.enterRule(newctx, transition.target.stateNumber, ruleIndex);\r\n\t\t\t}\r\n\t\t\tbreak;\r\n\r\n\t\tcase TransitionType.PREDICATE:\r\n\t\t\tlet predicateTransition: PredicateTransition = transition as PredicateTransition;\r\n\t\t\tif (!this.sempred(this._ctx, predicateTransition.ruleIndex, predicateTransition.predIndex)) {\r\n\t\t\t\tthrow new FailedPredicateException(this);\r\n\t\t\t}\r\n\r\n\t\t\tbreak;\r\n\r\n\t\tcase TransitionType.ACTION:\r\n\t\t\tlet actionTransition: ActionTransition = transition as ActionTransition;\r\n\t\t\tthis.action(this._ctx, actionTransition.ruleIndex, actionTransition.actionIndex);\r\n\t\t\tbreak;\r\n\r\n\t\tcase TransitionType.PRECEDENCE:\r\n\t\t\tif 
(!this.precpred(this._ctx, (transition as PrecedencePredicateTransition).precedence)) {\r\n\t\t\t\tlet precedence = (transition as PrecedencePredicateTransition).precedence;\r\n\t\t\t\tthrow new FailedPredicateException(this, `precpred(_ctx, ${precedence})`);\r\n\t\t\t}\r\n\t\t\tbreak;\r\n\r\n\t\tdefault:\r\n\t\t\tthrow new Error(\"UnsupportedOperationException: Unrecognized ATN transition type.\");\r\n\t\t}\r\n\r\n\t\tthis.state = transition.target.stateNumber;\r\n\t}\r\n\r\n\t/** Method visitDecisionState() is called when the interpreter reaches\r\n\t * a decision state (instance of DecisionState). It gives an opportunity\r\n\t * for subclasses to track interesting things.\r\n\t */\r\n\tprotected visitDecisionState(p: DecisionState): number {\r\n\t\tlet predictedAlt: number;\r\n\t\tthis.errorHandler.sync(this);\r\n\t\tlet decision: number = p.decision;\r\n\t\tif (decision === this.overrideDecision && this._input.index === this.overrideDecisionInputIndex && !this.overrideDecisionReached) {\r\n\t\t\tpredictedAlt = this.overrideDecisionAlt;\r\n\t\t\tthis.overrideDecisionReached = true;\r\n\t\t}\r\n\t\telse {\r\n\t\t\tpredictedAlt = this.interpreter.adaptivePredict(this._input, decision, this._ctx);\r\n\t\t}\r\n\t\treturn predictedAlt;\r\n\t}\r\n\r\n\t/** Provide simple \"factory\" for InterpreterRuleContext's.\r\n\t * @since 4.5.1\r\n\t */\r\n\tprotected createInterpreterRuleContext(\r\n\t\tparent: ParserRuleContext | undefined,\r\n\t\tinvokingStateNumber: number,\r\n\t\truleIndex: number): InterpreterRuleContext {\r\n\t\treturn new InterpreterRuleContext(ruleIndex, parent, invokingStateNumber);\r\n\t}\r\n\r\n\tprotected visitRuleStopState(p: ATNState): void {\r\n\t\tlet ruleStartState: RuleStartState = this._atn.ruleToStartState[p.ruleIndex];\r\n\t\tif (ruleStartState.isPrecedenceRule) {\r\n\t\t\tlet parentContext: [ParserRuleContext, number] = this._parentContextStack.pop()!;\r\n\t\t\tthis.unrollRecursionContexts(parentContext[0]);\r\n\t\t\tthis.state = 
parentContext[1];\r\n\t\t}\r\n\t\telse {\r\n\t\t\tthis.exitRule();\r\n\t\t}\r\n\r\n\t\tlet ruleTransition: RuleTransition = this._atn.states[this.state].transition(0) as RuleTransition;\r\n\t\tthis.state = ruleTransition.followState.stateNumber;\r\n\t}\r\n\r\n\t/** Override this parser interpreters normal decision-making process\r\n\t * at a particular decision and input token index. Instead of\r\n\t * allowing the adaptive prediction mechanism to choose the\r\n\t * first alternative within a block that leads to a successful parse,\r\n\t * force it to take the alternative, 1..n for n alternatives.\r\n\t *\r\n\t * As an implementation limitation right now, you can only specify one\r\n\t * override. This is sufficient to allow construction of different\r\n\t * parse trees for ambiguous input. It means re-parsing the entire input\r\n\t * in general because you're never sure where an ambiguous sequence would\r\n\t * live in the various parse trees. For example, in one interpretation,\r\n\t * an ambiguous input sequence would be matched completely in expression\r\n\t * but in another it could match all the way back to the root.\r\n\t *\r\n\t * s : e '!'? ;\r\n\t * e : ID\r\n\t * | ID '!'\r\n\t * ;\r\n\t *\r\n\t * Here, x! can be matched as (s (e ID) !) or (s (e ID !)). In the first\r\n\t * case, the ambiguous sequence is fully contained only by the root.\r\n\t * In the second case, the ambiguous sequences fully contained within just\r\n\t * e, as in: (e ID !).\r\n\t *\r\n\t * Rather than trying to optimize this and make\r\n\t * some intelligent decisions for optimization purposes, I settled on\r\n\t * just re-parsing the whole input and then using\r\n\t * {link Trees#getRootOfSubtreeEnclosingRegion} to find the minimal\r\n\t * subtree that contains the ambiguous sequence. 
I originally tried to\r\n\t * record the call stack at the point the parser detected and ambiguity but\r\n\t * left recursive rules create a parse tree stack that does not reflect\r\n\t * the actual call stack. That impedance mismatch was enough to make\r\n\t * it it challenging to restart the parser at a deeply nested rule\r\n\t * invocation.\r\n\t *\r\n\t * Only parser interpreters can override decisions so as to avoid inserting\r\n\t * override checking code in the critical ALL(*) prediction execution path.\r\n\t *\r\n\t * @since 4.5\r\n\t */\r\n\tpublic addDecisionOverride(decision: number, tokenIndex: number, forcedAlt: number): void {\r\n\t\tthis.overrideDecision = decision;\r\n\t\tthis.overrideDecisionInputIndex = tokenIndex;\r\n\t\tthis.overrideDecisionAlt = forcedAlt;\r\n\t}\r\n\r\n\tget overrideDecisionRoot(): InterpreterRuleContext | undefined {\r\n\t\treturn this._overrideDecisionRoot;\r\n\t}\r\n\r\n\t/** Rely on the error handler for this parser but, if no tokens are consumed\r\n\t * to recover, add an error node. Otherwise, nothing is seen in the parse\r\n\t * tree.\r\n\t */\r\n\tprotected recover(e: RecognitionException): void {\r\n\t\tlet i: number = this._input.index;\r\n\t\tthis.errorHandler.recover(this, e);\r\n\t\tif (this._input.index === i) {\r\n\t\t\t// no input consumed, better add an error node\r\n\t\t\tlet tok: Token | undefined = e.getOffendingToken();\r\n\t\t\tif (!tok) {\r\n\t\t\t\tthrow new Error(\"Expected exception to have an offending token\");\r\n\t\t\t}\r\n\r\n\t\t\tlet source = tok.tokenSource;\r\n\t\t\tlet stream = source !== undefined ? 
source.inputStream : undefined;\r\n\t\t\tlet sourcePair = { source, stream };\r\n\r\n\t\t\tif (e instanceof InputMismatchException) {\r\n\t\t\t\tlet expectedTokens = e.expectedTokens;\r\n\t\t\t\tif (expectedTokens === undefined) {\r\n\t\t\t\t\tthrow new Error(\"Expected the exception to provide expected tokens\");\r\n\t\t\t\t}\r\n\r\n\t\t\t\tlet expectedTokenType: number = Token.INVALID_TYPE;\r\n\t\t\t\tif (!expectedTokens.isNil) {\r\n\t\t\t\t\t// get any element\r\n\t\t\t\t\texpectedTokenType = expectedTokens.minElement;\r\n\t\t\t\t}\r\n\r\n\t\t\t\tlet errToken: Token =\r\n\t\t\t\t\tthis.tokenFactory.create(sourcePair,\r\n\t\t\t\t\t\texpectedTokenType, tok.text,\r\n\t\t\t\t\t\tToken.DEFAULT_CHANNEL,\r\n\t\t\t\t\t\t-1, -1, // invalid start/stop\r\n\t\t\t\t\t\ttok.line, tok.charPositionInLine);\r\n\t\t\t\tthis._ctx.addErrorNode(this.createErrorNode(this._ctx, errToken));\r\n\t\t\t}\r\n\t\t\telse { // NoViableAlt\r\n\t\t\t\tlet source = tok.tokenSource;\r\n\t\t\t\tlet errToken: Token =\r\n\t\t\t\t\tthis.tokenFactory.create(sourcePair,\r\n\t\t\t\t\t\tToken.INVALID_TYPE, tok.text,\r\n\t\t\t\t\t\tToken.DEFAULT_CHANNEL,\r\n\t\t\t\t\t\t-1, -1, // invalid start/stop\r\n\t\t\t\t\t\ttok.line, tok.charPositionInLine);\r\n\t\t\t\tthis._ctx.addErrorNode(this.createErrorNode(this._ctx, errToken));\r\n\t\t\t}\r\n\t\t}\r\n\t}\r\n\r\n\tprotected recoverInline(): Token {\r\n\t\treturn this._errHandler.recoverInline(this);\r\n\t}\r\n\r\n\t/** Return the root of the parse, which can be useful if the parser\r\n\t * bails out. You still can access the top node. Note that,\r\n\t * because of the way left recursive rules add children, it's possible\r\n\t * that the root will not have any children if the start rule immediately\r\n\t * called and left recursive rule that fails.\r\n\t *\r\n\t * @since 4.5.1\r\n\t */\r\n\tget rootContext(): InterpreterRuleContext {\r\n\t\treturn this._rootContext;\r\n\t}\r\n}\r\n", "/*!\r\n * Copyright 2016 The ANTLR Project. 
All rights reserved.\r\n * Licensed under the BSD-3-Clause license. See LICENSE file in the project root for license information.\r\n */\r\n\r\n// CONVERSTION complete, Burt Harris 10/14/2016\r\nimport { MultiMap } from \"../../misc/MultiMap\";\r\nimport { NotNull, Override } from \"../../Decorators\";\r\nimport { ParseTree } from \"../ParseTree\";\r\nimport { ParseTreePattern } from \"./ParseTreePattern\";\r\n\r\n/**\r\n * Represents the result of matching a {@link ParseTree} against a tree pattern.\r\n */\r\nexport class ParseTreeMatch {\r\n\t/**\r\n\t * This is the backing field for `tree`.\r\n\t */\r\n\tprivate _tree: ParseTree;\r\n\r\n\t/**\r\n\t * This is the backing field for `pattern`.\r\n\t */\r\n\tprivate _pattern: ParseTreePattern;\r\n\r\n\t/**\r\n\t * This is the backing field for `labels`.\r\n\t */\r\n\tprivate _labels: MultiMap;\r\n\r\n\t/**\r\n\t * This is the backing field for `mismatchedNode`.\r\n\t */\r\n\tprivate _mismatchedNode?: ParseTree;\r\n\r\n\t/**\r\n\t * Constructs a new instance of {@link ParseTreeMatch} from the specified\r\n\t * parse tree and pattern.\r\n\t *\r\n\t * @param tree The parse tree to match against the pattern.\r\n\t * @param pattern The parse tree pattern.\r\n\t * @param labels A mapping from label names to collections of\r\n\t * {@link ParseTree} objects located by the tree pattern matching process.\r\n\t * @param mismatchedNode The first node which failed to match the tree\r\n\t * pattern during the matching process.\r\n\t *\r\n\t * @throws {@link Error} if `tree` is not defined\r\n\t * @throws {@link Error} if `pattern` is not defined\r\n\t * @throws {@link Error} if `labels` is not defined\r\n\t */\r\n\tconstructor(\r\n\t\t@NotNull tree: ParseTree,\r\n\t\t@NotNull pattern: ParseTreePattern,\r\n\t\t@NotNull labels: MultiMap,\r\n\t\tmismatchedNode: ParseTree | undefined) {\r\n\t\tif (!tree) {\r\n\t\t\tthrow new Error(\"tree cannot be null\");\r\n\t\t}\r\n\r\n\t\tif (!pattern) {\r\n\t\t\tthrow new Error(\"pattern cannot 
be null\");\r\n\t\t}\r\n\r\n\t\tif (!labels) {\r\n\t\t\tthrow new Error(\"labels cannot be null\");\r\n\t\t}\r\n\r\n\t\tthis._tree = tree;\r\n\t\tthis._pattern = pattern;\r\n\t\tthis._labels = labels;\r\n\t\tthis._mismatchedNode = mismatchedNode;\r\n\t}\r\n\r\n\t/**\r\n\t * Get the last node associated with a specific `label`.\r\n\t *\r\n\t * For example, for pattern ``, `get(\"id\")` returns the\r\n\t * node matched for that `ID`. If more than one node\r\n\t * matched the specified label, only the last is returned. If there is\r\n\t * no node associated with the label, this returns `undefined`.\r\n\t *\r\n\t * Pattern tags like `` and `` without labels are\r\n\t * considered to be labeled with `ID` and `expr`, respectively.\r\n\t *\r\n\t * @param label The label to check.\r\n\t *\r\n\t * @returns The last {@link ParseTree} to match a tag with the specified\r\n\t * label, or `undefined` if no parse tree matched a tag with the label.\r\n\t */\r\n\tpublic get(label: string): ParseTree | undefined {\r\n\t\tlet parseTrees = this._labels.get(label);\r\n\t\tif (!parseTrees || parseTrees.length === 0) {\r\n\t\t\treturn undefined;\r\n\t\t}\r\n\r\n\t\treturn parseTrees[parseTrees.length - 1]; // return last if multiple\r\n\t}\r\n\r\n\t/**\r\n\t * Return all nodes matching a rule or token tag with the specified label.\r\n\t *\r\n\t * If the `label` is the name of a parser rule or token in the\r\n\t * grammar, the resulting list will contain both the parse trees matching\r\n\t * rule or tags explicitly labeled with the label and the complete set of\r\n\t * parse trees matching the labeled and unlabeled tags in the pattern for\r\n\t * the parser rule or token. 
For example, if `label` is `\"foo\"`,\r\n\t * the result will contain *all* of the following.\r\n\t *\r\n\t * * Parse tree nodes matching tags of the form `` and\r\n\t * ``.\r\n\t * * Parse tree nodes matching tags of the form ``.\r\n\t * * Parse tree nodes matching tags of the form ``.\r\n\t *\r\n\t * @param label The label.\r\n\t *\r\n\t * @returns A collection of all {@link ParseTree} nodes matching tags with\r\n\t * the specified `label`. If no nodes matched the label, an empty list\r\n\t * is returned.\r\n\t */\r\n\t@NotNull\r\n\tpublic getAll(@NotNull label: string): ParseTree[] {\r\n\t\tconst nodes = this._labels.get(label);\r\n\t\tif (!nodes) {\r\n\t\t\treturn [];\r\n\t\t}\r\n\t\treturn nodes;\r\n\t}\r\n\r\n\t/**\r\n\t * Return a mapping from label → [list of nodes].\r\n\t *\r\n\t * The map includes special entries corresponding to the names of rules and\r\n\t * tokens referenced in tags in the original pattern. For additional\r\n\t * information, see the description of {@link #getAll(String)}.\r\n\t *\r\n\t * @returns A mapping from labels to parse tree nodes. 
If the parse tree\r\n\t * pattern did not contain any rule or token tags, this map will be empty.\r\n\t */\r\n\t@NotNull\r\n\tget labels(): MultiMap {\r\n\t\treturn this._labels;\r\n\t}\r\n\r\n\t/**\r\n\t * Get the node at which we first detected a mismatch.\r\n\t *\r\n\t * @returns the node at which we first detected a mismatch, or `undefined`\r\n\t * if the match was successful.\r\n\t */\r\n\tget mismatchedNode(): ParseTree | undefined {\r\n\t\treturn this._mismatchedNode;\r\n\t}\r\n\r\n\t/**\r\n\t * Gets a value indicating whether the match operation succeeded.\r\n\t *\r\n\t * @returns `true` if the match operation succeeded; otherwise,\r\n\t * `false`.\r\n\t */\r\n\tget succeeded(): boolean {\r\n\t\treturn !this._mismatchedNode;\r\n\t}\r\n\r\n\t/**\r\n\t * Get the tree pattern we are matching against.\r\n\t *\r\n\t * @returns The tree pattern we are matching against.\r\n\t */\r\n\t@NotNull\r\n\tget pattern(): ParseTreePattern {\r\n\t\treturn this._pattern;\r\n\t}\r\n\r\n\t/**\r\n\t * Get the parse tree we are trying to match to a pattern.\r\n\t *\r\n\t * @returns The {@link ParseTree} we are trying to match to a pattern.\r\n\t */\r\n\t@NotNull\r\n\tget tree(): ParseTree {\r\n\t\treturn this._tree;\r\n\t}\r\n\r\n\t/**\r\n\t * {@inheritDoc}\r\n\t */\r\n\t@Override\r\n\tpublic toString(): string {\r\n\t\treturn `Match ${\r\n\t\t\tthis.succeeded ? 
\"succeeded\" : \"failed\"}; found ${\r\n\t\t\tthis.labels.size} labels`;\r\n\t}\r\n}\r\n", "// Generated from XPathLexer.g4 by ANTLR 4.9.0-SNAPSHOT\r\n\r\n\r\nimport { ATN } from \"../../atn/ATN\";\r\nimport { ATNDeserializer } from \"../../atn/ATNDeserializer\";\r\nimport { CharStream } from \"../../CharStream\";\r\nimport { Lexer } from \"../../Lexer\";\r\nimport { LexerATNSimulator } from \"../../atn/LexerATNSimulator\";\r\nimport { NotNull } from \"../../Decorators\";\r\nimport { Override } from \"../../Decorators\";\r\nimport { RuleContext } from \"../../RuleContext\";\r\nimport { Vocabulary } from \"../../Vocabulary\";\r\nimport { VocabularyImpl } from \"../../VocabularyImpl\";\r\n\r\nimport * as Utils from \"../../misc/Utils\";\r\n\r\n\r\nexport class XPathLexer extends Lexer {\r\n\tpublic static readonly TOKEN_REF = 1;\r\n\tpublic static readonly RULE_REF = 2;\r\n\tpublic static readonly ANYWHERE = 3;\r\n\tpublic static readonly ROOT = 4;\r\n\tpublic static readonly WILDCARD = 5;\r\n\tpublic static readonly BANG = 6;\r\n\tpublic static readonly ID = 7;\r\n\tpublic static readonly STRING = 8;\r\n\r\n\t// tslint:disable:no-trailing-whitespace\r\n\tpublic static readonly channelNames: string[] = [\r\n\t\t\"DEFAULT_TOKEN_CHANNEL\", \"HIDDEN\",\r\n\t];\r\n\r\n\t// tslint:disable:no-trailing-whitespace\r\n\tpublic static readonly modeNames: string[] = [\r\n\t\t\"DEFAULT_MODE\",\r\n\t];\r\n\r\n\tpublic static readonly ruleNames: string[] = [\r\n\t\t\"ANYWHERE\", \"ROOT\", \"WILDCARD\", \"BANG\", \"ID\", \"NameChar\", \"NameStartChar\", \r\n\t\t\"STRING\",\r\n\t];\r\n\r\n\tprivate static readonly _LITERAL_NAMES: Array = [\r\n\t\tundefined, undefined, undefined, \"'//'\", \"'/'\", \"'*'\", \"'!'\",\r\n\t];\r\n\tprivate static readonly _SYMBOLIC_NAMES: Array = [\r\n\t\tundefined, \"TOKEN_REF\", \"RULE_REF\", \"ANYWHERE\", \"ROOT\", \"WILDCARD\", \"BANG\", \r\n\t\t\"ID\", \"STRING\",\r\n\t];\r\n\tpublic static readonly VOCABULARY: Vocabulary = new 
VocabularyImpl(XPathLexer._LITERAL_NAMES, XPathLexer._SYMBOLIC_NAMES, []);\r\n\r\n\t// @Override\r\n\t// @NotNull\r\n\tpublic get vocabulary(): Vocabulary {\r\n\t\treturn XPathLexer.VOCABULARY;\r\n\t}\r\n\t// tslint:enable:no-trailing-whitespace\r\n\r\n\r\n\tconstructor(input: CharStream) {\r\n\t\tsuper(input);\r\n\t\tthis._interp = new LexerATNSimulator(XPathLexer._ATN, this);\r\n\t}\r\n\r\n\t// @Override\r\n\tpublic get grammarFileName(): string { return \"XPathLexer.g4\"; }\r\n\r\n\t// @Override\r\n\tpublic get ruleNames(): string[] { return XPathLexer.ruleNames; }\r\n\r\n\t// @Override\r\n\tpublic get serializedATN(): string { return XPathLexer._serializedATN; }\r\n\r\n\t// @Override\r\n\tpublic get channelNames(): string[] { return XPathLexer.channelNames; }\r\n\r\n\t// @Override\r\n\tpublic get modeNames(): string[] { return XPathLexer.modeNames; }\r\n\r\n\t// @Override\r\n\tpublic action(_localctx: RuleContext, ruleIndex: number, actionIndex: number): void {\r\n\t\tswitch (ruleIndex) {\r\n\t\tcase 4:\r\n\t\t\tthis.ID_action(_localctx, actionIndex);\r\n\t\t\tbreak;\r\n\t\t}\r\n\t}\r\n\tprivate ID_action(_localctx: RuleContext, actionIndex: number): void {\r\n\t\tswitch (actionIndex) {\r\n\t\tcase 0:\r\n\r\n\t\t\tlet text = this.text;\r\n\t\t\tif (text.charAt(0) === text.charAt(0).toUpperCase()) {\r\n\t\t\t\tthis.type = XPathLexer.TOKEN_REF;\r\n\t\t\t} else {\r\n\t\t\t\tthis.type = XPathLexer.RULE_REF;\r\n\t\t\t}\r\n\r\n\t\t\tbreak;\r\n\t\t}\r\n\t}\r\n\r\n\tprivate static readonly _serializedATNSegments: number = 2;\r\n\tprivate static readonly _serializedATNSegment0: string =\r\n\t\t\"\\x03\\uC91D\\uCABA\\u058D\\uAFBA\\u4F53\\u0607\\uEA8B\\uC241\\x02\\n2\\b\\x01\\x04\" +\r\n\t\t\"\\x02\\t\\x02\\x04\\x03\\t\\x03\\x04\\x04\\t\\x04\\x04\\x05\\t\\x05\\x04\\x06\\t\\x06\\x04\" +\r\n\t\t\"\\x07\\t\\x07\\x04\\b\\t\\b\\x04\\t\\t\\t\\x03\\x02\\x03\\x02\\x03\\x02\\x03\\x03\\x03\\x03\" 
+\r\n\t\t\"\\x03\\x04\\x03\\x04\\x03\\x05\\x03\\x05\\x03\\x06\\x03\\x06\\x07\\x06\\x1F\\n\\x06\\f\\x06\" +\r\n\t\t\"\\x0E\\x06\\\"\\v\\x06\\x03\\x06\\x03\\x06\\x03\\x07\\x03\\x07\\x03\\b\\x03\\b\\x03\\t\\x03\" +\r\n\t\t\"\\t\\x07\\t,\\n\\t\\f\\t\\x0E\\t/\\v\\t\\x03\\t\\x03\\t\\x03-\\x02\\x02\\n\\x03\\x02\\x05\\x05\" +\r\n\t\t\"\\x02\\x06\\x07\\x02\\x07\\t\\x02\\b\\v\\x02\\t\\r\\x02\\x02\\x0F\\x02\\x02\\x11\\x02\\n\\x03\" +\r\n\t\t\"\\x02\\x02\\x04\\u02B6\\x02\\x02\\x02\\n\\x02\\x10\\x02\\x1D\\x022\\x02;\\x02C\\x02\\\\\" +\r\n\t\t\"\\x02a\\x02a\\x02c\\x02|\\x02\\x81\\x02\\xA1\\x02\\xAC\\x02\\xAC\\x02\\xAF\\x02\\xAF\\x02\" +\r\n\t\t\"\\xB7\\x02\\xB7\\x02\\xBC\\x02\\xBC\\x02\\xC2\\x02\\xD8\\x02\\xDA\\x02\\xF8\\x02\\xFA\\x02\" +\r\n\t\t\"\\u02C3\\x02\\u02C8\\x02\\u02D3\\x02\\u02E2\\x02\\u02E6\\x02\\u02EE\\x02\\u02EE\\x02\" +\r\n\t\t\"\\u02F0\\x02\\u02F0\\x02\\u0302\\x02\\u0376\\x02\\u0378\\x02\\u0379\\x02\\u037C\\x02\" +\r\n\t\t\"\\u037F\\x02\\u0381\\x02\\u0381\\x02\\u0388\\x02\\u0388\\x02\\u038A\\x02\\u038C\\x02\" +\r\n\t\t\"\\u038E\\x02\\u038E\\x02\\u0390\\x02\\u03A3\\x02\\u03A5\\x02\\u03F7\\x02\\u03F9\\x02\" +\r\n\t\t\"\\u0483\\x02\\u0485\\x02\\u0489\\x02\\u048C\\x02\\u0531\\x02\\u0533\\x02\\u0558\\x02\" +\r\n\t\t\"\\u055B\\x02\\u055B\\x02\\u0563\\x02\\u0589\\x02\\u0593\\x02\\u05BF\\x02\\u05C1\\x02\" +\r\n\t\t\"\\u05C1\\x02\\u05C3\\x02\\u05C4\\x02\\u05C6\\x02\\u05C7\\x02\\u05C9\\x02\\u05C9\\x02\" +\r\n\t\t\"\\u05D2\\x02\\u05EC\\x02\\u05F2\\x02\\u05F4\\x02\\u0602\\x02\\u0607\\x02\\u0612\\x02\" +\r\n\t\t\"\\u061C\\x02\\u061E\\x02\\u061E\\x02\\u0622\\x02\\u066B\\x02\\u0670\\x02\\u06D5\\x02\" +\r\n\t\t\"\\u06D7\\x02\\u06DF\\x02\\u06E1\\x02\\u06EA\\x02\\u06EC\\x02\\u06FE\\x02\\u0701\\x02\" +\r\n\t\t\"\\u0701\\x02\\u0711\\x02\\u074C\\x02\\u074F\\x02\\u07B3\\x02\\u07C2\\x02\\u07F7\\x02\" +\r\n\t\t\"\\u07FC\\x02\\u07FC\\x02\\u0802\\x02\\u082F\\x02\\u0842\\x02\\u085D\\x02\\u08A2\\x02\" 
+\r\n\t\t\"\\u08B6\\x02\\u08B8\\x02\\u08BF\\x02\\u08D6\\x02\\u0965\\x02\\u0968\\x02\\u0971\\x02\" +\r\n\t\t\"\\u0973\\x02\\u0985\\x02\\u0987\\x02\\u098E\\x02\\u0991\\x02\\u0992\\x02\\u0995\\x02\" +\r\n\t\t\"\\u09AA\\x02\\u09AC\\x02\\u09B2\\x02\\u09B4\\x02\\u09B4\\x02\\u09B8\\x02\\u09BB\\x02\" +\r\n\t\t\"\\u09BE\\x02\\u09C6\\x02\\u09C9\\x02\\u09CA\\x02\\u09CD\\x02\\u09D0\\x02\\u09D9\\x02\" +\r\n\t\t\"\\u09D9\\x02\\u09DE\\x02\\u09DF\\x02\\u09E1\\x02\\u09E5\\x02\\u09E8\\x02\\u09F3\\x02\" +\r\n\t\t\"\\u0A03\\x02\\u0A05\\x02\\u0A07\\x02\\u0A0C\\x02\\u0A11\\x02\\u0A12\\x02\\u0A15\\x02\" +\r\n\t\t\"\\u0A2A\\x02\\u0A2C\\x02\\u0A32\\x02\\u0A34\\x02\\u0A35\\x02\\u0A37\\x02\\u0A38\\x02\" +\r\n\t\t\"\\u0A3A\\x02\\u0A3B\\x02\\u0A3E\\x02\\u0A3E\\x02\\u0A40\\x02\\u0A44\\x02\\u0A49\\x02\" +\r\n\t\t\"\\u0A4A\\x02\\u0A4D\\x02\\u0A4F\\x02\\u0A53\\x02\\u0A53\\x02\\u0A5B\\x02\\u0A5E\\x02\" +\r\n\t\t\"\\u0A60\\x02\\u0A60\\x02\\u0A68\\x02\\u0A77\\x02\\u0A83\\x02\\u0A85\\x02\\u0A87\\x02\" +\r\n\t\t\"\\u0A8F\\x02\\u0A91\\x02\\u0A93\\x02\\u0A95\\x02\\u0AAA\\x02\\u0AAC\\x02\\u0AB2\\x02\" +\r\n\t\t\"\\u0AB4\\x02\\u0AB5\\x02\\u0AB7\\x02\\u0ABB\\x02\\u0ABE\\x02\\u0AC7\\x02\\u0AC9\\x02\" +\r\n\t\t\"\\u0ACB\\x02\\u0ACD\\x02\\u0ACF\\x02\\u0AD2\\x02\\u0AD2\\x02\\u0AE2\\x02\\u0AE5\\x02\" +\r\n\t\t\"\\u0AE8\\x02\\u0AF1\\x02\\u0AFB\\x02\\u0AFB\\x02\\u0B03\\x02\\u0B05\\x02\\u0B07\\x02\" +\r\n\t\t\"\\u0B0E\\x02\\u0B11\\x02\\u0B12\\x02\\u0B15\\x02\\u0B2A\\x02\\u0B2C\\x02\\u0B32\\x02\" +\r\n\t\t\"\\u0B34\\x02\\u0B35\\x02\\u0B37\\x02\\u0B3B\\x02\\u0B3E\\x02\\u0B46\\x02\\u0B49\\x02\" +\r\n\t\t\"\\u0B4A\\x02\\u0B4D\\x02\\u0B4F\\x02\\u0B58\\x02\\u0B59\\x02\\u0B5E\\x02\\u0B5F\\x02\" +\r\n\t\t\"\\u0B61\\x02\\u0B65\\x02\\u0B68\\x02\\u0B71\\x02\\u0B73\\x02\\u0B73\\x02\\u0B84\\x02\" +\r\n\t\t\"\\u0B85\\x02\\u0B87\\x02\\u0B8C\\x02\\u0B90\\x02\\u0B92\\x02\\u0B94\\x02\\u0B97\\x02\" +\r\n\t\t\"\\u0B9B\\x02\\u0B9C\\x02\\u0B9E\\x02\\u0B9E\\x02\\u0BA0\\x02\\u0BA1\\x02\\u0BA5\\x02\" 
+\r\n\t\t\"\\u0BA6\\x02\\u0BAA\\x02\\u0BAC\\x02\\u0BB0\\x02\\u0BBB\\x02\\u0BC0\\x02\\u0BC4\\x02\" +\r\n\t\t\"\\u0BC8\\x02\\u0BCA\\x02\\u0BCC\\x02\\u0BCF\\x02\\u0BD2\\x02\\u0BD2\\x02\\u0BD9\\x02\" +\r\n\t\t\"\\u0BD9\\x02\\u0BE8\\x02\\u0BF1\\x02\\u0C02\\x02\\u0C05\\x02\\u0C07\\x02\\u0C0E\\x02\" +\r\n\t\t\"\\u0C10\\x02\\u0C12\\x02\\u0C14\\x02\\u0C2A\\x02\\u0C2C\\x02\\u0C3B\\x02\\u0C3F\\x02\" +\r\n\t\t\"\\u0C46\\x02\\u0C48\\x02\\u0C4A\\x02\\u0C4C\\x02\\u0C4F\\x02\\u0C57\\x02\\u0C58\\x02\" +\r\n\t\t\"\\u0C5A\\x02\\u0C5C\\x02\\u0C62\\x02\\u0C65\\x02\\u0C68\\x02\\u0C71\\x02\\u0C82\\x02\" +\r\n\t\t\"\\u0C85\\x02\\u0C87\\x02\\u0C8E\\x02\\u0C90\\x02\\u0C92\\x02\\u0C94\\x02\\u0CAA\\x02\" +\r\n\t\t\"\\u0CAC\\x02\\u0CB5\\x02\\u0CB7\\x02\\u0CBB\\x02\\u0CBE\\x02\\u0CC6\\x02\\u0CC8\\x02\" +\r\n\t\t\"\\u0CCA\\x02\\u0CCC\\x02\\u0CCF\\x02\\u0CD7\\x02\\u0CD8\\x02\\u0CE0\\x02\\u0CE0\\x02\" +\r\n\t\t\"\\u0CE2\\x02\\u0CE5\\x02\\u0CE8\\x02\\u0CF1\\x02\\u0CF3\\x02\\u0CF4\\x02\\u0D03\\x02\" +\r\n\t\t\"\\u0D05\\x02\\u0D07\\x02\\u0D0E\\x02\\u0D10\\x02\\u0D12\\x02\\u0D14\\x02\\u0D3C\\x02\" +\r\n\t\t\"\\u0D3F\\x02\\u0D46\\x02\\u0D48\\x02\\u0D4A\\x02\\u0D4C\\x02\\u0D50\\x02\\u0D56\\x02\" +\r\n\t\t\"\\u0D59\\x02\\u0D61\\x02\\u0D65\\x02\\u0D68\\x02\\u0D71\\x02\\u0D7C\\x02\\u0D81\\x02\" +\r\n\t\t\"\\u0D84\\x02\\u0D85\\x02\\u0D87\\x02\\u0D98\\x02\\u0D9C\\x02\\u0DB3\\x02\\u0DB5\\x02\" +\r\n\t\t\"\\u0DBD\\x02\\u0DBF\\x02\\u0DBF\\x02\\u0DC2\\x02\\u0DC8\\x02\\u0DCC\\x02\\u0DCC\\x02\" +\r\n\t\t\"\\u0DD1\\x02\\u0DD6\\x02\\u0DD8\\x02\\u0DD8\\x02\\u0DDA\\x02\\u0DE1\\x02\\u0DE8\\x02\" +\r\n\t\t\"\\u0DF1\\x02\\u0DF4\\x02\\u0DF5\\x02\\u0E03\\x02\\u0E3C\\x02\\u0E42\\x02\\u0E50\\x02\" +\r\n\t\t\"\\u0E52\\x02\\u0E5B\\x02\\u0E83\\x02\\u0E84\\x02\\u0E86\\x02\\u0E86\\x02\\u0E89\\x02\" +\r\n\t\t\"\\u0E8A\\x02\\u0E8C\\x02\\u0E8C\\x02\\u0E8F\\x02\\u0E8F\\x02\\u0E96\\x02\\u0E99\\x02\" +\r\n\t\t\"\\u0E9B\\x02\\u0EA1\\x02\\u0EA3\\x02\\u0EA5\\x02\\u0EA7\\x02\\u0EA7\\x02\\u0EA9\\x02\" 
+\r\n\t\t\"\\u0EA9\\x02\\u0EAC\\x02\\u0EAD\\x02\\u0EAF\\x02\\u0EBB\\x02\\u0EBD\\x02\\u0EBF\\x02\" +\r\n\t\t\"\\u0EC2\\x02\\u0EC6\\x02\\u0EC8\\x02\\u0EC8\\x02\\u0ECA\\x02\\u0ECF\\x02\\u0ED2\\x02\" +\r\n\t\t\"\\u0EDB\\x02\\u0EDE\\x02\\u0EE1\\x02\\u0F02\\x02\\u0F02\\x02\\u0F1A\\x02\\u0F1B\\x02\" +\r\n\t\t\"\\u0F22\\x02\\u0F2B\\x02\\u0F37\\x02\\u0F37\\x02\\u0F39\\x02\\u0F39\\x02\\u0F3B\\x02\" +\r\n\t\t\"\\u0F3B\\x02\\u0F40\\x02\\u0F49\\x02\\u0F4B\\x02\\u0F6E\\x02\\u0F73\\x02\\u0F86\\x02\" +\r\n\t\t\"\\u0F88\\x02\\u0F99\\x02\\u0F9B\\x02\\u0FBE\\x02\\u0FC8\\x02\\u0FC8\\x02\\u1002\\x02\" +\r\n\t\t\"\\u104B\\x02\\u1052\\x02\\u109F\\x02\\u10A2\\x02\\u10C7\\x02\\u10C9\\x02\\u10C9\\x02\" +\r\n\t\t\"\\u10CF\\x02\\u10CF\\x02\\u10D2\\x02\\u10FC\\x02\\u10FE\\x02\\u124A\\x02\\u124C\\x02\" +\r\n\t\t\"\\u124F\\x02\\u1252\\x02\\u1258\\x02\\u125A\\x02\\u125A\\x02\\u125C\\x02\\u125F\\x02\" +\r\n\t\t\"\\u1262\\x02\\u128A\\x02\\u128C\\x02\\u128F\\x02\\u1292\\x02\\u12B2\\x02\\u12B4\\x02\" +\r\n\t\t\"\\u12B7\\x02\\u12BA\\x02\\u12C0\\x02\\u12C2\\x02\\u12C2\\x02\\u12C4\\x02\\u12C7\\x02\" +\r\n\t\t\"\\u12CA\\x02\\u12D8\\x02\\u12DA\\x02\\u1312\\x02\\u1314\\x02\\u1317\\x02\\u131A\\x02\" +\r\n\t\t\"\\u135C\\x02\\u135F\\x02\\u1361\\x02\\u1382\\x02\\u1391\\x02\\u13A2\\x02\\u13F7\\x02\" +\r\n\t\t\"\\u13FA\\x02\\u13FF\\x02\\u1403\\x02\\u166E\\x02\\u1671\\x02\\u1681\\x02\\u1683\\x02\" +\r\n\t\t\"\\u169C\\x02\\u16A2\\x02\\u16EC\\x02\\u16F0\\x02\\u16FA\\x02\\u1702\\x02\\u170E\\x02\" +\r\n\t\t\"\\u1710\\x02\\u1716\\x02\\u1722\\x02\\u1736\\x02\\u1742\\x02\\u1755\\x02\\u1762\\x02\" +\r\n\t\t\"\\u176E\\x02\\u1770\\x02\\u1772\\x02\\u1774\\x02\\u1775\\x02\\u1782\\x02\\u17D5\\x02\" +\r\n\t\t\"\\u17D9\\x02\\u17D9\\x02\\u17DE\\x02\\u17DF\\x02\\u17E2\\x02\\u17EB\\x02\\u180D\\x02\" +\r\n\t\t\"\\u1810\\x02\\u1812\\x02\\u181B\\x02\\u1822\\x02\\u1879\\x02\\u1882\\x02\\u18AC\\x02\" +\r\n\t\t\"\\u18B2\\x02\\u18F7\\x02\\u1902\\x02\\u1920\\x02\\u1922\\x02\\u192D\\x02\\u1932\\x02\" 
+\r\n\t\t\"\\u193D\\x02\\u1948\\x02\\u196F\\x02\\u1972\\x02\\u1976\\x02\\u1982\\x02\\u19AD\\x02\" +\r\n\t\t\"\\u19B2\\x02\\u19CB\\x02\\u19D2\\x02\\u19DB\\x02\\u1A02\\x02\\u1A1D\\x02\\u1A22\\x02\" +\r\n\t\t\"\\u1A60\\x02\\u1A62\\x02\\u1A7E\\x02\\u1A81\\x02\\u1A8B\\x02\\u1A92\\x02\\u1A9B\\x02\" +\r\n\t\t\"\\u1AA9\\x02\\u1AA9\\x02\\u1AB2\\x02\\u1ABF\\x02\\u1B02\\x02\\u1B4D\\x02\\u1B52\\x02\" +\r\n\t\t\"\\u1B5B\\x02\\u1B6D\\x02\\u1B75\\x02\\u1B82\\x02\\u1BF5\\x02\\u1C02\\x02\\u1C39\\x02\" +\r\n\t\t\"\\u1C42\\x02\\u1C4B\\x02\\u1C4F\\x02\\u1C7F\\x02\\u1C82\\x02\\u1C8A\\x02\\u1CD2\\x02\" +\r\n\t\t\"\\u1CD4\\x02\\u1CD6\\x02\\u1CF8\\x02\\u1CFA\\x02\\u1CFB\\x02\\u1D02\\x02\\u1DF7\\x02\" +\r\n\t\t\"\\u1DFD\\x02\\u1F17\\x02\\u1F1A\\x02\\u1F1F\\x02\\u1F22\\x02\\u1F47\\x02\\u1F4A\\x02\" +\r\n\t\t\"\\u1F4F\\x02\\u1F52\\x02\\u1F59\\x02\\u1F5B\\x02\\u1F5B\\x02\\u1F5D\\x02\\u1F5D\\x02\" +\r\n\t\t\"\\u1F5F\\x02\\u1F5F\\x02\\u1F61\\x02\\u1F7F\\x02\\u1F82\\x02\\u1FB6\\x02\\u1FB8\\x02\" +\r\n\t\t\"\\u1FBE\\x02\\u1FC0\\x02\\u1FC0\\x02\\u1FC4\\x02\\u1FC6\\x02\\u1FC8\\x02\\u1FCE\\x02\" +\r\n\t\t\"\\u1FD2\\x02\\u1FD5\\x02\\u1FD8\\x02\\u1FDD\\x02\\u1FE2\\x02\\u1FEE\\x02\\u1FF4\\x02\" +\r\n\t\t\"\\u1FF6\\x02\\u1FF8\\x02\\u1FFE\\x02\\u200D\\x02\\u2011\\x02\\u202C\\x02\\u2030\\x02\" +\r\n\t\t\"\\u2041\\x02\\u2042\\x02\\u2056\\x02\\u2056\\x02\\u2062\\x02\\u2066\\x02\\u2068\\x02\" +\r\n\t\t\"\\u2071\\x02\\u2073\\x02\\u2073\\x02\\u2081\\x02\\u2081\\x02\\u2092\\x02\\u209E\\x02\" +\r\n\t\t\"\\u20D2\\x02\\u20DE\\x02\\u20E3\\x02\\u20E3\\x02\\u20E7\\x02\\u20F2\\x02\\u2104\\x02\" +\r\n\t\t\"\\u2104\\x02\\u2109\\x02\\u2109\\x02\\u210C\\x02\\u2115\\x02\\u2117\\x02\\u2117\\x02\" +\r\n\t\t\"\\u211B\\x02\\u211F\\x02\\u2126\\x02\\u2126\\x02\\u2128\\x02\\u2128\\x02\\u212A\\x02\" +\r\n\t\t\"\\u212A\\x02\\u212C\\x02\\u212F\\x02\\u2131\\x02\\u213B\\x02\\u213E\\x02\\u2141\\x02\" +\r\n\t\t\"\\u2147\\x02\\u214B\\x02\\u2150\\x02\\u2150\\x02\\u2162\\x02\\u218A\\x02\\u2C02\\x02\" 
+\r\n\t\t\"\\u2C30\\x02\\u2C32\\x02\\u2C60\\x02\\u2C62\\x02\\u2CE6\\x02\\u2CED\\x02\\u2CF5\\x02\" +\r\n\t\t\"\\u2D02\\x02\\u2D27\\x02\\u2D29\\x02\\u2D29\\x02\\u2D2F\\x02\\u2D2F\\x02\\u2D32\\x02\" +\r\n\t\t\"\\u2D69\\x02\\u2D71\\x02\\u2D71\\x02\\u2D81\\x02\\u2D98\\x02\\u2DA2\\x02\\u2DA8\\x02\" +\r\n\t\t\"\\u2DAA\\x02\\u2DB0\\x02\\u2DB2\\x02\\u2DB8\\x02\\u2DBA\\x02\\u2DC0\\x02\\u2DC2\\x02\" +\r\n\t\t\"\\u2DC8\\x02\\u2DCA\\x02\\u2DD0\\x02\\u2DD2\\x02\\u2DD8\\x02\\u2DDA\\x02\\u2DE0\\x02\" +\r\n\t\t\"\\u2DE2\\x02\\u2E01\\x02\\u2E31\\x02\\u2E31\\x02\\u3007\\x02\\u3009\\x02\\u3023\\x02\" +\r\n\t\t\"\\u3031\\x02\\u3033\\x02\\u3037\\x02\\u303A\\x02\\u303E\\x02\\u3043\\x02\\u3098\\x02\" +\r\n\t\t\"\\u309B\\x02\\u309C\\x02\\u309F\\x02\\u30A1\\x02\\u30A3\\x02\\u30FC\\x02\\u30FE\\x02\" +\r\n\t\t\"\\u3101\\x02\\u3107\\x02\\u312F\\x02\\u3133\\x02\\u3190\\x02\\u31A2\\x02\\u31BC\\x02\" +\r\n\t\t\"\\u31F2\\x02\\u3201\\x02\\u3402\\x02\\u4DB7\\x02\\u4E02\\x02\\u9FD7\\x02\\uA002\\x02\" +\r\n\t\t\"\\uA48E\\x02\\uA4D2\\x02\\uA4FF\\x02\\uA502\\x02\\uA60E\\x02\\uA612\\x02\\uA62D\\x02\" +\r\n\t\t\"\\uA642\\x02\\uA671\\x02\\uA676\\x02\\uA67F\\x02\\uA681\\x02\\uA6F3\\x02\\uA719\\x02\" +\r\n\t\t\"\\uA721\\x02\\uA724\\x02\\uA78A\\x02\\uA78D\\x02\\uA7B0\\x02\\uA7B2\\x02\\uA7B9\\x02\" +\r\n\t\t\"\\uA7F9\\x02\\uA829\\x02\\uA842\\x02\\uA875\\x02\\uA882\\x02\\uA8C7\\x02\\uA8D2\\x02\" +\r\n\t\t\"\\uA8DB\\x02\\uA8E2\\x02\\uA8F9\\x02\\uA8FD\\x02\\uA8FD\\x02\\uA8FF\\x02\\uA8FF\\x02\" +\r\n\t\t\"\\uA902\\x02\\uA92F\\x02\\uA932\\x02\\uA955\\x02\\uA962\\x02\\uA97E\\x02\\uA982\\x02\" +\r\n\t\t\"\\uA9C2\\x02\\uA9D1\\x02\\uA9DB\\x02\\uA9E2\\x02\\uAA00\\x02\\uAA02\\x02\\uAA38\\x02\" +\r\n\t\t\"\\uAA42\\x02\\uAA4F\\x02\\uAA52\\x02\\uAA5B\\x02\\uAA62\\x02\\uAA78\\x02\\uAA7C\\x02\" +\r\n\t\t\"\\uAAC4\\x02\\uAADD\\x02\\uAADF\\x02\\uAAE2\\x02\\uAAF1\\x02\\uAAF4\\x02\\uAAF8\\x02\" +\r\n\t\t\"\\uAB03\\x02\\uAB08\\x02\\uAB0B\\x02\\uAB10\\x02\\uAB13\\x02\\uAB18\\x02\\uAB22\\x02\" 
+\r\n\t\t\"\\uAB28\\x02\\uAB2A\\x02\\uAB30\\x02\\uAB32\\x02\\uAB5C\\x02\\uAB5E\\x02\\uAB67\\x02\" +\r\n\t\t\"\\uAB72\\x02\\uABEC\\x02\\uABEE\\x02\\uABEF\\x02\\uABF2\\x02\\uABFB\\x02\\uAC02\\x02\" +\r\n\t\t\"\\uD7A5\\x02\\uD7B2\\x02\\uD7C8\\x02\\uD7CD\\x02\\uD7FD\\x02\\uF902\\x02\\uFA6F\\x02\" +\r\n\t\t\"\\uFA72\\x02\\uFADB\\x02\\uFB02\\x02\\uFB08\\x02\\uFB15\\x02\\uFB19\\x02\\uFB1F\\x02\" +\r\n\t\t\"\\uFB2A\\x02\\uFB2C\\x02\\uFB38\\x02\\uFB3A\\x02\\uFB3E\\x02\\uFB40\\x02\\uFB40\\x02\" +\r\n\t\t\"\\uFB42\\x02\\uFB43\\x02\\uFB45\\x02\\uFB46\\x02\\uFB48\\x02\\uFBB3\\x02\\uFBD5\\x02\" +\r\n\t\t\"\\uFD3F\\x02\\uFD52\\x02\\uFD91\\x02\\uFD94\\x02\\uFDC9\\x02\\uFDF2\\x02\\uFDFD\\x02\" +\r\n\t\t\"\\uFE02\\x02\\uFE11\\x02\\uFE22\\x02\\uFE31\\x02\\uFE35\\x02\\uFE36\\x02\\uFE4F\\x02\" +\r\n\t\t\"\\uFE51\\x02\\uFE72\\x02\\uFE76\\x02\\uFE78\\x02\\uFEFE\\x02\\uFF01\\x02\\uFF01\\x02\" +\r\n\t\t\"\\uFF12\\x02\\uFF1B\\x02\\uFF23\\x02\\uFF3C\\x02\\uFF41\\x02\\uFF41\\x02\\uFF43\\x02\" +\r\n\t\t\"\\uFF5C\\x02\\uFF68\\x02\\uFFC0\\x02\\uFFC4\\x02\\uFFC9\\x02\\uFFCC\\x02\\uFFD1\\x02\" +\r\n\t\t\"\\uFFD4\\x02\\uFFD9\\x02\\uFFDC\\x02\\uFFDE\\x02\\uFFFB\\x02\\uFFFD\\x02\\x02\\x03\\r\" +\r\n\t\t\"\\x03\\x0F\\x03(\\x03*\\x03<\\x03>\\x03?\\x03A\\x03O\\x03R\\x03_\\x03\\x82\\x03\\xFC\" +\r\n\t\t\"\\x03\\u0142\\x03\\u0176\\x03\\u01FF\\x03\\u01FF\\x03\\u0282\\x03\\u029E\\x03\\u02A2\" +\r\n\t\t\"\\x03\\u02D2\\x03\\u02E2\\x03\\u02E2\\x03\\u0302\\x03\\u0321\\x03\\u0332\\x03\\u034C\" +\r\n\t\t\"\\x03\\u0352\\x03\\u037C\\x03\\u0382\\x03\\u039F\\x03\\u03A2\\x03\\u03C5\\x03\\u03CA\" +\r\n\t\t\"\\x03\\u03D1\\x03\\u03D3\\x03\\u03D7\\x03\\u0402\\x03\\u049F\\x03\\u04A2\\x03\\u04AB\" +\r\n\t\t\"\\x03\\u04B2\\x03\\u04D5\\x03\\u04DA\\x03\\u04FD\\x03\\u0502\\x03\\u0529\\x03\\u0532\" +\r\n\t\t\"\\x03\\u0565\\x03\\u0602\\x03\\u0738\\x03\\u0742\\x03\\u0757\\x03\\u0762\\x03\\u0769\" +\r\n\t\t\"\\x03\\u0802\\x03\\u0807\\x03\\u080A\\x03\\u080A\\x03\\u080C\\x03\\u0837\\x03\\u0839\" 
+\r\n\t\t\"\\x03\\u083A\\x03\\u083E\\x03\\u083E\\x03\\u0841\\x03\\u0857\\x03\\u0862\\x03\\u0878\" +\r\n\t\t\"\\x03\\u0882\\x03\\u08A0\\x03\\u08E2\\x03\\u08F4\\x03\\u08F6\\x03\\u08F7\\x03\\u0902\" +\r\n\t\t\"\\x03\\u0917\\x03\\u0922\\x03\\u093B\\x03\\u0982\\x03\\u09B9\\x03\\u09C0\\x03\\u09C1\" +\r\n\t\t\"\\x03\\u0A02\\x03\\u0A05\\x03\\u0A07\\x03\\u0A08\\x03\\u0A0E\\x03\\u0A15\\x03\\u0A17\" +\r\n\t\t\"\\x03\\u0A19\\x03\\u0A1B\\x03\\u0A35\\x03\\u0A3A\\x03\\u0A3C\\x03\\u0A41\\x03\\u0A41\" +\r\n\t\t\"\\x03\\u0A62\\x03\\u0A7E\\x03\\u0A82\\x03\\u0A9E\\x03\\u0AC2\\x03\\u0AC9\\x03\\u0ACB\" +\r\n\t\t\"\\x03\\u0AE8\\x03\\u0B02\\x03\\u0B37\\x03\\u0B42\\x03\\u0B57\\x03\\u0B62\\x03\\u0B74\" +\r\n\t\t\"\\x03\\u0B82\\x03\\u0B93\\x03\\u0C02\\x03\\u0C4A\\x03\\u0C82\\x03\\u0CB4\\x03\\u0CC2\" +\r\n\t\t\"\\x03\\u0CF4\\x03\\u1002\\x03\\u1048\\x03\\u1068\\x03\\u1071\\x03\\u1081\\x03\\u10BC\" +\r\n\t\t\"\\x03\\u10BF\\x03\\u10BF\\x03\\u10D2\\x03\\u10EA\\x03\\u10F2\\x03\\u10FB\\x03\\u1102\" +\r\n\t\t\"\\x03\\u1136\\x03\\u1138\\x03\\u1141\\x03\\u1152\\x03\\u1175\\x03\\u1178\\x03\\u1178\" +\r\n\t\t\"\\x03\\u1182\\x03\\u11C6\\x03\\u11CC\\x03\\u11CE\\x03\\u11D2\\x03\\u11DC\\x03\\u11DE\" +\r\n\t\t\"\\x03\\u11DE\\x03\\u1202\\x03\\u1213\\x03\\u1215\\x03\\u1239\\x03\\u1240\\x03\\u1240\" +\r\n\t\t\"\\x03\\u1282\\x03\\u1288\\x03\\u128A\\x03\\u128A\\x03\\u128C\\x03\\u128F\\x03\\u1291\" +\r\n\t\t\"\\x03\\u129F\\x03\\u12A1\\x03\\u12AA\\x03\\u12B2\\x03\\u12EC\\x03\\u12F2\\x03\\u12FB\" +\r\n\t\t\"\\x03\\u1302\\x03\\u1305\\x03\\u1307\\x03\\u130E\\x03\\u1311\\x03\\u1312\\x03\\u1315\" +\r\n\t\t\"\\x03\\u132A\\x03\\u132C\\x03\\u1332\\x03\\u1334\\x03\\u1335\\x03\\u1337\\x03\\u133B\" +\r\n\t\t\"\\x03\\u133E\\x03\\u1346\\x03\\u1349\\x03\\u134A\\x03\\u134D\\x03\\u134F\\x03\\u1352\" +\r\n\t\t\"\\x03\\u1352\\x03\\u1359\\x03\\u1359\\x03\\u135F\\x03\\u1365\\x03\\u1368\\x03\\u136E\" +\r\n\t\t\"\\x03\\u1372\\x03\\u1376\\x03\\u1402\\x03\\u144C\\x03\\u1452\\x03\\u145B\\x03\\u1482\" 
+\r\n\t\t\"\\x03\\u14C7\\x03\\u14C9\\x03\\u14C9\\x03\\u14D2\\x03\\u14DB\\x03\\u1582\\x03\\u15B7\" +\r\n\t\t\"\\x03\\u15BA\\x03\\u15C2\\x03\\u15DA\\x03\\u15DF\\x03\\u1602\\x03\\u1642\\x03\\u1646\" +\r\n\t\t\"\\x03\\u1646\\x03\\u1652\\x03\\u165B\\x03\\u1682\\x03\\u16B9\\x03\\u16C2\\x03\\u16CB\" +\r\n\t\t\"\\x03\\u1702\\x03\\u171B\\x03\\u171F\\x03\\u172D\\x03\\u1732\\x03\\u173B\\x03\\u18A2\" +\r\n\t\t\"\\x03\\u18EB\\x03\\u1901\\x03\\u1901\\x03\\u1AC2\\x03\\u1AFA\\x03\\u1C02\\x03\\u1C0A\" +\r\n\t\t\"\\x03\\u1C0C\\x03\\u1C38\\x03\\u1C3A\\x03\\u1C42\\x03\\u1C52\\x03\\u1C5B\\x03\\u1C74\" +\r\n\t\t\"\\x03\\u1C91\\x03\\u1C94\\x03\\u1CA9\\x03\\u1CAB\\x03\\u1CB8\\x03\\u2002\\x03\\u239B\" +\r\n\t\t\"\\x03\\u2402\\x03\\u2470\\x03\\u2482\\x03\\u2545\\x03\\u3002\\x03\\u3430\\x03\\u4402\" +\r\n\t\t\"\\x03\\u4648\\x03\\u6802\\x03\\u6A3A\\x03\\u6A42\\x03\\u6A60\\x03\\u6A62\\x03\\u6A6B\" +\r\n\t\t\"\\x03\\u6AD2\\x03\\u6AEF\\x03\\u6AF2\\x03\\u6AF6\\x03\\u6B02\\x03\\u6B38\\x03\\u6B42\" +\r\n\t\t\"\\x03\\u6B45\\x03\\u6B52\\x03\\u6B5B\\x03\\u6B65\\x03\\u6B79\\x03\\u6B7F\\x03\\u6B91\" +\r\n\t\t\"\\x03\\u6F02\\x03\\u6F46\\x03\\u6F52\\x03\\u6F80\\x03\\u6F91\\x03\\u6FA1\\x03\\u6FE2\" +\r\n\t\t\"\\x03\\u6FE2\\x03\\u7002\\x03\\u87EE\\x03\\u8802\\x03\\u8AF4\\x03\\uB002\\x03\\uB003\" +\r\n\t\t\"\\x03\\uBC02\\x03\\uBC6C\\x03\\uBC72\\x03\\uBC7E\\x03\\uBC82\\x03\\uBC8A\\x03\\uBC92\" +\r\n\t\t\"\\x03\\uBC9B\\x03\\uBC9F\\x03\\uBCA0\\x03\\uBCA2\\x03\\uBCA5\\x03\\uD167\\x03\\uD16B\" +\r\n\t\t\"\\x03\\uD16F\\x03\\uD184\\x03\\uD187\\x03\\uD18D\\x03\\uD1AC\\x03\\uD1AF\\x03\\uD244\" +\r\n\t\t\"\\x03\\uD246\\x03\\uD402\\x03\\uD456\\x03\\uD458\\x03\\uD49E\\x03\\uD4A0\\x03\\uD4A1\" +\r\n\t\t\"\\x03\\uD4A4\\x03\\uD4A4\\x03\\uD4A7\\x03\\uD4A8\\x03\\uD4AB\\x03\\uD4AE\\x03\\uD4B0\" +\r\n\t\t\"\\x03\\uD4BB\\x03\\uD4BD\\x03\\uD4BD\\x03\\uD4BF\\x03\\uD4C5\\x03\\uD4C7\\x03\\uD507\" +\r\n\t\t\"\\x03\\uD509\\x03\\uD50C\\x03\\uD50F\\x03\\uD516\\x03\\uD518\\x03\\uD51E\\x03\\uD520\" 
+\r\n\t\t\"\\x03\\uD53B\\x03\\uD53D\\x03\\uD540\\x03\\uD542\\x03\\uD546\\x03\\uD548\\x03\\uD548\" +\r\n\t\t\"\\x03\\uD54C\\x03\\uD552\\x03\\uD554\\x03\\uD6A7\\x03\\uD6AA\\x03\\uD6C2\\x03\\uD6C4\" +\r\n\t\t\"\\x03\\uD6DC\\x03\\uD6DE\\x03\\uD6FC\\x03\\uD6FE\\x03\\uD716\\x03\\uD718\\x03\\uD736\" +\r\n\t\t\"\\x03\\uD738\\x03\\uD750\\x03\\uD752\\x03\\uD770\\x03\\uD772\\x03\\uD78A\\x03\\uD78C\" +\r\n\t\t\"\\x03\\uD7AA\\x03\\uD7AC\\x03\\uD7C4\\x03\\uD7C6\\x03\\uD7CD\\x03\\uD7D0\\x03\\uD801\" +\r\n\t\t\"\\x03\\uDA02\\x03\\uDA38\\x03\\uDA3D\\x03\\uDA6E\\x03\\uDA77\\x03\\uDA77\\x03\\uDA86\" +\r\n\t\t\"\\x03\\uDA86\\x03\\uDA9D\\x03\\uDAA1\\x03\\uDAA3\\x03\\uDAB1\\x03\\uE002\\x03\\uE008\" +\r\n\t\t\"\\x03\\uE00A\\x03\\uE01A\\x03\\uE01D\\x03\\uE023\\x03\\uE025\\x03\\uE026\\x03\\uE028\" +\r\n\t\t\"\\x03\\uE02C\\x03\\uE802\\x03\\uE8C6\\x03\\uE8D2\\x03\\uE8D8\\x03\\uE902\\x03\\uE94C\" +\r\n\t\t\"\\x03\\uE952\\x03\\uE95B\\x03\\uEE02\\x03\\uEE05\\x03\\uEE07\\x03\\uEE21\\x03\\uEE23\" +\r\n\t\t\"\\x03\\uEE24\\x03\\uEE26\\x03\\uEE26\\x03\\uEE29\\x03\\uEE29\\x03\\uEE2B\\x03\\uEE34\" +\r\n\t\t\"\\x03\\uEE36\\x03\\uEE39\\x03\\uEE3B\\x03\\uEE3B\\x03\\uEE3D\\x03\\uEE3D\\x03\\uEE44\" +\r\n\t\t\"\\x03\\uEE44\\x03\\uEE49\\x03\\uEE49\\x03\\uEE4B\\x03\\uEE4B\\x03\\uEE4D\\x03\\uEE4D\" +\r\n\t\t\"\\x03\\uEE4F\\x03\\uEE51\\x03\\uEE53\\x03\\uEE54\\x03\\uEE56\\x03\\uEE56\\x03\\uEE59\" +\r\n\t\t\"\\x03\\uEE59\\x03\\uEE5B\\x03\\uEE5B\\x03\\uEE5D\\x03\\uEE5D\\x03\\uEE5F\\x03\\uEE5F\" +\r\n\t\t\"\\x03\\uEE61\\x03\\uEE61\\x03\\uEE63\\x03\\uEE64\\x03\\uEE66\\x03\\uEE66\\x03\\uEE69\" +\r\n\t\t\"\\x03\\uEE6C\\x03\\uEE6E\\x03\\uEE74\\x03\\uEE76\\x03\\uEE79\\x03\\uEE7B\\x03\\uEE7E\" +\r\n\t\t\"\\x03\\uEE80\\x03\\uEE80\\x03\\uEE82\\x03\\uEE8B\\x03\\uEE8D\\x03\\uEE9D\\x03\\uEEA3\" +\r\n\t\t\"\\x03\\uEEA5\\x03\\uEEA7\\x03\\uEEAB\\x03\\uEEAD\\x03\\uEEBD\\x03\\x02\\x04\\uA6D8\\x04\" +\r\n\t\t\"\\uA702\\x04\\uB736\\x04\\uB742\\x04\\uB81F\\x04\\uB822\\x04\\uCEA3\\x04\\uF802\\x04\" 
+\r\n\t\t\"\\uFA1F\\x04\\x03\\x10\\x03\\x10\\\"\\x10\\x81\\x10\\u0102\\x10\\u01F1\\x10\\u0240\\x02\" +\r\n\t\t\"C\\x02\\\\\\x02c\\x02|\\x02\\xAC\\x02\\xAC\\x02\\xB7\\x02\\xB7\\x02\\xBC\\x02\\xBC\\x02\" +\r\n\t\t\"\\xC2\\x02\\xD8\\x02\\xDA\\x02\\xF8\\x02\\xFA\\x02\\u02C3\\x02\\u02C8\\x02\\u02D3\\x02\" +\r\n\t\t\"\\u02E2\\x02\\u02E6\\x02\\u02EE\\x02\\u02EE\\x02\\u02F0\\x02\\u02F0\\x02\\u0372\\x02\" +\r\n\t\t\"\\u0376\\x02\\u0378\\x02\\u0379\\x02\\u037C\\x02\\u037F\\x02\\u0381\\x02\\u0381\\x02\" +\r\n\t\t\"\\u0388\\x02\\u0388\\x02\\u038A\\x02\\u038C\\x02\\u038E\\x02\\u038E\\x02\\u0390\\x02\" +\r\n\t\t\"\\u03A3\\x02\\u03A5\\x02\\u03F7\\x02\\u03F9\\x02\\u0483\\x02\\u048C\\x02\\u0531\\x02\" +\r\n\t\t\"\\u0533\\x02\\u0558\\x02\\u055B\\x02\\u055B\\x02\\u0563\\x02\\u0589\\x02\\u05D2\\x02\" +\r\n\t\t\"\\u05EC\\x02\\u05F2\\x02\\u05F4\\x02\\u0622\\x02\\u064C\\x02\\u0670\\x02\\u0671\\x02\" +\r\n\t\t\"\\u0673\\x02\\u06D5\\x02\\u06D7\\x02\\u06D7\\x02\\u06E7\\x02\\u06E8\\x02\\u06F0\\x02\" +\r\n\t\t\"\\u06F1\\x02\\u06FC\\x02\\u06FE\\x02\\u0701\\x02\\u0701\\x02\\u0712\\x02\\u0712\\x02\" +\r\n\t\t\"\\u0714\\x02\\u0731\\x02\\u074F\\x02\\u07A7\\x02\\u07B3\\x02\\u07B3\\x02\\u07CC\\x02\" +\r\n\t\t\"\\u07EC\\x02\\u07F6\\x02\\u07F7\\x02\\u07FC\\x02\\u07FC\\x02\\u0802\\x02\\u0817\\x02\" +\r\n\t\t\"\\u081C\\x02\\u081C\\x02\\u0826\\x02\\u0826\\x02\\u082A\\x02\\u082A\\x02\\u0842\\x02\" +\r\n\t\t\"\\u085A\\x02\\u08A2\\x02\\u08B6\\x02\\u08B8\\x02\\u08BF\\x02\\u0906\\x02\\u093B\\x02\" +\r\n\t\t\"\\u093F\\x02\\u093F\\x02\\u0952\\x02\\u0952\\x02\\u095A\\x02\\u0963\\x02\\u0973\\x02\" +\r\n\t\t\"\\u0982\\x02\\u0987\\x02\\u098E\\x02\\u0991\\x02\\u0992\\x02\\u0995\\x02\\u09AA\\x02\" +\r\n\t\t\"\\u09AC\\x02\\u09B2\\x02\\u09B4\\x02\\u09B4\\x02\\u09B8\\x02\\u09BB\\x02\\u09BF\\x02\" +\r\n\t\t\"\\u09BF\\x02\\u09D0\\x02\\u09D0\\x02\\u09DE\\x02\\u09DF\\x02\\u09E1\\x02\\u09E3\\x02\" +\r\n\t\t\"\\u09F2\\x02\\u09F3\\x02\\u0A07\\x02\\u0A0C\\x02\\u0A11\\x02\\u0A12\\x02\\u0A15\\x02\" 
+\r\n\t\t\"\\u0A2A\\x02\\u0A2C\\x02\\u0A32\\x02\\u0A34\\x02\\u0A35\\x02\\u0A37\\x02\\u0A38\\x02\" +\r\n\t\t\"\\u0A3A\\x02\\u0A3B\\x02\\u0A5B\\x02\\u0A5E\\x02\\u0A60\\x02\\u0A60\\x02\\u0A74\\x02\" +\r\n\t\t\"\\u0A76\\x02\\u0A87\\x02\\u0A8F\\x02\\u0A91\\x02\\u0A93\\x02\\u0A95\\x02\\u0AAA\\x02\" +\r\n\t\t\"\\u0AAC\\x02\\u0AB2\\x02\\u0AB4\\x02\\u0AB5\\x02\\u0AB7\\x02\\u0ABB\\x02\\u0ABF\\x02\" +\r\n\t\t\"\\u0ABF\\x02\\u0AD2\\x02\\u0AD2\\x02\\u0AE2\\x02\\u0AE3\\x02\\u0AFB\\x02\\u0AFB\\x02\" +\r\n\t\t\"\\u0B07\\x02\\u0B0E\\x02\\u0B11\\x02\\u0B12\\x02\\u0B15\\x02\\u0B2A\\x02\\u0B2C\\x02\" +\r\n\t\t\"\\u0B32\\x02\\u0B34\\x02\\u0B35\\x02\\u0B37\\x02\\u0B3B\\x02\\u0B3F\\x02\\u0B3F\\x02\" +\r\n\t\t\"\\u0B5E\\x02\\u0B5F\\x02\\u0B61\\x02\\u0B63\\x02\\u0B73\\x02\\u0B73\\x02\\u0B85\\x02\" +\r\n\t\t\"\\u0B85\\x02\\u0B87\\x02\\u0B8C\\x02\\u0B90\\x02\\u0B92\\x02\\u0B94\\x02\\u0B97\\x02\" +\r\n\t\t\"\\u0B9B\\x02\\u0B9C\\x02\\u0B9E\\x02\\u0B9E\\x02\\u0BA0\\x02\\u0BA1\\x02\\u0BA5\\x02\" +\r\n\t\t\"\\u0BA6\\x02\\u0BAA\\x02\\u0BAC\\x02\\u0BB0\\x02\\u0BBB\\x02\\u0BD2\\x02\\u0BD2\\x02\" +\r\n\t\t\"\\u0C07\\x02\\u0C0E\\x02\\u0C10\\x02\\u0C12\\x02\\u0C14\\x02\\u0C2A\\x02\\u0C2C\\x02\" +\r\n\t\t\"\\u0C3B\\x02\\u0C3F\\x02\\u0C3F\\x02\\u0C5A\\x02\\u0C5C\\x02\\u0C62\\x02\\u0C63\\x02\" +\r\n\t\t\"\\u0C82\\x02\\u0C82\\x02\\u0C87\\x02\\u0C8E\\x02\\u0C90\\x02\\u0C92\\x02\\u0C94\\x02\" +\r\n\t\t\"\\u0CAA\\x02\\u0CAC\\x02\\u0CB5\\x02\\u0CB7\\x02\\u0CBB\\x02\\u0CBF\\x02\\u0CBF\\x02\" +\r\n\t\t\"\\u0CE0\\x02\\u0CE0\\x02\\u0CE2\\x02\\u0CE3\\x02\\u0CF3\\x02\\u0CF4\\x02\\u0D07\\x02\" +\r\n\t\t\"\\u0D0E\\x02\\u0D10\\x02\\u0D12\\x02\\u0D14\\x02\\u0D3C\\x02\\u0D3F\\x02\\u0D3F\\x02\" +\r\n\t\t\"\\u0D50\\x02\\u0D50\\x02\\u0D56\\x02\\u0D58\\x02\\u0D61\\x02\\u0D63\\x02\\u0D7C\\x02\" +\r\n\t\t\"\\u0D81\\x02\\u0D87\\x02\\u0D98\\x02\\u0D9C\\x02\\u0DB3\\x02\\u0DB5\\x02\\u0DBD\\x02\" +\r\n\t\t\"\\u0DBF\\x02\\u0DBF\\x02\\u0DC2\\x02\\u0DC8\\x02\\u0E03\\x02\\u0E32\\x02\\u0E34\\x02\" 
+\r\n\t\t\"\\u0E35\\x02\\u0E42\\x02\\u0E48\\x02\\u0E83\\x02\\u0E84\\x02\\u0E86\\x02\\u0E86\\x02\" +\r\n\t\t\"\\u0E89\\x02\\u0E8A\\x02\\u0E8C\\x02\\u0E8C\\x02\\u0E8F\\x02\\u0E8F\\x02\\u0E96\\x02\" +\r\n\t\t\"\\u0E99\\x02\\u0E9B\\x02\\u0EA1\\x02\\u0EA3\\x02\\u0EA5\\x02\\u0EA7\\x02\\u0EA7\\x02\" +\r\n\t\t\"\\u0EA9\\x02\\u0EA9\\x02\\u0EAC\\x02\\u0EAD\\x02\\u0EAF\\x02\\u0EB2\\x02\\u0EB4\\x02\" +\r\n\t\t\"\\u0EB5\\x02\\u0EBF\\x02\\u0EBF\\x02\\u0EC2\\x02\\u0EC6\\x02\\u0EC8\\x02\\u0EC8\\x02\" +\r\n\t\t\"\\u0EDE\\x02\\u0EE1\\x02\\u0F02\\x02\\u0F02\\x02\\u0F42\\x02\\u0F49\\x02\\u0F4B\\x02\" +\r\n\t\t\"\\u0F6E\\x02\\u0F8A\\x02\\u0F8E\\x02\\u1002\\x02\\u102C\\x02\\u1041\\x02\\u1041\\x02\" +\r\n\t\t\"\\u1052\\x02\\u1057\\x02\\u105C\\x02\\u105F\\x02\\u1063\\x02\\u1063\\x02\\u1067\\x02\" +\r\n\t\t\"\\u1068\\x02\\u1070\\x02\\u1072\\x02\\u1077\\x02\\u1083\\x02\\u1090\\x02\\u1090\\x02\" +\r\n\t\t\"\\u10A2\\x02\\u10C7\\x02\\u10C9\\x02\\u10C9\\x02\\u10CF\\x02\\u10CF\\x02\\u10D2\\x02\" +\r\n\t\t\"\\u10FC\\x02\\u10FE\\x02\\u124A\\x02\\u124C\\x02\\u124F\\x02\\u1252\\x02\\u1258\\x02\" +\r\n\t\t\"\\u125A\\x02\\u125A\\x02\\u125C\\x02\\u125F\\x02\\u1262\\x02\\u128A\\x02\\u128C\\x02\" +\r\n\t\t\"\\u128F\\x02\\u1292\\x02\\u12B2\\x02\\u12B4\\x02\\u12B7\\x02\\u12BA\\x02\\u12C0\\x02\" +\r\n\t\t\"\\u12C2\\x02\\u12C2\\x02\\u12C4\\x02\\u12C7\\x02\\u12CA\\x02\\u12D8\\x02\\u12DA\\x02\" +\r\n\t\t\"\\u1312\\x02\\u1314\\x02\\u1317\\x02\\u131A\\x02\\u135C\\x02\\u1382\\x02\\u1391\\x02\" +\r\n\t\t\"\\u13A2\\x02\\u13F7\\x02\\u13FA\\x02\\u13FF\\x02\\u1403\\x02\\u166E\\x02\\u1671\\x02\" +\r\n\t\t\"\\u1681\\x02\\u1683\\x02\\u169C\\x02\\u16A2\\x02\\u16EC\\x02\\u16F0\\x02\\u16FA\\x02\" +\r\n\t\t\"\\u1702\\x02\\u170E\\x02\\u1710\\x02\\u1713\\x02\\u1722\\x02\\u1733\\x02\\u1742\\x02\" +\r\n\t\t\"\\u1753\\x02\\u1762\\x02\\u176E\\x02\\u1770\\x02\\u1772\\x02\\u1782\\x02\\u17B5\\x02\" +\r\n\t\t\"\\u17D9\\x02\\u17D9\\x02\\u17DE\\x02\\u17DE\\x02\\u1822\\x02\\u1879\\x02\\u1882\\x02\" 
+\r\n\t\t\"\\u1886\\x02\\u1889\\x02\\u18AA\\x02\\u18AC\\x02\\u18AC\\x02\\u18B2\\x02\\u18F7\\x02\" +\r\n\t\t\"\\u1902\\x02\\u1920\\x02\\u1952\\x02\\u196F\\x02\\u1972\\x02\\u1976\\x02\\u1982\\x02\" +\r\n\t\t\"\\u19AD\\x02\\u19B2\\x02\\u19CB\\x02\\u1A02\\x02\\u1A18\\x02\\u1A22\\x02\\u1A56\\x02\" +\r\n\t\t\"\\u1AA9\\x02\\u1AA9\\x02\\u1B07\\x02\\u1B35\\x02\\u1B47\\x02\\u1B4D\\x02\\u1B85\\x02\" +\r\n\t\t\"\\u1BA2\\x02\\u1BB0\\x02\\u1BB1\\x02\\u1BBC\\x02\\u1BE7\\x02\\u1C02\\x02\\u1C25\\x02\" +\r\n\t\t\"\\u1C4F\\x02\\u1C51\\x02\\u1C5C\\x02\\u1C7F\\x02\\u1C82\\x02\\u1C8A\\x02\\u1CEB\\x02\" +\r\n\t\t\"\\u1CEE\\x02\\u1CF0\\x02\\u1CF3\\x02\\u1CF7\\x02\\u1CF8\\x02\\u1D02\\x02\\u1DC1\\x02\" +\r\n\t\t\"\\u1E02\\x02\\u1F17\\x02\\u1F1A\\x02\\u1F1F\\x02\\u1F22\\x02\\u1F47\\x02\\u1F4A\\x02\" +\r\n\t\t\"\\u1F4F\\x02\\u1F52\\x02\\u1F59\\x02\\u1F5B\\x02\\u1F5B\\x02\\u1F5D\\x02\\u1F5D\\x02\" +\r\n\t\t\"\\u1F5F\\x02\\u1F5F\\x02\\u1F61\\x02\\u1F7F\\x02\\u1F82\\x02\\u1FB6\\x02\\u1FB8\\x02\" +\r\n\t\t\"\\u1FBE\\x02\\u1FC0\\x02\\u1FC0\\x02\\u1FC4\\x02\\u1FC6\\x02\\u1FC8\\x02\\u1FCE\\x02\" +\r\n\t\t\"\\u1FD2\\x02\\u1FD5\\x02\\u1FD8\\x02\\u1FDD\\x02\\u1FE2\\x02\\u1FEE\\x02\\u1FF4\\x02\" +\r\n\t\t\"\\u1FF6\\x02\\u1FF8\\x02\\u1FFE\\x02\\u2073\\x02\\u2073\\x02\\u2081\\x02\\u2081\\x02\" +\r\n\t\t\"\\u2092\\x02\\u209E\\x02\\u2104\\x02\\u2104\\x02\\u2109\\x02\\u2109\\x02\\u210C\\x02\" +\r\n\t\t\"\\u2115\\x02\\u2117\\x02\\u2117\\x02\\u211B\\x02\\u211F\\x02\\u2126\\x02\\u2126\\x02\" +\r\n\t\t\"\\u2128\\x02\\u2128\\x02\\u212A\\x02\\u212A\\x02\\u212C\\x02\\u212F\\x02\\u2131\\x02\" +\r\n\t\t\"\\u213B\\x02\\u213E\\x02\\u2141\\x02\\u2147\\x02\\u214B\\x02\\u2150\\x02\\u2150\\x02\" +\r\n\t\t\"\\u2162\\x02\\u218A\\x02\\u2C02\\x02\\u2C30\\x02\\u2C32\\x02\\u2C60\\x02\\u2C62\\x02\" +\r\n\t\t\"\\u2CE6\\x02\\u2CED\\x02\\u2CF0\\x02\\u2CF4\\x02\\u2CF5\\x02\\u2D02\\x02\\u2D27\\x02\" +\r\n\t\t\"\\u2D29\\x02\\u2D29\\x02\\u2D2F\\x02\\u2D2F\\x02\\u2D32\\x02\\u2D69\\x02\\u2D71\\x02\" 
+\r\n\t\t\"\\u2D71\\x02\\u2D82\\x02\\u2D98\\x02\\u2DA2\\x02\\u2DA8\\x02\\u2DAA\\x02\\u2DB0\\x02\" +\r\n\t\t\"\\u2DB2\\x02\\u2DB8\\x02\\u2DBA\\x02\\u2DC0\\x02\\u2DC2\\x02\\u2DC8\\x02\\u2DCA\\x02\" +\r\n\t\t\"\\u2DD0\\x02\\u2DD2\\x02\\u2DD8\\x02\\u2DDA\\x02\\u2DE0\\x02\\u2E31\\x02\\u2E31\\x02\" +\r\n\t\t\"\\u3007\\x02\\u3009\\x02\\u3023\\x02\\u302B\\x02\\u3033\\x02\\u3037\\x02\\u303A\\x02\" +\r\n\t\t\"\\u303E\\x02\\u3043\\x02\\u3098\\x02\\u309F\\x02\\u30A1\\x02\\u30A3\\x02\\u30FC\\x02\" +\r\n\t\t\"\\u30FE\\x02\\u3101\\x02\\u3107\\x02\\u312F\\x02\\u3133\\x02\\u3190\\x02\\u31A2\\x02\" +\r\n\t\t\"\\u31BC\\x02\\u31F2\\x02\\u3201\\x02\\u3402\\x02\\u4DB7\\x02\\u4E02\\x02\\u9FD7\\x02\" +\r\n\t\t\"\\uA002\\x02\\uA48E\\x02\\uA4D2\\x02\\uA4FF\\x02\\uA502\\x02\\uA60E\\x02\\uA612\\x02\" +\r\n\t\t\"\\uA621\\x02\\uA62C\\x02\\uA62D\\x02\\uA642\\x02\\uA670\\x02\\uA681\\x02\\uA69F\\x02\" +\r\n\t\t\"\\uA6A2\\x02\\uA6F1\\x02\\uA719\\x02\\uA721\\x02\\uA724\\x02\\uA78A\\x02\\uA78D\\x02\" +\r\n\t\t\"\\uA7B0\\x02\\uA7B2\\x02\\uA7B9\\x02\\uA7F9\\x02\\uA803\\x02\\uA805\\x02\\uA807\\x02\" +\r\n\t\t\"\\uA809\\x02\\uA80C\\x02\\uA80E\\x02\\uA824\\x02\\uA842\\x02\\uA875\\x02\\uA884\\x02\" +\r\n\t\t\"\\uA8B5\\x02\\uA8F4\\x02\\uA8F9\\x02\\uA8FD\\x02\\uA8FD\\x02\\uA8FF\\x02\\uA8FF\\x02\" +\r\n\t\t\"\\uA90C\\x02\\uA927\\x02\\uA932\\x02\\uA948\\x02\\uA962\\x02\\uA97E\\x02\\uA986\\x02\" +\r\n\t\t\"\\uA9B4\\x02\\uA9D1\\x02\\uA9D1\\x02\\uA9E2\\x02\\uA9E6\\x02\\uA9E8\\x02\\uA9F1\\x02\" +\r\n\t\t\"\\uA9FC\\x02\\uAA00\\x02\\uAA02\\x02\\uAA2A\\x02\\uAA42\\x02\\uAA44\\x02\\uAA46\\x02\" +\r\n\t\t\"\\uAA4D\\x02\\uAA62\\x02\\uAA78\\x02\\uAA7C\\x02\\uAA7C\\x02\\uAA80\\x02\\uAAB1\\x02\" +\r\n\t\t\"\\uAAB3\\x02\\uAAB3\\x02\\uAAB7\\x02\\uAAB8\\x02\\uAABB\\x02\\uAABF\\x02\\uAAC2\\x02\" +\r\n\t\t\"\\uAAC2\\x02\\uAAC4\\x02\\uAAC4\\x02\\uAADD\\x02\\uAADF\\x02\\uAAE2\\x02\\uAAEC\\x02\" +\r\n\t\t\"\\uAAF4\\x02\\uAAF6\\x02\\uAB03\\x02\\uAB08\\x02\\uAB0B\\x02\\uAB10\\x02\\uAB13\\x02\" 
+\r\n\t\t\"\\uAB18\\x02\\uAB22\\x02\\uAB28\\x02\\uAB2A\\x02\\uAB30\\x02\\uAB32\\x02\\uAB5C\\x02\" +\r\n\t\t\"\\uAB5E\\x02\\uAB67\\x02\\uAB72\\x02\\uABE4\\x02\\uAC02\\x02\\uD7A5\\x02\\uD7B2\\x02\" +\r\n\t\t\"\\uD7C8\\x02\\uD7CD\\x02\\uD7FD\\x02\\uF902\\x02\\uFA6F\\x02\\uFA72\\x02\\uFADB\\x02\" +\r\n\t\t\"\\uFB02\\x02\\uFB08\\x02\\uFB15\\x02\\uFB19\\x02\\uFB1F\\x02\\uFB1F\\x02\\uFB21\\x02\" +\r\n\t\t\"\\uFB2A\\x02\\uFB2C\\x02\\uFB38\\x02\\uFB3A\\x02\\uFB3E\\x02\\uFB40\\x02\\uFB40\\x02\" +\r\n\t\t\"\\uFB42\\x02\\uFB43\\x02\\uFB45\\x02\\uFB46\\x02\\uFB48\\x02\\uFBB3\\x02\\uFBD5\\x02\" +\r\n\t\t\"\\uFD3F\\x02\\uFD52\\x02\\uFD91\\x02\\uFD94\\x02\\uFDC9\\x02\\uFDF2\\x02\\uFDFD\\x02\" +\r\n\t\t\"\\uFE72\\x02\\uFE76\\x02\\uFE78\\x02\\uFEFE\\x02\\uFF23\\x02\\uFF3C\\x02\\uFF43\\x02\" +\r\n\t\t\"\\uFF5C\\x02\\uFF68\\x02\\uFFC0\\x02\\uFFC4\\x02\\uFFC9\\x02\\uFFCC\\x02\\uFFD1\\x02\" +\r\n\t\t\"\\uFFD4\\x02\\uFFD9\\x02\\uFFDC\\x02\\uFFDE\\x02\\x02\\x03\\r\\x03\\x0F\\x03(\\x03*\\x03\" +\r\n\t\t\"<\\x03>\\x03?\\x03A\\x03O\\x03R\\x03_\\x03\\x82\\x03\\xFC\\x03\\u0142\\x03\\u0176\\x03\" +\r\n\t\t\"\\u0282\\x03\\u029E\\x03\\u02A2\\x03\\u02D2\\x03\\u0302\\x03\\u0321\\x03\\u0332\\x03\" +\r\n\t\t\"\\u034C\\x03\\u0352\\x03\\u0377\\x03\\u0382\\x03\\u039F\\x03\\u03A2\\x03\\u03C5\\x03\" +\r\n\t\t\"\\u03CA\\x03\\u03D1\\x03\\u03D3\\x03\\u03D7\\x03\\u0402\\x03\\u049F\\x03\\u04B2\\x03\" +\r\n\t\t\"\\u04D5\\x03\\u04DA\\x03\\u04FD\\x03\\u0502\\x03\\u0529\\x03\\u0532\\x03\\u0565\\x03\" +\r\n\t\t\"\\u0602\\x03\\u0738\\x03\\u0742\\x03\\u0757\\x03\\u0762\\x03\\u0769\\x03\\u0802\\x03\" +\r\n\t\t\"\\u0807\\x03\\u080A\\x03\\u080A\\x03\\u080C\\x03\\u0837\\x03\\u0839\\x03\\u083A\\x03\" +\r\n\t\t\"\\u083E\\x03\\u083E\\x03\\u0841\\x03\\u0857\\x03\\u0862\\x03\\u0878\\x03\\u0882\\x03\" +\r\n\t\t\"\\u08A0\\x03\\u08E2\\x03\\u08F4\\x03\\u08F6\\x03\\u08F7\\x03\\u0902\\x03\\u0917\\x03\" +\r\n\t\t\"\\u0922\\x03\\u093B\\x03\\u0982\\x03\\u09B9\\x03\\u09C0\\x03\\u09C1\\x03\\u0A02\\x03\" 
+\r\n\t\t\"\\u0A02\\x03\\u0A12\\x03\\u0A15\\x03\\u0A17\\x03\\u0A19\\x03\\u0A1B\\x03\\u0A35\\x03\" +\r\n\t\t\"\\u0A62\\x03\\u0A7E\\x03\\u0A82\\x03\\u0A9E\\x03\\u0AC2\\x03\\u0AC9\\x03\\u0ACB\\x03\" +\r\n\t\t\"\\u0AE6\\x03\\u0B02\\x03\\u0B37\\x03\\u0B42\\x03\\u0B57\\x03\\u0B62\\x03\\u0B74\\x03\" +\r\n\t\t\"\\u0B82\\x03\\u0B93\\x03\\u0C02\\x03\\u0C4A\\x03\\u0C82\\x03\\u0CB4\\x03\\u0CC2\\x03\" +\r\n\t\t\"\\u0CF4\\x03\\u1005\\x03\\u1039\\x03\\u1085\\x03\\u10B1\\x03\\u10D2\\x03\\u10EA\\x03\" +\r\n\t\t\"\\u1105\\x03\\u1128\\x03\\u1152\\x03\\u1174\\x03\\u1178\\x03\\u1178\\x03\\u1185\\x03\" +\r\n\t\t\"\\u11B4\\x03\\u11C3\\x03\\u11C6\\x03\\u11DC\\x03\\u11DC\\x03\\u11DE\\x03\\u11DE\\x03\" +\r\n\t\t\"\\u1202\\x03\\u1213\\x03\\u1215\\x03\\u122D\\x03\\u1282\\x03\\u1288\\x03\\u128A\\x03\" +\r\n\t\t\"\\u128A\\x03\\u128C\\x03\\u128F\\x03\\u1291\\x03\\u129F\\x03\\u12A1\\x03\\u12AA\\x03\" +\r\n\t\t\"\\u12B2\\x03\\u12E0\\x03\\u1307\\x03\\u130E\\x03\\u1311\\x03\\u1312\\x03\\u1315\\x03\" +\r\n\t\t\"\\u132A\\x03\\u132C\\x03\\u1332\\x03\\u1334\\x03\\u1335\\x03\\u1337\\x03\\u133B\\x03\" +\r\n\t\t\"\\u133F\\x03\\u133F\\x03\\u1352\\x03\\u1352\\x03\\u135F\\x03\\u1363\\x03\\u1402\\x03\" +\r\n\t\t\"\\u1436\\x03\\u1449\\x03\\u144C\\x03\\u1482\\x03\\u14B1\\x03\\u14C6\\x03\\u14C7\\x03\" +\r\n\t\t\"\\u14C9\\x03\\u14C9\\x03\\u1582\\x03\\u15B0\\x03\\u15DA\\x03\\u15DD\\x03\\u1602\\x03\" +\r\n\t\t\"\\u1631\\x03\\u1646\\x03\\u1646\\x03\\u1682\\x03\\u16AC\\x03\\u1702\\x03\\u171B\\x03\" +\r\n\t\t\"\\u18A2\\x03\\u18E1\\x03\\u1901\\x03\\u1901\\x03\\u1AC2\\x03\\u1AFA\\x03\\u1C02\\x03\" +\r\n\t\t\"\\u1C0A\\x03\\u1C0C\\x03\\u1C30\\x03\\u1C42\\x03\\u1C42\\x03\\u1C74\\x03\\u1C91\\x03\" +\r\n\t\t\"\\u2002\\x03\\u239B\\x03\\u2402\\x03\\u2470\\x03\\u2482\\x03\\u2545\\x03\\u3002\\x03\" +\r\n\t\t\"\\u3430\\x03\\u4402\\x03\\u4648\\x03\\u6802\\x03\\u6A3A\\x03\\u6A42\\x03\\u6A60\\x03\" +\r\n\t\t\"\\u6AD2\\x03\\u6AEF\\x03\\u6B02\\x03\\u6B31\\x03\\u6B42\\x03\\u6B45\\x03\\u6B65\\x03\" 
+\r\n\t\t\"\\u6B79\\x03\\u6B7F\\x03\\u6B91\\x03\\u6F02\\x03\\u6F46\\x03\\u6F52\\x03\\u6F52\\x03\" +\r\n\t\t\"\\u6F95\\x03\\u6FA1\\x03\\u6FE2\\x03\\u6FE2\\x03\\u7002\\x03\\u87EE\\x03\\u8802\\x03\" +\r\n\t\t\"\\u8AF4\\x03\\uB002\\x03\\uB003\\x03\\uBC02\\x03\\uBC6C\\x03\\uBC72\\x03\\uBC7E\\x03\" +\r\n\t\t\"\\uBC82\\x03\\uBC8A\\x03\\uBC92\\x03\\uBC9B\\x03\\uD402\\x03\\uD456\\x03\\uD458\\x03\" +\r\n\t\t\"\\uD49E\\x03\\uD4A0\\x03\\uD4A1\\x03\\uD4A4\\x03\\uD4A4\\x03\\uD4A7\\x03\\uD4A8\\x03\" +\r\n\t\t\"\\uD4AB\\x03\\uD4AE\\x03\\uD4B0\\x03\\uD4BB\\x03\\uD4BD\\x03\\uD4BD\\x03\\uD4BF\\x03\" +\r\n\t\t\"\\uD4C5\\x03\\uD4C7\\x03\\uD507\\x03\\uD509\\x03\\uD50C\\x03\\uD50F\\x03\\uD516\\x03\" +\r\n\t\t\"\\uD518\\x03\\uD51E\\x03\\uD520\\x03\\uD53B\\x03\\uD53D\\x03\\uD540\\x03\\uD542\\x03\" +\r\n\t\t\"\\uD546\\x03\\uD548\\x03\\uD548\";\r\n\tprivate static readonly _serializedATNSegment1: string =\r\n\t\t\"\\x03\\uD54C\\x03\\uD552\\x03\\uD554\\x03\\uD6A7\\x03\\uD6AA\\x03\\uD6C2\\x03\\uD6C4\" +\r\n\t\t\"\\x03\\uD6DC\\x03\\uD6DE\\x03\\uD6FC\\x03\\uD6FE\\x03\\uD716\\x03\\uD718\\x03\\uD736\" +\r\n\t\t\"\\x03\\uD738\\x03\\uD750\\x03\\uD752\\x03\\uD770\\x03\\uD772\\x03\\uD78A\\x03\\uD78C\" +\r\n\t\t\"\\x03\\uD7AA\\x03\\uD7AC\\x03\\uD7C4\\x03\\uD7C6\\x03\\uD7CD\\x03\\uE802\\x03\\uE8C6\" +\r\n\t\t\"\\x03\\uE902\\x03\\uE945\\x03\\uEE02\\x03\\uEE05\\x03\\uEE07\\x03\\uEE21\\x03\\uEE23\" +\r\n\t\t\"\\x03\\uEE24\\x03\\uEE26\\x03\\uEE26\\x03\\uEE29\\x03\\uEE29\\x03\\uEE2B\\x03\\uEE34\" +\r\n\t\t\"\\x03\\uEE36\\x03\\uEE39\\x03\\uEE3B\\x03\\uEE3B\\x03\\uEE3D\\x03\\uEE3D\\x03\\uEE44\" +\r\n\t\t\"\\x03\\uEE44\\x03\\uEE49\\x03\\uEE49\\x03\\uEE4B\\x03\\uEE4B\\x03\\uEE4D\\x03\\uEE4D\" +\r\n\t\t\"\\x03\\uEE4F\\x03\\uEE51\\x03\\uEE53\\x03\\uEE54\\x03\\uEE56\\x03\\uEE56\\x03\\uEE59\" +\r\n\t\t\"\\x03\\uEE59\\x03\\uEE5B\\x03\\uEE5B\\x03\\uEE5D\\x03\\uEE5D\\x03\\uEE5F\\x03\\uEE5F\" +\r\n\t\t\"\\x03\\uEE61\\x03\\uEE61\\x03\\uEE63\\x03\\uEE64\\x03\\uEE66\\x03\\uEE66\\x03\\uEE69\" 
+\r\n\t\t\"\\x03\\uEE6C\\x03\\uEE6E\\x03\\uEE74\\x03\\uEE76\\x03\\uEE79\\x03\\uEE7B\\x03\\uEE7E\" +\r\n\t\t\"\\x03\\uEE80\\x03\\uEE80\\x03\\uEE82\\x03\\uEE8B\\x03\\uEE8D\\x03\\uEE9D\\x03\\uEEA3\" +\r\n\t\t\"\\x03\\uEEA5\\x03\\uEEA7\\x03\\uEEAB\\x03\\uEEAD\\x03\\uEEBD\\x03\\x02\\x04\\uA6D8\\x04\" +\r\n\t\t\"\\uA702\\x04\\uB736\\x04\\uB742\\x04\\uB81F\\x04\\uB822\\x04\\uCEA3\\x04\\uF802\\x04\" +\r\n\t\t\"\\uFA1F\\x041\\x02\\x03\\x03\\x02\\x02\\x02\\x02\\x05\\x03\\x02\\x02\\x02\\x02\\x07\\x03\" +\r\n\t\t\"\\x02\\x02\\x02\\x02\\t\\x03\\x02\\x02\\x02\\x02\\v\\x03\\x02\\x02\\x02\\x02\\x11\\x03\\x02\" +\r\n\t\t\"\\x02\\x02\\x03\\x13\\x03\\x02\\x02\\x02\\x05\\x16\\x03\\x02\\x02\\x02\\x07\\x18\\x03\\x02\" +\r\n\t\t\"\\x02\\x02\\t\\x1A\\x03\\x02\\x02\\x02\\v\\x1C\\x03\\x02\\x02\\x02\\r%\\x03\\x02\\x02\\x02\" +\r\n\t\t\"\\x0F\\'\\x03\\x02\\x02\\x02\\x11)\\x03\\x02\\x02\\x02\\x13\\x14\\x071\\x02\\x02\\x14\\x15\" +\r\n\t\t\"\\x071\\x02\\x02\\x15\\x04\\x03\\x02\\x02\\x02\\x16\\x17\\x071\\x02\\x02\\x17\\x06\\x03\" +\r\n\t\t\"\\x02\\x02\\x02\\x18\\x19\\x07,\\x02\\x02\\x19\\b\\x03\\x02\\x02\\x02\\x1A\\x1B\\x07#\\x02\" +\r\n\t\t\"\\x02\\x1B\\n\\x03\\x02\\x02\\x02\\x1C \\x05\\x0F\\b\\x02\\x1D\\x1F\\x05\\r\\x07\\x02\\x1E\" +\r\n\t\t\"\\x1D\\x03\\x02\\x02\\x02\\x1F\\\"\\x03\\x02\\x02\\x02 \\x1E\\x03\\x02\\x02\\x02 !\\x03\" +\r\n\t\t\"\\x02\\x02\\x02!#\\x03\\x02\\x02\\x02\\\" \\x03\\x02\\x02\\x02#$\\b\\x06\\x02\\x02$\\f\\x03\" +\r\n\t\t\"\\x02\\x02\\x02%&\\t\\x02\\x02\\x02&\\x0E\\x03\\x02\\x02\\x02\\'(\\t\\x03\\x02\\x02(\\x10\" +\r\n\t\t\"\\x03\\x02\\x02\\x02)-\\x07)\\x02\\x02*,\\v\\x02\\x02\\x02+*\\x03\\x02\\x02\\x02,/\\x03\" +\r\n\t\t\"\\x02\\x02\\x02-.\\x03\\x02\\x02\\x02-+\\x03\\x02\\x02\\x02.0\\x03\\x02\\x02\\x02/-\\x03\" +\r\n\t\t\"\\x02\\x02\\x0201\\x07)\\x02\\x021\\x12\\x03\\x02\\x02\\x02\\x05\\x02 -\\x03\\x03\\x06\" +\r\n\t\t\"\\x02\";\r\n\tpublic static readonly _serializedATN: string = 
Utils.join(\r\n\t\t[\r\n\t\t\tXPathLexer._serializedATNSegment0,\r\n\t\t\tXPathLexer._serializedATNSegment1,\r\n\t\t],\r\n\t\t\"\",\r\n\t);\r\n\tpublic static __ATN: ATN;\r\n\tpublic static get _ATN(): ATN {\r\n\t\tif (!XPathLexer.__ATN) {\r\n\t\t\tXPathLexer.__ATN = new ATNDeserializer().deserialize(Utils.toCharArray(XPathLexer._serializedATN));\r\n\t\t}\r\n\r\n\t\treturn XPathLexer.__ATN;\r\n\t}\r\n\r\n}\r\n\r\n", "/*!\r\n * Copyright 2016 The ANTLR Project. All rights reserved.\r\n * Licensed under the BSD-3-Clause license. See LICENSE file in the project root for license information.\r\n */\r\n\r\n// CONVERSTION complete, Burt Harris 10/14/2016\r\n\r\nimport { ANTLRErrorListener } from \"../../ANTLRErrorListener\";\r\nimport { Override } from \"../../Decorators\";\r\nimport { Recognizer } from \"../../Recognizer\";\r\nimport { RecognitionException } from \"../../RecognitionException\";\r\n\r\nexport class XPathLexerErrorListener implements ANTLRErrorListener {\r\n\t@Override\r\n\tpublic syntaxError(\r\n\t\trecognizer: Recognizer, offendingSymbol: T | undefined,\r\n\t\tline: number, charPositionInLine: number, msg: string,\r\n\t\te: RecognitionException | undefined): void {\r\n\t\t// intentionally empty\r\n\t}\r\n}\r\n", "/*!\r\n * Copyright 2016 The ANTLR Project. All rights reserved.\r\n * Licensed under the BSD-3-Clause license. 
See LICENSE file in the project root for license information.\r\n */\r\n\r\n// CONVERSTION complete, Burt Harris 10/14/2016\r\nimport { Override } from \"../../Decorators\";\r\nimport { ParseTree } from \"../ParseTree\";\r\n\r\nexport abstract class XPathElement {\r\n\tprotected nodeName: string;\r\n\tpublic invert: boolean;\r\n\r\n\t/** Construct element like `/ID` or `ID` or `/*` etc...\r\n\t * op is null if just node\r\n\t */\r\n\tconstructor(nodeName: string) {\r\n\t\tthis.nodeName = nodeName;\r\n\t\tthis.invert = false;\r\n\t}\r\n\r\n\t/**\r\n\t * Given tree rooted at `t` return all nodes matched by this path\r\n\t * element.\r\n\t */\r\n\tpublic abstract evaluate(t: ParseTree): ParseTree[];\r\n\r\n\t@Override\r\n\tpublic toString(): string {\r\n\t\tlet inv: string = this.invert ? \"!\" : \"\";\r\n\t\tlet className: string = Object.constructor.name;\r\n\t\treturn className + \"[\" + inv + this.nodeName + \"]\";\r\n\t}\r\n}\r\n", "/*!\r\n * Copyright 2016 The ANTLR Project. All rights reserved.\r\n * Licensed under the BSD-3-Clause license. See LICENSE file in the project root for license information.\r\n */\r\n\r\n// CONVERSTION complete, Burt Harris 10/14/2016\r\nimport { ParserRuleContext } from \"../../ParserRuleContext\";\r\nimport { Override } from \"../../Decorators\";\r\nimport { ParseTree } from \"../ParseTree\";\r\nimport { Trees } from \"../Trees\";\r\nimport { XPathElement } from \"./XPathElement\";\r\n\r\n/**\r\n * Either `ID` at start of path or `...//ID` in middle of path.\r\n */\r\nexport class XPathRuleAnywhereElement extends XPathElement {\r\n\tprotected ruleIndex: number;\r\n\tconstructor(ruleName: string, ruleIndex: number) {\r\n\t\tsuper(ruleName);\r\n\t\tthis.ruleIndex = ruleIndex;\r\n\t}\r\n\r\n\t@Override\r\n\tpublic evaluate(t: ParseTree): ParseTree[] {\r\n\t\treturn Trees.findAllRuleNodes(t, this.ruleIndex);\r\n\t}\r\n}\r\n", "/*!\r\n * Copyright 2016 The ANTLR Project. 
All rights reserved.\r\n * Licensed under the BSD-3-Clause license. See LICENSE file in the project root for license information.\r\n */\r\n\r\n// CONVERSTION complete, Burt Harris 10/14/2016\r\nimport { ParserRuleContext } from \"../../ParserRuleContext\";\r\nimport { Override } from \"../../Decorators\";\r\nimport { ParseTree } from \"../ParseTree\";\r\nimport { Trees } from \"../Trees\";\r\nimport { XPathElement } from \"./XPathElement\";\r\n\r\nexport class XPathRuleElement extends XPathElement {\r\n\tprotected ruleIndex: number;\r\n\tconstructor(ruleName: string, ruleIndex: number) {\r\n\t\tsuper(ruleName);\r\n\t\tthis.ruleIndex = ruleIndex;\r\n\t}\r\n\r\n\t@Override\r\n\tpublic evaluate(t: ParseTree): ParseTree[] {\r\n\t\t// return all children of t that match nodeName\r\n\t\tlet nodes: ParseTree[] = [];\r\n\t\tfor (let c of Trees.getChildren(t)) {\r\n\t\t\tif (c instanceof ParserRuleContext) {\r\n\t\t\t\tif ((c.ruleIndex === this.ruleIndex && !this.invert) ||\r\n\t\t\t\t\t(c.ruleIndex !== this.ruleIndex && this.invert)) {\r\n\t\t\t\t\tnodes.push(c);\r\n\t\t\t\t}\r\n\t\t\t}\r\n\t\t}\r\n\t\treturn nodes;\r\n\t}\r\n}\r\n", "/*!\r\n * Copyright 2016 The ANTLR Project. All rights reserved.\r\n * Licensed under the BSD-3-Clause license. See LICENSE file in the project root for license information.\r\n */\r\n\r\n// CONVERSTION complete, Burt Harris 10/14/2016\r\nimport { Override } from \"../../Decorators\";\r\nimport { ParseTree } from \"../ParseTree\";\r\nimport { Trees } from \"../Trees\";\r\nimport { XPathElement } from \"./XPathElement\";\r\n\r\nexport class XPathTokenAnywhereElement extends XPathElement {\r\n\tprotected tokenType: number;\r\n\tconstructor(tokenName: string, tokenType: number) {\r\n\t\tsuper(tokenName);\r\n\t\tthis.tokenType = tokenType;\r\n\t}\r\n\r\n\t@Override\r\n\tpublic evaluate(t: ParseTree): ParseTree[] {\r\n\t\treturn Trees.findAllTokenNodes(t, this.tokenType);\r\n\t}\r\n}\r\n", "/*!\r\n * Copyright 2016 The ANTLR Project. 
All rights reserved.\r\n * Licensed under the BSD-3-Clause license. See LICENSE file in the project root for license information.\r\n */\r\n\r\n// CONVERSTION complete, Burt Harris 10/14/2016\r\nimport { Override } from \"../../Decorators\";\r\nimport { ParseTree } from \"../ParseTree\";\r\nimport { TerminalNode } from \"../TerminalNode\";\r\nimport { Trees } from \"../Trees\";\r\nimport { XPathElement } from \"./XPathElement\";\r\n\r\nexport class XPathTokenElement extends XPathElement {\r\n\tprotected tokenType: number;\r\n\tconstructor(tokenName: string, tokenType: number) {\r\n\t\tsuper(tokenName);\r\n\t\tthis.tokenType = tokenType;\r\n\t}\r\n\r\n\t@Override\r\n\tpublic evaluate(t: ParseTree): ParseTree[] {\r\n\t\t// return all children of t that match nodeName\r\n\t\tlet nodes: ParseTree[] = [];\r\n\t\tfor (let c of Trees.getChildren(t)) {\r\n\t\t\tif (c instanceof TerminalNode) {\r\n\t\t\t\tif ((c.symbol.type === this.tokenType && !this.invert) ||\r\n\t\t\t\t\t(c.symbol.type !== this.tokenType && this.invert)) {\r\n\t\t\t\t\tnodes.push(c);\r\n\t\t\t\t}\r\n\t\t\t}\r\n\t\t}\r\n\t\treturn nodes;\r\n\t}\r\n}\r\n", "/*!\r\n * Copyright 2016 The ANTLR Project. All rights reserved.\r\n * Licensed under the BSD-3-Clause license. 
See LICENSE file in the project root for license information.\r\n */\r\n\r\n// CONVERSTION complete, Burt Harris 10/14/2016\r\nimport { Override } from \"../../Decorators\";\r\nimport { ParseTree } from \"../ParseTree\";\r\nimport { TerminalNode } from \"../TerminalNode\";\r\nimport { Trees } from \"../Trees\";\r\nimport { XPath } from \"./XPath\";\r\nimport { XPathElement } from \"./XPathElement\";\r\n\r\nexport class XPathWildcardAnywhereElement extends XPathElement {\r\n\tconstructor() {\r\n\t\tsuper(XPath.WILDCARD);\r\n\t}\r\n\r\n\t@Override\r\n\tpublic evaluate(t: ParseTree): ParseTree[] {\r\n\t\tif (this.invert) {\r\n\t\t\t// !* is weird but valid (empty)\r\n\t\t\treturn [];\r\n\t\t}\r\n\t\treturn Trees.getDescendants(t);\r\n\t}\r\n}\r\n", "/*!\r\n * Copyright 2016 The ANTLR Project. All rights reserved.\r\n * Licensed under the BSD-3-Clause license. See LICENSE file in the project root for license information.\r\n */\r\n\r\n// CONVERSTION complete, Burt Harris 10/14/2016\r\nimport { Override } from \"../../Decorators\";\r\nimport { ParseTree } from \"../ParseTree\";\r\nimport { TerminalNode } from \"../TerminalNode\";\r\nimport { Trees } from \"../Trees\";\r\nimport { XPath } from \"./XPath\";\r\nimport { XPathElement } from \"./XPathElement\";\r\n\r\nexport class XPathWildcardElement extends XPathElement {\r\n\tconstructor() {\r\n\t\tsuper(XPath.WILDCARD);\r\n\t}\r\n\r\n\t@Override\r\n\tpublic evaluate(t: ParseTree): ParseTree[] {\r\n\t\tlet kids: ParseTree[] = [];\r\n\t\tif (this.invert) {\r\n\t\t\t// !* is weird but valid (empty)\r\n\t\t\treturn kids;\r\n\t\t}\r\n\t\tfor (let c of Trees.getChildren(t)) {\r\n\t\t\tkids.push(c);\r\n\t\t}\r\n\t\treturn kids;\r\n\t}\r\n}\r\n", "/*!\r\n * Copyright 2016 The ANTLR Project. All rights reserved.\r\n * Licensed under the BSD-3-Clause license. 
See LICENSE file in the project root for license information.\r\n */\r\n\r\n// ConvertTo-TS run at 2016-10-04T11:26:46.4373888-07:00\r\n\r\nimport { CharStreams } from \"../../CharStreams\";\r\nimport { CommonTokenStream } from \"../../CommonTokenStream\";\r\nimport { LexerNoViableAltException } from \"../../LexerNoViableAltException\";\r\nimport { Parser } from \"../../Parser\";\r\nimport { ParserRuleContext } from \"../../ParserRuleContext\";\r\nimport { ParseTree } from \"../ParseTree\";\r\nimport { Token } from \"../../Token\";\r\nimport { XPathElement } from \"./XPathElement\";\r\nimport { XPathLexer } from \"./XPathLexer\";\r\nimport { XPathLexerErrorListener } from \"./XPathLexerErrorListener\";\r\nimport { XPathRuleAnywhereElement } from \"./XPathRuleAnywhereElement\";\r\nimport { XPathRuleElement } from \"./XPathRuleElement\";\r\nimport { XPathTokenAnywhereElement } from \"./XPathTokenAnywhereElement\";\r\nimport { XPathTokenElement } from \"./XPathTokenElement\";\r\nimport { XPathWildcardAnywhereElement } from \"./XPathWildcardAnywhereElement\";\r\nimport { XPathWildcardElement } from \"./XPathWildcardElement\";\r\n\r\n/**\r\n * Represent a subset of XPath XML path syntax for use in identifying nodes in\r\n * parse trees.\r\n *\r\n * Split path into words and separators `/` and `//` via ANTLR\r\n * itself then walk path elements from left to right. At each separator-word\r\n * pair, find set of nodes. Next stage uses those as work list.\r\n *\r\n * The basic interface is\r\n * {@link XPath#findAll ParseTree.findAll}`(tree, pathString, parser)`.\r\n * But that is just shorthand for:\r\n *\r\n * ```\r\n * let p = new XPath(parser, pathString);\r\n * return p.evaluate(tree);\r\n * ```\r\n *\r\n * See `TestXPath` for descriptions. 
In short, this\r\n * allows operators:\r\n *\r\n * | | |\r\n * | --- | --- |\r\n * | `/` | root |\r\n * | `//` | anywhere |\r\n * | `!` | invert; this much appear directly after root or anywhere operator |\r\n *\r\n * and path elements:\r\n *\r\n * | | |\r\n * | --- | --- |\r\n * | `ID` | token name |\r\n * | `'string'` | any string literal token from the grammar |\r\n * | `expr` | rule name |\r\n * | `*` | wildcard matching any node |\r\n *\r\n * Whitespace is not allowed.\r\n */\r\nexport class XPath {\r\n\tpublic static readonly WILDCARD: string = \"*\"; // word not operator/separator\r\n\tpublic static readonly NOT: string = \"!\"; \t // word for invert operator\r\n\r\n\tprotected path: string;\r\n\tprotected elements: XPathElement[];\r\n\tprotected parser: Parser;\r\n\r\n\tconstructor(parser: Parser, path: string) {\r\n\t\tthis.parser = parser;\r\n\t\tthis.path = path;\r\n\t\tthis.elements = this.split(path);\r\n\t\t// console.log(this.elements.toString());\r\n\t}\r\n\r\n\t// TODO: check for invalid token/rule names, bad syntax\r\n\r\n\tpublic split(path: string): XPathElement[] {\r\n\t\tlet lexer = new XPathLexer(CharStreams.fromString(path));\r\n\t\tlexer.recover = (e: LexerNoViableAltException) => { throw e; };\r\n\r\n\t\tlexer.removeErrorListeners();\r\n\t\tlexer.addErrorListener(new XPathLexerErrorListener());\r\n\t\tlet tokenStream = new CommonTokenStream(lexer);\r\n\t\ttry {\r\n\t\t\ttokenStream.fill();\r\n\t\t}\r\n\t\tcatch (e) {\r\n\t\t\tif (e instanceof LexerNoViableAltException) {\r\n\t\t\t\tlet pos: number = lexer.charPositionInLine;\r\n\t\t\t\tlet msg: string = \"Invalid tokens or characters at index \" + pos + \" in path '\" + path + \"' -- \" + e.message;\r\n\t\t\t\tthrow new RangeError(msg);\r\n\t\t\t}\r\n\t\t\tthrow e;\r\n\t\t}\r\n\r\n\t\tlet tokens: Token[] = tokenStream.getTokens();\r\n\t\t// console.log(\"path=\" + path + \"=>\" + tokens);\r\n\t\tlet elements: XPathElement[] = [];\r\n\t\tlet n: number = tokens.length;\r\n\t\tlet i: number = 
0;\r\n\t\tloop:\r\n\t\twhile (i < n) {\r\n\t\t\tlet el: Token = tokens[i];\r\n\t\t\tlet next: Token | undefined;\r\n\t\t\tswitch (el.type) {\r\n\t\t\t\tcase XPathLexer.ROOT:\r\n\t\t\t\tcase XPathLexer.ANYWHERE:\r\n\t\t\t\t\tlet anywhere: boolean = el.type === XPathLexer.ANYWHERE;\r\n\t\t\t\t\ti++;\r\n\t\t\t\t\tnext = tokens[i];\r\n\t\t\t\t\tlet invert: boolean = next.type === XPathLexer.BANG;\r\n\t\t\t\t\tif (invert) {\r\n\t\t\t\t\t\ti++;\r\n\t\t\t\t\t\tnext = tokens[i];\r\n\t\t\t\t\t}\r\n\t\t\t\t\tlet pathElement: XPathElement = this.getXPathElement(next, anywhere);\r\n\t\t\t\t\tpathElement.invert = invert;\r\n\t\t\t\t\telements.push(pathElement);\r\n\t\t\t\t\ti++;\r\n\t\t\t\t\tbreak;\r\n\r\n\t\t\t\tcase XPathLexer.TOKEN_REF:\r\n\t\t\t\tcase XPathLexer.RULE_REF:\r\n\t\t\t\tcase XPathLexer.WILDCARD:\r\n\t\t\t\t\telements.push(this.getXPathElement(el, false));\r\n\t\t\t\t\ti++;\r\n\t\t\t\t\tbreak;\r\n\r\n\t\t\t\tcase Token.EOF:\r\n\t\t\t\t\tbreak loop;\r\n\r\n\t\t\t\tdefault:\r\n\t\t\t\t\tthrow new Error(\"Unknowth path element \" + el);\r\n\t\t\t}\r\n\t\t}\r\n\t\treturn elements;\r\n\t}\r\n\r\n\t/**\r\n\t * Convert word like `*` or `ID` or `expr` to a path\r\n\t * element. 
`anywhere` is `true` if `//` precedes the\r\n\t * word.\r\n\t */\r\n\tprotected getXPathElement(wordToken: Token, anywhere: boolean): XPathElement {\r\n\t\tif (wordToken.type === Token.EOF) {\r\n\t\t\tthrow new Error(\"Missing path element at end of path\");\r\n\t\t}\r\n\r\n\t\tlet word = wordToken.text;\r\n\t\tif (word == null) {\r\n\t\t\tthrow new Error(\"Expected wordToken to have text content.\");\r\n\t\t}\r\n\r\n\t\tlet ttype: number = this.parser.getTokenType(word);\r\n\t\tlet ruleIndex: number = this.parser.getRuleIndex(word);\r\n\t\tswitch (wordToken.type) {\r\n\t\t\tcase XPathLexer.WILDCARD:\r\n\t\t\t\treturn anywhere ?\r\n\t\t\t\t\tnew XPathWildcardAnywhereElement() :\r\n\t\t\t\t\tnew XPathWildcardElement();\r\n\t\t\tcase XPathLexer.TOKEN_REF:\r\n\t\t\tcase XPathLexer.STRING:\r\n\t\t\t\tif (ttype === Token.INVALID_TYPE) {\r\n\t\t\t\t\tthrow new Error(word + \" at index \" +\r\n\t\t\t\t\t\twordToken.startIndex +\r\n\t\t\t\t\t\t\" isn't a valid token name\");\r\n\t\t\t\t}\r\n\t\t\t\treturn anywhere ?\r\n\t\t\t\t\tnew XPathTokenAnywhereElement(word, ttype) :\r\n\t\t\t\t\tnew XPathTokenElement(word, ttype);\r\n\t\t\tdefault:\r\n\t\t\t\tif (ruleIndex === -1) {\r\n\t\t\t\t\tthrow new Error(word + \" at index \" +\r\n\t\t\t\t\t\twordToken.startIndex +\r\n\t\t\t\t\t\t\" isn't a valid rule name\");\r\n\t\t\t\t}\r\n\t\t\t\treturn anywhere ?\r\n\t\t\t\t\tnew XPathRuleAnywhereElement(word, ruleIndex) :\r\n\t\t\t\t\tnew XPathRuleElement(word, ruleIndex);\r\n\t\t}\r\n\t}\r\n\r\n\tpublic static findAll(tree: ParseTree, xpath: string, parser: Parser): Set {\r\n\t\tlet p: XPath = new XPath(parser, xpath);\r\n\t\treturn p.evaluate(tree);\r\n\t}\r\n\r\n\t/**\r\n\t * Return a list of all nodes starting at `t` as root that satisfy the\r\n\t * path. 
The root `/` is relative to the node passed to {@link evaluate}.\r\n\t */\r\n\tpublic evaluate(t: ParseTree): Set {\r\n\t\tlet dummyRoot = new ParserRuleContext();\r\n\t\tdummyRoot.addChild(t as ParserRuleContext);\r\n\r\n\t\tlet work = new Set([dummyRoot]);\r\n\r\n\t\tlet i: number = 0;\r\n\t\twhile (i < this.elements.length) {\r\n\t\t\tlet next = new Set();\r\n\t\t\tfor (let node of work) {\r\n\t\t\t\tif (node.childCount > 0) {\r\n\t\t\t\t\t// only try to match next element if it has children\r\n\t\t\t\t\t// e.g., //func/*/stat might have a token node for which\r\n\t\t\t\t\t// we can't go looking for stat nodes.\r\n\t\t\t\t\tlet matching = this.elements[i].evaluate(node);\r\n\t\t\t\t\tmatching.forEach(next.add, next);\r\n\t\t\t\t}\r\n\t\t\t}\r\n\t\t\ti++;\r\n\t\t\twork = next;\r\n\t\t}\r\n\r\n\t\treturn work;\r\n\t}\r\n}\r\n", "/*!\r\n * Copyright 2016 The ANTLR Project. All rights reserved.\r\n * Licensed under the BSD-3-Clause license. See LICENSE file in the project root for license information.\r\n */\r\n\r\n// CONVERSTION complete, Burt Harris 10/14/2016\r\nimport { NotNull } from \"../../Decorators\";\r\nimport { ParseTree } from \"../ParseTree\";\r\nimport { ParseTreeMatch } from \"./ParseTreeMatch\";\r\nimport { ParseTreePatternMatcher } from \"./ParseTreePatternMatcher\";\r\nimport { XPath } from \"../xpath/XPath\";\r\n\r\n/**\r\n * A pattern like ` = ;` converted to a {@link ParseTree} by\r\n * {@link ParseTreePatternMatcher#compile(String, int)}.\r\n */\r\nexport class ParseTreePattern {\r\n\t/**\r\n\t * This is the backing field for `patternRuleIndex`.\r\n\t */\r\n\tprivate _patternRuleIndex: number;\r\n\r\n\t/**\r\n\t * This is the backing field for `pattern`.\r\n\t */\r\n\t@NotNull\r\n\tprivate _pattern: string;\r\n\r\n\t/**\r\n\t * This is the backing field for `patternTree`.\r\n\t */\r\n\t@NotNull\r\n\tprivate _patternTree: ParseTree;\r\n\r\n\t/**\r\n\t * This is the backing field for `matcher`.\r\n\t */\r\n\t@NotNull\r\n\tprivate _matcher: 
ParseTreePatternMatcher;\r\n\r\n\t/**\r\n\t * Construct a new instance of the {@link ParseTreePattern} class.\r\n\t *\r\n\t * @param matcher The {@link ParseTreePatternMatcher} which created this\r\n\t * tree pattern.\r\n\t * @param pattern The tree pattern in concrete syntax form.\r\n\t * @param patternRuleIndex The parser rule which serves as the root of the\r\n\t * tree pattern.\r\n\t * @param patternTree The tree pattern in {@link ParseTree} form.\r\n\t */\r\n\tconstructor(\r\n\t\t@NotNull matcher: ParseTreePatternMatcher,\r\n\t\t@NotNull pattern: string,\r\n\t\tpatternRuleIndex: number,\r\n\t\t@NotNull patternTree: ParseTree) {\r\n\t\tthis._matcher = matcher;\r\n\t\tthis._patternRuleIndex = patternRuleIndex;\r\n\t\tthis._pattern = pattern;\r\n\t\tthis._patternTree = patternTree;\r\n\t}\r\n\r\n\t/**\r\n\t * Match a specific parse tree against this tree pattern.\r\n\t *\r\n\t * @param tree The parse tree to match against this tree pattern.\r\n\t * @returns A {@link ParseTreeMatch} object describing the result of the\r\n\t * match operation. 
The `ParseTreeMatch.succeeded` method can be\r\n\t * used to determine whether or not the match was successful.\r\n\t */\r\n\t@NotNull\r\n\tpublic match(@NotNull tree: ParseTree): ParseTreeMatch {\r\n\t\treturn this._matcher.match(tree, this);\r\n\t}\r\n\r\n\t/**\r\n\t * Determine whether or not a parse tree matches this tree pattern.\r\n\t *\r\n\t * @param tree The parse tree to match against this tree pattern.\r\n\t * @returns `true` if `tree` is a match for the current tree\r\n\t * pattern; otherwise, `false`.\r\n\t */\r\n\tpublic matches(@NotNull tree: ParseTree): boolean {\r\n\t\treturn this._matcher.match(tree, this).succeeded;\r\n\t}\r\n\r\n\t/**\r\n\t * Find all nodes using XPath and then try to match those subtrees against\r\n\t * this tree pattern.\r\n\t *\r\n\t * @param tree The {@link ParseTree} to match against this pattern.\r\n\t * @param xpath An expression matching the nodes\r\n\t *\r\n\t * @returns A collection of {@link ParseTreeMatch} objects describing the\r\n\t * successful matches. 
Unsuccessful matches are omitted from the result,\r\n\t * regardless of the reason for the failure.\r\n\t */\r\n\t@NotNull\r\n\tpublic findAll(@NotNull tree: ParseTree, @NotNull xpath: string): ParseTreeMatch[] {\r\n\t\tlet subtrees: Set = XPath.findAll(tree, xpath, this._matcher.parser);\r\n\t\tlet matches: ParseTreeMatch[] = [];\r\n\t\tfor (let t of subtrees) {\r\n\t\t\tlet match: ParseTreeMatch = this.match(t);\r\n\t\t\tif (match.succeeded) {\r\n\t\t\t\tmatches.push(match);\r\n\t\t\t}\r\n\t\t}\r\n\t\treturn matches;\r\n\t}\r\n\r\n\t/**\r\n\t * Get the {@link ParseTreePatternMatcher} which created this tree pattern.\r\n\t *\r\n\t * @returns The {@link ParseTreePatternMatcher} which created this tree\r\n\t * pattern.\r\n\t */\r\n\t@NotNull\r\n\tget matcher(): ParseTreePatternMatcher {\r\n\t\treturn this._matcher;\r\n\t}\r\n\r\n\t/**\r\n\t * Get the tree pattern in concrete syntax form.\r\n\t *\r\n\t * @returns The tree pattern in concrete syntax form.\r\n\t */\r\n\t@NotNull\r\n\tget pattern(): string {\r\n\t\treturn this._pattern;\r\n\t}\r\n\r\n\t/**\r\n\t * Get the parser rule which serves as the outermost rule for the tree\r\n\t * pattern.\r\n\t *\r\n\t * @returns The parser rule which serves as the outermost rule for the tree\r\n\t * pattern.\r\n\t */\r\n\tget patternRuleIndex(): number {\r\n\t\treturn this._patternRuleIndex;\r\n\t}\r\n\r\n\t/**\r\n\t * Get the tree pattern as a {@link ParseTree}. The rule and token tags from\r\n\t * the pattern are present in the parse tree as terminal nodes with a symbol\r\n\t * of type {@link RuleTagToken} or {@link TokenTagToken}.\r\n\t *\r\n\t * @returns The tree pattern as a {@link ParseTree}.\r\n\t */\r\n\t@NotNull\r\n\tget patternTree(): ParseTree {\r\n\t\treturn this._patternTree;\r\n\t}\r\n}\r\n", "/*!\r\n * Copyright 2016 The ANTLR Project. All rights reserved.\r\n * Licensed under the BSD-3-Clause license. 
See LICENSE file in the project root for license information.\r\n */\r\n\r\n// ConvertTo-TS run at 2016-10-04T11:26:46.0343500-07:00\r\n\r\nimport { CharStream } from \"../../CharStream\";\r\nimport { NotNull, Override } from \"../../Decorators\";\r\nimport { Token } from \"../../Token\";\r\nimport { TokenSource } from \"../../TokenSource\";\r\n\r\n/**\r\n * A {@link Token} object representing an entire subtree matched by a parser\r\n * rule; e.g., ``. These tokens are created for {@link TagChunk}\r\n * chunks where the tag corresponds to a parser rule.\r\n */\r\nexport class RuleTagToken implements Token {\r\n\t/**\r\n\t * This is the backing field for `ruleName`.\r\n\t */\r\n\tprivate _ruleName: string;\r\n\t/**\r\n\t * The token type for the current token. This is the token type assigned to\r\n\t * the bypass alternative for the rule during ATN deserialization.\r\n\t */\r\n\tprivate bypassTokenType: number;\r\n\t/**\r\n\t * This is the backing field for `label`.\r\n\t */\r\n\tprivate _label?: string;\r\n\r\n\t/**\r\n\t * Constructs a new instance of {@link RuleTagToken} with the specified rule\r\n\t * name, bypass token type, and label.\r\n\t *\r\n\t * @param ruleName The name of the parser rule this rule tag matches.\r\n\t * @param bypassTokenType The bypass token type assigned to the parser rule.\r\n\t * @param label The label associated with the rule tag, or `undefined` if\r\n\t * the rule tag is unlabeled.\r\n\t *\r\n\t * @exception IllegalArgumentException if `ruleName` is not defined\r\n\t * or empty.\r\n\t */\r\n\tconstructor(@NotNull ruleName: string, bypassTokenType: number, label?: string) {\r\n\t\tif (ruleName == null || ruleName.length === 0) {\r\n\t\t\tthrow new Error(\"ruleName cannot be null or empty.\");\r\n\t\t}\r\n\r\n\t\tthis._ruleName = ruleName;\r\n\t\tthis.bypassTokenType = bypassTokenType;\r\n\t\tthis._label = label;\r\n\t}\r\n\r\n\t/**\r\n\t * Gets the name of the rule associated with this rule tag.\r\n\t *\r\n\t * @returns The name of 
the parser rule associated with this rule tag.\r\n\t */\r\n\t@NotNull\r\n\tget ruleName(): string {\r\n\t\treturn this._ruleName;\r\n\t}\r\n\r\n\t/**\r\n\t * Gets the label associated with the rule tag.\r\n\t *\r\n\t * @returns The name of the label associated with the rule tag, or\r\n\t * `undefined` if this is an unlabeled rule tag.\r\n\t */\r\n\tget label(): string | undefined {\r\n\t\treturn this._label;\r\n\t}\r\n\r\n\t/**\r\n\t * {@inheritDoc}\r\n\t *\r\n\t * Rule tag tokens are always placed on the {@link #DEFAULT_CHANNEL}.\r\n\t */\r\n\t@Override\r\n\tget channel(): number {\r\n\t\treturn Token.DEFAULT_CHANNEL;\r\n\t}\r\n\r\n\t/**\r\n\t * {@inheritDoc}\r\n\t *\r\n\t * This method returns the rule tag formatted with `<` and `>`\r\n\t * delimiters.\r\n\t */\r\n\t@Override\r\n\tget text(): string {\r\n\t\tif (this._label != null) {\r\n\t\t\treturn \"<\" + this._label + \":\" + this._ruleName + \">\";\r\n\t\t}\r\n\r\n\t\treturn \"<\" + this._ruleName + \">\";\r\n\t}\r\n\r\n\t/**\r\n\t * {@inheritDoc}\r\n\t *\r\n\t * Rule tag tokens have types assigned according to the rule bypass\r\n\t * transitions created during ATN deserialization.\r\n\t */\r\n\t@Override\r\n\tget type(): number {\r\n\t\treturn this.bypassTokenType;\r\n\t}\r\n\r\n\t/**\r\n\t * {@inheritDoc}\r\n\t *\r\n\t * The implementation for {@link RuleTagToken} always returns 0.\r\n\t */\r\n\t@Override\r\n\tget line(): number {\r\n\t\treturn 0;\r\n\t}\r\n\r\n\t/**\r\n\t * {@inheritDoc}\r\n\t *\r\n\t * The implementation for {@link RuleTagToken} always returns -1.\r\n\t */\r\n\t@Override\r\n\tget charPositionInLine(): number {\r\n\t\treturn -1;\r\n\t}\r\n\r\n\t/**\r\n\t * {@inheritDoc}\r\n\t *\r\n\t * The implementation for {@link RuleTagToken} always returns -1.\r\n\t */\r\n\t@Override\r\n\tget tokenIndex(): number {\r\n\t\treturn -1;\r\n\t}\r\n\r\n\t/**\r\n\t * {@inheritDoc}\r\n\t *\r\n\t * The implementation for {@link RuleTagToken} always returns -1.\r\n\t */\r\n\t@Override\r\n\tget startIndex(): 
number {\r\n\t\treturn -1;\r\n\t}\r\n\r\n\t/**\r\n\t * {@inheritDoc}\r\n\t *\r\n\t * The implementation for {@link RuleTagToken} always returns -1.\r\n\t */\r\n\t@Override\r\n\tget stopIndex(): number {\r\n\t\treturn -1;\r\n\t}\r\n\r\n\t/**\r\n\t * {@inheritDoc}\r\n\t *\r\n\t * The implementation for {@link RuleTagToken} always returns `undefined`.\r\n\t */\r\n\t@Override\r\n\tget tokenSource(): TokenSource | undefined {\r\n\t\treturn undefined;\r\n\t}\r\n\r\n\t/**\r\n\t * {@inheritDoc}\r\n\t *\r\n\t * The implementation for {@link RuleTagToken} always returns `undefined`.\r\n\t */\r\n\t@Override\r\n\tget inputStream(): CharStream | undefined {\r\n\t\treturn undefined;\r\n\t}\r\n\r\n\t/**\r\n\t * {@inheritDoc}\r\n\t *\r\n\t * The implementation for {@link RuleTagToken} returns a string of the form\r\n\t * `ruleName:bypassTokenType`.\r\n\t */\r\n\t@Override\r\n\tpublic toString(): string {\r\n\t\treturn this._ruleName + \":\" + this.bypassTokenType;\r\n\t}\r\n}\r\n", "/*!\r\n * Copyright 2016 The ANTLR Project. All rights reserved.\r\n * Licensed under the BSD-3-Clause license. See LICENSE file in the project root for license information.\r\n */\r\n\r\n// ConvertTo-TS run at 2016-10-04T11:26:45.2799060-07:00\r\n\r\n/**\r\n * A chunk is either a token tag, a rule tag, or a span of literal text within a\r\n * tree pattern.\r\n *\r\n * The method {@link ParseTreePatternMatcher#split(String)} returns a list of\r\n * chunks in preparation for creating a token stream by\r\n * {@link ParseTreePatternMatcher#tokenize(String)}. From there, we get a parse\r\n * tree from with {@link ParseTreePatternMatcher#compile(String, int)}. These\r\n * chunks are converted to {@link RuleTagToken}, {@link TokenTagToken}, or the\r\n * regular tokens of the text surrounding the tags.\r\n */\r\nexport abstract class Chunk {\r\n}\r\n", "/*!\r\n * Copyright 2016 The ANTLR Project. All rights reserved.\r\n * Licensed under the BSD-3-Clause license. 
See LICENSE file in the project root for license information.\r\n */\r\n\r\n// ConvertTo-TS run at 2016-10-04T11:26:46.1670669-07:00\r\n\r\nimport { Chunk } from \"./Chunk\";\r\nimport { NotNull, Override } from \"../../Decorators\";\r\n\r\n/**\r\n * Represents a placeholder tag in a tree pattern. A tag can have any of the\r\n * following forms.\r\n *\r\n * * `expr`: An unlabeled placeholder for a parser rule `expr`.\r\n * * `ID`: An unlabeled placeholder for a token of type `ID`.\r\n * * `e:expr`: A labeled placeholder for a parser rule `expr`.\r\n * * `id:ID`: A labeled placeholder for a token of type `ID`.\r\n *\r\n * This class does not perform any validation on the tag or label names aside\r\n * from ensuring that the tag is a defined, non-empty string.\r\n */\r\nexport class TagChunk extends Chunk {\r\n\t/**\r\n\t * This is the backing field for `tag`.\r\n\t */\r\n\tprivate _tag: string;\r\n\t/**\r\n\t * This is the backing field for `label`.\r\n\t */\r\n\tprivate _label?: string;\r\n\r\n\t/**\r\n\t * Construct a new instance of {@link TagChunk} using the specified label\r\n\t * and tag.\r\n\t *\r\n\t * @param label The label for the tag. 
If this is `undefined`, the\r\n\t * {@link TagChunk} represents an unlabeled tag.\r\n\t * @param tag The tag, which should be the name of a parser rule or token\r\n\t * type.\r\n\t *\r\n\t * @exception IllegalArgumentException if `tag` is not defined or\r\n\t * empty.\r\n\t */\r\n\tconstructor(tag: string, label?: string) {\r\n\t\tsuper();\r\n\r\n\t\tif (tag == null || tag.length === 0) {\r\n\t\t\tthrow new Error(\"tag cannot be null or empty\");\r\n\t\t}\r\n\r\n\t\tthis._tag = tag;\r\n\t\tthis._label = label;\r\n\t}\r\n\r\n\t/**\r\n\t * Get the tag for this chunk.\r\n\t *\r\n\t * @returns The tag for the chunk.\r\n\t */\r\n\t@NotNull\r\n\tget tag(): string {\r\n\t\treturn this._tag;\r\n\t}\r\n\r\n\t/**\r\n\t * Get the label, if any, assigned to this chunk.\r\n\t *\r\n\t * @returns The label assigned to this chunk, or `undefined` if no label is\r\n\t * assigned to the chunk.\r\n\t */\r\n\tget label(): string | undefined {\r\n\t\treturn this._label;\r\n\t}\r\n\r\n\t/**\r\n\t * This method returns a text representation of the tag chunk. Labeled tags\r\n\t * are returned in the form `label:tag`, and unlabeled tags are\r\n\t * returned as just the tag name.\r\n\t */\r\n\t@Override\r\n\tpublic toString(): string {\r\n\t\tif (this._label != null) {\r\n\t\t\treturn this._label + \":\" + this._tag;\r\n\t\t}\r\n\r\n\t\treturn this._tag;\r\n\t}\r\n}\r\n", "/*!\r\n * Copyright 2016 The ANTLR Project. All rights reserved.\r\n * Licensed under the BSD-3-Clause license. 
See LICENSE file in the project root for license information.\r\n */\r\n\r\n// ConvertTo-TS run at 2016-10-04T11:26:46.2521448-07:00\r\n\r\nimport { Chunk } from \"./Chunk\";\r\nimport { NotNull, Override } from \"../../Decorators\";\r\n\r\n/**\r\n * Represents a span of raw text (concrete syntax) between tags in a tree\r\n * pattern string.\r\n */\r\nexport class TextChunk extends Chunk {\r\n\t/**\r\n\t * This is the backing field for {@link #getText}.\r\n\t */\r\n\t@NotNull\r\n\tprivate _text: string;\r\n\r\n\t/**\r\n\t * Constructs a new instance of {@link TextChunk} with the specified text.\r\n\t *\r\n\t * @param text The text of this chunk.\r\n\t * @exception IllegalArgumentException if `text` is not defined.\r\n\t */\r\n\tconstructor(@NotNull text: string) {\r\n\t\tsuper();\r\n\r\n\t\tif (text == null) {\r\n\t\t\tthrow new Error(\"text cannot be null\");\r\n\t\t}\r\n\r\n\t\tthis._text = text;\r\n\t}\r\n\r\n\t/**\r\n\t * Gets the raw text of this chunk.\r\n\t *\r\n\t * @returns The text of the chunk.\r\n\t */\r\n\t@NotNull\r\n\tget text(): string {\r\n\t\treturn this._text;\r\n\t}\r\n\r\n\t/**\r\n\t * {@inheritDoc}\r\n\t *\r\n\t * The implementation for {@link TextChunk} returns the result of\r\n\t * `text` in single quotes.\r\n\t */\r\n\t@Override\r\n\tpublic toString(): string {\r\n\t\treturn \"'\" + this._text + \"'\";\r\n\t}\r\n}\r\n", "/*!\r\n * Copyright 2016 The ANTLR Project. All rights reserved.\r\n * Licensed under the BSD-3-Clause license. See LICENSE file in the project root for license information.\r\n */\r\n\r\n// ConvertTo-TS run at 2016-10-04T11:26:46.3281988-07:00\r\n\r\nimport { CommonToken } from \"../../CommonToken\";\r\nimport { NotNull, Override } from \"../../Decorators\";\r\n\r\n/**\r\n * A {@link Token} object representing a token of a particular type; e.g.,\r\n * ``. 
These tokens are created for {@link TagChunk} chunks where the\r\n * tag corresponds to a lexer rule or token type.\r\n */\r\nexport class TokenTagToken extends CommonToken {\r\n\t/**\r\n\t * This is the backing field for `tokenName`.\r\n\t */\r\n\t@NotNull\r\n\tprivate _tokenName: string;\r\n\t/**\r\n\t * This is the backing field for `label`.\r\n\t */\r\n\tprivate _label: string | undefined;\r\n\r\n\t/**\r\n\t * Constructs a new instance of {@link TokenTagToken} with the specified\r\n\t * token name, type, and label.\r\n\t *\r\n\t * @param tokenName The token name.\r\n\t * @param type The token type.\r\n\t * @param label The label associated with the token tag, or `undefined` if\r\n\t * the token tag is unlabeled.\r\n\t */\r\n\tconstructor(@NotNull tokenName: string, type: number, label?: string) {\r\n\t\tsuper(type);\r\n\t\tthis._tokenName = tokenName;\r\n\t\tthis._label = label;\r\n\t}\r\n\r\n\t/**\r\n\t * Gets the token name.\r\n\t * @returns The token name.\r\n\t */\r\n\t@NotNull\r\n\tget tokenName(): string {\r\n\t\treturn this._tokenName;\r\n\t}\r\n\r\n\t/**\r\n\t * Gets the label associated with the rule tag.\r\n\t *\r\n\t * @returns The name of the label associated with the rule tag, or\r\n\t * `undefined` if this is an unlabeled rule tag.\r\n\t */\r\n\tget label(): string | undefined {\r\n\t\treturn this._label;\r\n\t}\r\n\r\n\t/**\r\n\t * {@inheritDoc}\r\n\t *\r\n\t * The implementation for {@link TokenTagToken} returns the token tag\r\n\t * formatted with `<` and `>` delimiters.\r\n\t */\r\n\t@Override\r\n\tget text(): string {\r\n\t\tif (this._label != null) {\r\n\t\t\treturn \"<\" + this._label + \":\" + this._tokenName + \">\";\r\n\t\t}\r\n\r\n\t\treturn \"<\" + this._tokenName + \">\";\r\n\t}\r\n\r\n\t/**\r\n\t * {@inheritDoc}\r\n\t *\r\n\t * The implementation for {@link TokenTagToken} returns a string of the form\r\n\t * `tokenName:type`.\r\n\t */\r\n\t@Override\r\n\tpublic toString(): string {\r\n\t\treturn this._tokenName + \":\" + 
this.type;\r\n\t}\r\n}\r\n", "/*!\r\n * Copyright 2016 The ANTLR Project. All rights reserved.\r\n * Licensed under the BSD-3-Clause license. See LICENSE file in the project root for license information.\r\n */\r\n\r\n// CONVERSTION complete, Burt Harris 10/14/2016\r\n\r\nimport { BailErrorStrategy } from \"../../BailErrorStrategy\";\r\nimport { CharStreams } from \"../../CharStreams\";\r\nimport { Chunk } from \"./Chunk\";\r\nimport { CommonTokenStream } from \"../../CommonTokenStream\";\r\nimport { Lexer } from \"../../Lexer\";\r\nimport { ListTokenSource } from \"../../ListTokenSource\";\r\nimport { MultiMap } from \"../../misc/MultiMap\";\r\nimport { NotNull } from \"../../Decorators\";\r\nimport { ParseCancellationException } from \"../../misc/ParseCancellationException\";\r\nimport { Parser } from \"../../Parser\";\r\nimport { ParserInterpreter } from \"../../ParserInterpreter\";\r\nimport { ParserRuleContext } from \"../../ParserRuleContext\";\r\nimport { ParseTree } from \"../ParseTree\";\r\nimport { ParseTreeMatch } from \"./ParseTreeMatch\";\r\nimport { ParseTreePattern } from \"./ParseTreePattern\";\r\nimport { RecognitionException } from \"../../RecognitionException\";\r\nimport { RuleNode } from \"../RuleNode\";\r\nimport { RuleTagToken } from \"./RuleTagToken\";\r\nimport { TagChunk } from \"./TagChunk\";\r\nimport { TerminalNode } from \"../TerminalNode\";\r\nimport { TextChunk } from \"./TextChunk\";\r\nimport { Token } from \"../../Token\";\r\nimport { TokenTagToken } from \"./TokenTagToken\";\r\n\r\n/**\r\n * A tree pattern matching mechanism for ANTLR {@link ParseTree}s.\r\n *\r\n * Patterns are strings of source input text with special tags representing\r\n * token or rule references such as:\r\n *\r\n * ```\r\n * = ;\r\n * ```\r\n *\r\n * Given a pattern start rule such as `statement`, this object constructs\r\n * a {@link ParseTree} with placeholders for the `ID` and `expr`\r\n * subtree. 
Then the {@link #match} routines can compare an actual\r\n * {@link ParseTree} from a parse with this pattern. Tag `` matches\r\n * any `ID` token and tag `` references the result of the\r\n * `expr` rule (generally an instance of `ExprContext`.\r\n *\r\n * Pattern `x = 0;` is a similar pattern that matches the same pattern\r\n * except that it requires the identifier to be `x` and the expression to\r\n * be `0`.\r\n *\r\n * The {@link #matches} routines return `true` or `false` based\r\n * upon a match for the tree rooted at the parameter sent in. The\r\n * {@link #match} routines return a {@link ParseTreeMatch} object that\r\n * contains the parse tree, the parse tree pattern, and a map from tag name to\r\n * matched nodes (more below). A subtree that fails to match, returns with\r\n * {@link ParseTreeMatch#mismatchedNode} set to the first tree node that did not\r\n * match.\r\n *\r\n * For efficiency, you can compile a tree pattern in string form to a\r\n * {@link ParseTreePattern} object.\r\n *\r\n * See `TestParseTreeMatcher` for lots of examples.\r\n * {@link ParseTreePattern} has two static helper methods:\r\n * {@link ParseTreePattern#findAll} and {@link ParseTreePattern#match} that\r\n * are easy to use but not super efficient because they create new\r\n * {@link ParseTreePatternMatcher} objects each time and have to compile the\r\n * pattern in string form before using it.\r\n *\r\n * The lexer and parser that you pass into the {@link ParseTreePatternMatcher}\r\n * constructor are used to parse the pattern in string form. The lexer converts\r\n * the ` = ;` into a sequence of four tokens (assuming lexer\r\n * throws out whitespace or puts it on a hidden channel). Be aware that the\r\n * input stream is reset for the lexer (but not the parser; a\r\n * {@link ParserInterpreter} is created to parse the input.). 
Any user-defined\r\n * fields you have put into the lexer might get changed when this mechanism asks\r\n * it to scan the pattern string.\r\n *\r\n * Normally a parser does not accept token `` as a valid\r\n * `expr` but, from the parser passed in, we create a special version of\r\n * the underlying grammar representation (an {@link ATN}) that allows imaginary\r\n * tokens representing rules (``) to match entire rules. We call\r\n * these *bypass alternatives*.\r\n *\r\n * Delimiters are `<`} and `>`}, with `\\` as the escape string\r\n * by default, but you can set them to whatever you want using\r\n * {@link #setDelimiters}. You must escape both start and stop strings\r\n * `\\<` and `\\>`.\r\n */\r\nexport class ParseTreePatternMatcher {\r\n\t/**\r\n\t * This is the backing field for `lexer`.\r\n\t */\r\n\tprivate _lexer: Lexer;\r\n\r\n\t/**\r\n\t * This is the backing field for `parser`.\r\n\t */\r\n\tprivate _parser: Parser;\r\n\r\n\tprotected start = \"<\";\r\n\tprotected stop = \">\";\r\n\tprotected escape = \"\\\\\"; // e.g., \\< and \\> must escape BOTH!\r\n\r\n\t/**\r\n\t * Regular expression corresponding to escape, for global replace\r\n\t */\r\n\tprotected escapeRE = /\\\\/g;\r\n\r\n\t/**\r\n\t * Constructs a {@link ParseTreePatternMatcher} or from a {@link Lexer} and\r\n\t * {@link Parser} object. The lexer input stream is altered for tokenizing\r\n\t * the tree patterns. 
The parser is used as a convenient mechanism to get\r\n\t * the grammar name, plus token, rule names.\r\n\t */\r\n\tconstructor(lexer: Lexer, parser: Parser) {\r\n\t\tthis._lexer = lexer;\r\n\t\tthis._parser = parser;\r\n\t}\r\n\r\n\t/**\r\n\t * Set the delimiters used for marking rule and token tags within concrete\r\n\t * syntax used by the tree pattern parser.\r\n\t *\r\n\t * @param start The start delimiter.\r\n\t * @param stop The stop delimiter.\r\n\t * @param escapeLeft The escape sequence to use for escaping a start or stop delimiter.\r\n\t *\r\n\t * @throws {@link Error} if `start` is not defined or empty.\r\n\t * @throws {@link Error} if `stop` is not defined or empty.\r\n\t */\r\n\tpublic setDelimiters(start: string, stop: string, escapeLeft: string): void {\r\n\t\tif (!start) {\r\n\t\t\tthrow new Error(\"start cannot be null or empty\");\r\n\t\t}\r\n\r\n\t\tif (!stop) {\r\n\t\t\tthrow new Error(\"stop cannot be null or empty\");\r\n\t\t}\r\n\r\n\t\tthis.start = start;\r\n\t\tthis.stop = stop;\r\n\t\tthis.escape = escapeLeft;\r\n\t\tthis.escapeRE = new RegExp(escapeLeft.replace(/[.*+?^${}()|[\\]\\\\]/g, \"\\\\$&\"), \"g\");\r\n\t}\r\n\r\n\t/** Does `pattern` matched as rule `patternRuleIndex` match `tree`? */\r\n\tpublic matches(tree: ParseTree, pattern: string, patternRuleIndex: number): boolean;\r\n\r\n\t/** Does `pattern` matched as rule patternRuleIndex match tree? 
Pass in a\r\n\t * compiled pattern instead of a string representation of a tree pattern.\r\n\t */\r\n\tpublic matches(tree: ParseTree, pattern: ParseTreePattern): boolean;\r\n\r\n\tpublic matches(tree: ParseTree, pattern: string | ParseTreePattern, patternRuleIndex: number = 0): boolean {\r\n\t\tif (typeof pattern === \"string\") {\r\n\t\t\tlet p: ParseTreePattern = this.compile(pattern, patternRuleIndex);\r\n\t\t\treturn this.matches(tree, p);\r\n\t\t} else {\r\n\t\t\tlet labels = new MultiMap();\r\n\t\t\tlet mismatchedNode = this.matchImpl(tree, pattern.patternTree, labels);\r\n\t\t\treturn !mismatchedNode;\r\n\t\t}\r\n\t}\r\n\r\n\t/**\r\n\t * Compare `pattern` matched as rule `patternRuleIndex` against\r\n\t * `tree` and return a {@link ParseTreeMatch} object that contains the\r\n\t * matched elements, or the node at which the match failed.\r\n\t */\r\n\tpublic match(tree: ParseTree, pattern: string, patternRuleIndex: number): ParseTreeMatch;\r\n\r\n\t/**\r\n\t * Compare `pattern` matched against `tree` and return a\r\n\t * {@link ParseTreeMatch} object that contains the matched elements, or the\r\n\t * node at which the match failed. 
Pass in a compiled pattern instead of a\r\n\t * string representation of a tree pattern.\r\n\t */\r\n\tpublic match(tree: ParseTree, pattern: ParseTreePattern): ParseTreeMatch;\r\n\r\n\t// Implementation of match\r\n\t@NotNull\r\n\tpublic match(tree: ParseTree, @NotNull pattern: string | ParseTreePattern, patternRuleIndex: number = 0): ParseTreeMatch {\r\n\t\tif (typeof pattern === \"string\") {\r\n\t\t\tlet p: ParseTreePattern = this.compile(pattern, patternRuleIndex);\r\n\t\t\treturn this.match(tree, p);\r\n\t\t} else {\r\n\t\t\tlet labels = new MultiMap();\r\n\t\t\tlet mismatchedNode = this.matchImpl(tree, pattern.patternTree, labels);\r\n\t\t\treturn new ParseTreeMatch(tree, pattern, labels, mismatchedNode);\r\n\t\t}\r\n\t}\r\n\r\n\t/**\r\n\t * For repeated use of a tree pattern, compile it to a\r\n\t * {@link ParseTreePattern} using this method.\r\n\t */\r\n\tpublic compile(pattern: string, patternRuleIndex: number): ParseTreePattern {\r\n\t\tlet tokenList = this.tokenize(pattern);\r\n\t\tlet tokenSrc = new ListTokenSource(tokenList);\r\n\t\tlet tokens = new CommonTokenStream(tokenSrc);\r\n\t\tconst parser = this._parser;\r\n\r\n\t\tlet parserInterp = new ParserInterpreter(\r\n\t\t\tparser.grammarFileName,\r\n\t\t\tparser.vocabulary,\r\n\t\t\tparser.ruleNames,\r\n\t\t\tparser.getATNWithBypassAlts(),\r\n\t\t\ttokens);\r\n\r\n\t\tlet tree: ParseTree;\r\n\t\ttry {\r\n\t\t\tparserInterp.errorHandler = new BailErrorStrategy();\r\n\t\t\ttree = parserInterp.parse(patternRuleIndex);\r\n//\t\t\tSystem.out.println(\"pattern tree = \"+tree.toStringTree(parserInterp));\r\n\t\t} catch (e) {\r\n\t\t\tif (e instanceof ParseCancellationException) {\r\n\t\t\t\tthrow e.getCause();\r\n\t\t\t} else if (e instanceof RecognitionException) {\r\n\t\t\t\tthrow e;\r\n\t\t\t} else if (e instanceof Error) {\r\n\t\t\t\tthrow new ParseTreePatternMatcher.CannotInvokeStartRule(e);\r\n\t\t\t} else {\r\n\t\t\t\tthrow e;\r\n\t\t\t}\r\n\t\t}\r\n\r\n\t\t// Make sure tree pattern compilation 
checks for a complete parse\r\n\t\tif (tokens.LA(1) !== Token.EOF) {\r\n\t\t\tthrow new ParseTreePatternMatcher.StartRuleDoesNotConsumeFullPattern();\r\n\t\t}\r\n\r\n\t\treturn new ParseTreePattern(this, pattern, patternRuleIndex, tree);\r\n\t}\r\n\r\n\t/**\r\n\t * Used to convert the tree pattern string into a series of tokens. The\r\n\t * input stream is reset.\r\n\t */\r\n\t@NotNull\r\n\tget lexer(): Lexer {\r\n\t\treturn this._lexer;\r\n\t}\r\n\r\n\t/**\r\n\t * Used to collect to the grammar file name, token names, rule names for\r\n\t * used to parse the pattern into a parse tree.\r\n\t */\r\n\t@NotNull\r\n\tget parser(): Parser {\r\n\t\treturn this._parser;\r\n\t}\r\n\r\n\t// ---- SUPPORT CODE ----\r\n\r\n\t/**\r\n\t * Recursively walk `tree` against `patternTree`, filling\r\n\t * `match.`{@link ParseTreeMatch#labels labels}.\r\n\t *\r\n\t * @returns the first node encountered in `tree` which does not match\r\n\t * a corresponding node in `patternTree`, or `undefined` if the match\r\n\t * was successful. 
The specific node returned depends on the matching\r\n\t * algorithm used by the implementation, and may be overridden.\r\n\t */\r\n\tprotected matchImpl(\r\n\t\t@NotNull tree: ParseTree,\r\n\t\t@NotNull patternTree: ParseTree,\r\n\t\t@NotNull labels: MultiMap): ParseTree | undefined {\r\n\t\tif (!tree) {\r\n\t\t\tthrow new TypeError(\"tree cannot be null\");\r\n\t\t}\r\n\r\n\t\tif (!patternTree) {\r\n\t\t\tthrow new TypeError(\"patternTree cannot be null\");\r\n\t\t}\r\n\r\n\t\t// x and , x and y, or x and x; or could be mismatched types\r\n\t\tif (tree instanceof TerminalNode && patternTree instanceof TerminalNode) {\r\n\t\t\tlet mismatchedNode: ParseTree | undefined;\r\n\t\t\t// both are tokens and they have same type\r\n\t\t\tif (tree.symbol.type === patternTree.symbol.type) {\r\n\t\t\t\tif (patternTree.symbol instanceof TokenTagToken) { // x and \r\n\t\t\t\t\tlet tokenTagToken = patternTree.symbol;\r\n\t\t\t\t\t// track label->list-of-nodes for both token name and label (if any)\r\n\t\t\t\t\tlabels.map(tokenTagToken.tokenName, tree);\r\n\t\t\t\t\tconst l = tokenTagToken.label;\r\n\t\t\t\t\tif (l) {\r\n\t\t\t\t\t\tlabels.map(l, tree);\r\n\t\t\t\t\t}\r\n\t\t\t\t}\r\n\t\t\t\telse if (tree.text === patternTree.text) {\r\n\t\t\t\t\t// x and x\r\n\t\t\t\t}\r\n\t\t\t\telse {\r\n\t\t\t\t\t// x and y\r\n\t\t\t\t\tif (!mismatchedNode) {\r\n\t\t\t\t\t\tmismatchedNode = tree;\r\n\t\t\t\t\t}\r\n\t\t\t\t}\r\n\t\t\t}\r\n\t\t\telse {\r\n\t\t\t\tif (!mismatchedNode) {\r\n\t\t\t\t\tmismatchedNode = tree;\r\n\t\t\t\t}\r\n\t\t\t}\r\n\r\n\t\t\treturn mismatchedNode;\r\n\t\t}\r\n\r\n\t\tif (tree instanceof ParserRuleContext\r\n\t\t\t&& patternTree instanceof ParserRuleContext) {\r\n\t\t\tlet mismatchedNode: ParseTree | undefined;\r\n\t\t\t// (expr ...) 
and \r\n\t\t\tlet ruleTagToken = this.getRuleTagToken(patternTree);\r\n\t\t\tif (ruleTagToken) {\r\n\t\t\t\tlet m: ParseTreeMatch;\r\n\t\t\t\tif (tree.ruleContext.ruleIndex === patternTree.ruleContext.ruleIndex) {\r\n\t\t\t\t\t// track label->list-of-nodes for both rule name and label (if any)\r\n\t\t\t\t\tlabels.map(ruleTagToken.ruleName, tree);\r\n\t\t\t\t\tconst l = ruleTagToken.label;\r\n\t\t\t\t\tif (l) {\r\n\t\t\t\t\t\tlabels.map(l, tree);\r\n\t\t\t\t\t}\r\n\t\t\t\t}\r\n\t\t\t\telse {\r\n\t\t\t\t\tif (!mismatchedNode) {\r\n\t\t\t\t\t\tmismatchedNode = tree;\r\n\t\t\t\t\t}\r\n\t\t\t\t}\r\n\r\n\t\t\t\treturn mismatchedNode;\r\n\t\t\t}\r\n\r\n\t\t\t// (expr ...) and (expr ...)\r\n\t\t\tif (tree.childCount !== patternTree.childCount) {\r\n\t\t\t\tif (!mismatchedNode) {\r\n\t\t\t\t\tmismatchedNode = tree;\r\n\t\t\t\t}\r\n\r\n\t\t\t\treturn mismatchedNode;\r\n\t\t\t}\r\n\r\n\t\t\tlet n: number = tree.childCount;\r\n\t\t\tfor (let i = 0; i < n; i++) {\r\n\t\t\t\tlet childMatch = this.matchImpl(tree.getChild(i), patternTree.getChild(i), labels);\r\n\t\t\t\tif (childMatch) {\r\n\t\t\t\t\treturn childMatch;\r\n\t\t\t\t}\r\n\t\t\t}\r\n\r\n\t\t\treturn mismatchedNode;\r\n\t\t}\r\n\r\n\t\t// if nodes aren't both tokens or both rule nodes, can't match\r\n\t\treturn tree;\r\n\t}\r\n\r\n\t/** Is `t` `(expr )` subtree? 
*/\r\n\tprotected getRuleTagToken(t: ParseTree): RuleTagToken | undefined {\r\n\t\tif (t instanceof RuleNode) {\r\n\t\t\tif (t.childCount === 1 && t.getChild(0) instanceof TerminalNode) {\r\n\t\t\t\tlet c = t.getChild(0) as TerminalNode;\r\n\t\t\t\tif (c.symbol instanceof RuleTagToken) {\r\n//\t\t\t\t\tSystem.out.println(\"rule tag subtree \"+t.toStringTree(parser));\r\n\t\t\t\t\treturn c.symbol;\r\n\t\t\t\t}\r\n\t\t\t}\r\n\t\t}\r\n\t\treturn undefined;\r\n\t}\r\n\r\n\tpublic tokenize(pattern: string): Token[] {\r\n\t\t// split pattern into chunks: sea (raw input) and islands (, )\r\n\t\tlet chunks = this.split(pattern);\r\n\r\n\t\t// create token stream from text and tags\r\n\t\tlet tokens: Token[] = [];\r\n\r\n\t\tfor (let chunk of chunks) {\r\n\t\t\tif (chunk instanceof TagChunk) {\r\n\t\t\t\tlet tagChunk = chunk;\r\n\t\t\t\tconst firstChar = tagChunk.tag.substr(0, 1);\r\n\t\t\t\t// add special rule token or conjure up new token from name\r\n\t\t\t\tif (firstChar === firstChar.toUpperCase()) {\r\n\t\t\t\t\tlet ttype: number = this._parser.getTokenType(tagChunk.tag);\r\n\t\t\t\t\tif (ttype === Token.INVALID_TYPE) {\r\n\t\t\t\t\t\tthrow new Error(\"Unknown token \" + tagChunk.tag + \" in pattern: \" + pattern);\r\n\t\t\t\t\t}\r\n\t\t\t\t\tlet t: TokenTagToken = new TokenTagToken(tagChunk.tag, ttype, tagChunk.label);\r\n\t\t\t\t\ttokens.push(t);\r\n\t\t\t\t}\r\n\t\t\t\telse if (firstChar === firstChar.toLowerCase()) {\r\n\t\t\t\t\tlet ruleIndex: number = this._parser.getRuleIndex(tagChunk.tag);\r\n\t\t\t\t\tif (ruleIndex === -1) {\r\n\t\t\t\t\t\tthrow new Error(\"Unknown rule \" + tagChunk.tag + \" in pattern: \" + pattern);\r\n\t\t\t\t\t}\r\n\t\t\t\t\tlet ruleImaginaryTokenType: number = this._parser.getATNWithBypassAlts().ruleToTokenType[ruleIndex];\r\n\t\t\t\t\ttokens.push(new RuleTagToken(tagChunk.tag, ruleImaginaryTokenType, tagChunk.label));\r\n\t\t\t\t}\r\n\t\t\t\telse {\r\n\t\t\t\t\tthrow new Error(\"invalid tag: \" + tagChunk.tag + \" in pattern: \" + 
pattern);\r\n\t\t\t\t}\r\n\t\t\t}\r\n\t\t\telse {\r\n\t\t\t\tlet textChunk = chunk as TextChunk;\r\n\t\t\t\tthis._lexer.inputStream = CharStreams.fromString(textChunk.text);\r\n\t\t\t\tlet t: Token = this._lexer.nextToken();\r\n\t\t\t\twhile (t.type !== Token.EOF) {\r\n\t\t\t\t\ttokens.push(t);\r\n\t\t\t\t\tt = this._lexer.nextToken();\r\n\t\t\t\t}\r\n\t\t\t}\r\n\t\t}\r\n\r\n//\t\tSystem.out.println(\"tokens=\"+tokens);\r\n\t\treturn tokens;\r\n\t}\r\n\r\n\t/** Split ` = ;` into 4 chunks for tokenizing by {@link #tokenize}. */\r\n\tpublic split(pattern: string): Chunk[] {\r\n\t\tlet p: number = 0;\r\n\t\tlet n: number = pattern.length;\r\n\t\tlet chunks: Chunk[] = [];\r\n\t\tlet buf: \"\";\r\n\t\t// find all start and stop indexes first, then collect\r\n\t\tlet starts: number[] = [];\r\n\t\tlet stops: number[] = [];\r\n\t\twhile (p < n) {\r\n\t\t\tif (p === pattern.indexOf(this.escape + this.start, p)) {\r\n\t\t\t\tp += this.escape.length + this.start.length;\r\n\t\t\t}\r\n\t\t\telse if (p === pattern.indexOf(this.escape + this.stop, p)) {\r\n\t\t\t\tp += this.escape.length + this.stop.length;\r\n\t\t\t}\r\n\t\t\telse if (p === pattern.indexOf(this.start, p)) {\r\n\t\t\t\tstarts.push(p);\r\n\t\t\t\tp += this.start.length;\r\n\t\t\t}\r\n\t\t\telse if (p === pattern.indexOf(this.stop, p)) {\r\n\t\t\t\tstops.push(p);\r\n\t\t\t\tp += this.stop.length;\r\n\t\t\t}\r\n\t\t\telse {\r\n\t\t\t\tp++;\r\n\t\t\t}\r\n\t\t}\r\n\r\n//\t\tSystem.out.println(\"\");\r\n//\t\tSystem.out.println(starts);\r\n//\t\tSystem.out.println(stops);\r\n\t\tif (starts.length > stops.length) {\r\n\t\t\tthrow new Error(\"unterminated tag in pattern: \" + pattern);\r\n\t\t}\r\n\r\n\t\tif (starts.length < stops.length) {\r\n\t\t\tthrow new Error(\"missing start tag in pattern: \" + pattern);\r\n\t\t}\r\n\r\n\t\tlet ntags: number = starts.length;\r\n\t\tfor (let i = 0; i < ntags; i++) {\r\n\t\t\tif (starts[i] >= stops[i]) {\r\n\t\t\t\tthrow new Error(\"tag delimiters out of order in pattern: \" + 
pattern);\r\n\t\t\t}\r\n\t\t}\r\n\r\n\t\t// collect into chunks now\r\n\t\tif (ntags === 0) {\r\n\t\t\tlet text: string = pattern.substring(0, n);\r\n\t\t\tchunks.push(new TextChunk(text));\r\n\t\t}\r\n\r\n\t\tif (ntags > 0 && starts[0] > 0) { // copy text up to first tag into chunks\r\n\t\t\tlet text: string = pattern.substring(0, starts[0]);\r\n\t\t\tchunks.push(new TextChunk(text));\r\n\t\t}\r\n\t\tfor (let i = 0; i < ntags; i++) {\r\n\t\t\t// copy inside of \r\n\t\t\tlet tag: string = pattern.substring(starts[i] + this.start.length, stops[i]);\r\n\t\t\tlet ruleOrToken: string = tag;\r\n\t\t\tlet label: string | undefined;\r\n\t\t\tlet colon: number = tag.indexOf(\":\");\r\n\t\t\tif (colon >= 0) {\r\n\t\t\t\tlabel = tag.substring(0, colon);\r\n\t\t\t\truleOrToken = tag.substring(colon + 1, tag.length);\r\n\t\t\t}\r\n\t\t\tchunks.push(new TagChunk(ruleOrToken, label));\r\n\t\t\tif (i + 1 < ntags) {\r\n\t\t\t\t// copy from end of to start of next\r\n\t\t\t\tlet text: string = pattern.substring(stops[i] + this.stop.length, starts[i + 1]);\r\n\t\t\t\tchunks.push(new TextChunk(text));\r\n\t\t\t}\r\n\t\t}\r\n\t\tif (ntags > 0) {\r\n\t\t\tlet afterLastTag: number = stops[ntags - 1] + this.stop.length;\r\n\t\t\tif (afterLastTag < n) { // copy text from end of last tag to end\r\n\t\t\t\tlet text: string = pattern.substring(afterLastTag, n);\r\n\t\t\t\tchunks.push(new TextChunk(text));\r\n\t\t\t}\r\n\t\t}\r\n\r\n\t\t// strip out the escape sequences from text chunks but not tags\r\n\t\tfor (let i = 0; i < chunks.length; i++) {\r\n\t\t\tlet c: Chunk = chunks[i];\r\n\t\t\tif (c instanceof TextChunk) {\r\n\t\t\t\tlet unescaped: string = c.text.replace(this.escapeRE, \"\");\r\n\t\t\t\tif (unescaped.length < c.text.length) {\r\n\t\t\t\t\tchunks[i] = new TextChunk(unescaped);\r\n\t\t\t\t}\r\n\t\t\t}\r\n\t\t}\r\n\r\n\t\treturn chunks;\r\n\t}\r\n}\r\n\r\nexport namespace ParseTreePatternMatcher {\r\n\texport class CannotInvokeStartRule extends Error {\r\n\t\tpublic 
constructor(public error: Error) {\r\n\t\t\tsuper(`CannotInvokeStartRule: ${error}`);\r\n\t\t}\r\n\t}\r\n\r\n\t// Fixes https://github.com/antlr/antlr4/issues/413\r\n\t// \"Tree pattern compilation doesn't check for a complete parse\"\r\n\texport class StartRuleDoesNotConsumeFullPattern extends Error {\r\n\t\tconstructor() {\r\n\t\t\tsuper(\"StartRuleDoesNotConsumeFullPattern\");\r\n\t\t}\r\n\t}\r\n}\r\n", "/*!\r\n * Copyright 2016 The ANTLR Project. All rights reserved.\r\n * Licensed under the BSD-3-Clause license. See LICENSE file in the project root for license information.\r\n */\r\n\r\n// ConvertTo-TS run at 2016-10-04T11:26:28.2401032-07:00\r\n\r\nimport { NotNull } from \"../Decorators\";\r\nimport { SimulatorState } from \"./SimulatorState\";\r\nimport { TokenStream } from \"../TokenStream\";\r\n\r\n/**\r\n * This is the base class for gathering detailed information about prediction\r\n * events which occur during parsing.\r\n *\r\n * Note that we could record the parser call stack at the time this event\r\n * occurred but in the presence of left recursive rules, the stack is kind of\r\n * meaningless. It's better to look at the individual configurations for their\r\n * individual stacks. Of course that is a {@link PredictionContext} object\r\n * not a parse tree node and so it does not have information about the extent\r\n * (start...stop) of the various subtrees. 
Examining the stack tops of all\r\n * configurations provide the return states for the rule invocations.\r\n * From there you can get the enclosing rule.\r\n *\r\n * @since 4.3\r\n */\r\nexport class DecisionEventInfo {\r\n\t/**\r\n\t * The invoked decision number which this event is related to.\r\n\t *\r\n\t * @see ATN#decisionToState\r\n\t */\r\n\tpublic decision: number;\r\n\r\n\t/**\r\n\t * The simulator state containing additional information relevant to the\r\n\t * prediction state when the current event occurred, or `undefined` if no\r\n\t * additional information is relevant or available.\r\n\t */\r\n\tpublic state: SimulatorState | undefined;\r\n\r\n\t/**\r\n\t * The input token stream which is being parsed.\r\n\t */\r\n\t@NotNull\r\n\tpublic input: TokenStream;\r\n\r\n\t/**\r\n\t * The token index in the input stream at which the current prediction was\r\n\t * originally invoked.\r\n\t */\r\n\tpublic startIndex: number;\r\n\r\n\t/**\r\n\t * The token index in the input stream at which the current event occurred.\r\n\t */\r\n\tpublic stopIndex: number;\r\n\r\n\t/**\r\n\t * `true` if the current event occurred during LL prediction;\r\n\t * otherwise, `false` if the input occurred during SLL prediction.\r\n\t */\r\n\tpublic fullCtx: boolean;\r\n\r\n\tconstructor(\r\n\t\tdecision: number,\r\n\t\tstate: SimulatorState | undefined,\r\n\t\t@NotNull input: TokenStream,\r\n\t\tstartIndex: number,\r\n\t\tstopIndex: number,\r\n\t\tfullCtx: boolean) {\r\n\r\n\t\tthis.decision = decision;\r\n\t\tthis.fullCtx = fullCtx;\r\n\t\tthis.stopIndex = stopIndex;\r\n\t\tthis.input = input;\r\n\t\tthis.startIndex = startIndex;\r\n\t\tthis.state = state;\r\n\t}\r\n}\r\n", "/*!\r\n * Copyright 2016 The ANTLR Project. All rights reserved.\r\n * Licensed under the BSD-3-Clause license. 
See LICENSE file in the project root for license information.\r\n */\r\n\r\n// ConvertTo-TS run at 2016-10-04T11:26:24.8229279-07:00\r\n\r\nimport { BitSet } from \"../misc/BitSet\";\r\nimport { DecisionEventInfo } from \"./DecisionEventInfo\";\r\nimport { NotNull } from \"../Decorators\";\r\nimport { SimulatorState } from \"./SimulatorState\";\r\nimport { TokenStream } from \"../TokenStream\";\r\n\r\n/**\r\n * This class represents profiling event information for an ambiguity.\r\n * Ambiguities are decisions where a particular input resulted in an SLL\r\n * conflict, followed by LL prediction also reaching a conflict state\r\n * (indicating a true ambiguity in the grammar).\r\n *\r\n * This event may be reported during SLL prediction in cases where the\r\n * conflicting SLL configuration set provides sufficient information to\r\n * determine that the SLL conflict is truly an ambiguity. For example, if none\r\n * of the ATN configurations in the conflicting SLL configuration set have\r\n * traversed a global follow transition (i.e.\r\n * {@link ATNConfig#getReachesIntoOuterContext} is `false` for all\r\n * configurations), then the result of SLL prediction for that input is known to\r\n * be equivalent to the result of LL prediction for that input.\r\n *\r\n * In some cases, the minimum represented alternative in the conflicting LL\r\n * configuration set is not equal to the minimum represented alternative in the\r\n * conflicting SLL configuration set. Grammars and inputs which result in this\r\n * scenario are unable to use {@link PredictionMode#SLL}, which in turn means\r\n * they cannot use the two-stage parsing strategy to improve parsing performance\r\n * for that input.\r\n *\r\n * @see ParserATNSimulator#reportAmbiguity\r\n * @see ParserErrorListener#reportAmbiguity\r\n *\r\n * @since 4.3\r\n */\r\nexport class AmbiguityInfo extends DecisionEventInfo {\r\n\t/** The set of alternative numbers for this decision event that lead to a valid parse. 
*/\r\n\t@NotNull\r\n\tprivate ambigAlts: BitSet;\r\n\r\n\t/**\r\n\t * Constructs a new instance of the {@link AmbiguityInfo} class with the\r\n\t * specified detailed ambiguity information.\r\n\t *\r\n\t * @param decision The decision number\r\n\t * @param state The final simulator state identifying the ambiguous\r\n\t * alternatives for the current input\r\n\t * @param ambigAlts The set of alternatives in the decision that lead to a valid parse.\r\n\t * The predicted alt is the min(ambigAlts)\r\n\t * @param input The input token stream\r\n\t * @param startIndex The start index for the current prediction\r\n\t * @param stopIndex The index at which the ambiguity was identified during\r\n\t * prediction\r\n\t */\r\n\tconstructor(\r\n\t\tdecision: number,\r\n\t\t@NotNull state: SimulatorState,\r\n\t\t@NotNull ambigAlts: BitSet,\r\n\t\t@NotNull input: TokenStream,\r\n\t\tstartIndex: number,\r\n\t\tstopIndex: number) {\r\n\t\tsuper(decision, state, input, startIndex, stopIndex, state.useContext);\r\n\t\tthis.ambigAlts = ambigAlts;\r\n\t}\r\n\r\n\t/**\r\n\t * Gets the set of alternatives in the decision that lead to a valid parse.\r\n\t *\r\n\t * @since 4.5\r\n\t */\r\n\t@NotNull\r\n\tget ambiguousAlternatives(): BitSet {\r\n\t\treturn this.ambigAlts;\r\n\t}\r\n}\r\n", "/*!\r\n * Copyright 2016 The ANTLR Project. All rights reserved.\r\n * Licensed under the BSD-3-Clause license. 
See LICENSE file in the project root for license information.\r\n */\r\n\r\n// ConvertTo-TS run at 2016-10-04T11:26:28.1575933-07:00\r\n\r\nimport { DecisionEventInfo } from \"./DecisionEventInfo\";\r\nimport { NotNull } from \"../Decorators\";\r\nimport { SimulatorState } from \"./SimulatorState\";\r\nimport { TokenStream } from \"../TokenStream\";\r\n\r\n/**\r\n * This class represents profiling event information for a context sensitivity.\r\n * Context sensitivities are decisions where a particular input resulted in an\r\n * SLL conflict, but LL prediction produced a single unique alternative.\r\n *\r\n * In some cases, the unique alternative identified by LL prediction is not\r\n * equal to the minimum represented alternative in the conflicting SLL\r\n * configuration set. Grammars and inputs which result in this scenario are\r\n * unable to use {@link PredictionMode#SLL}, which in turn means they cannot use\r\n * the two-stage parsing strategy to improve parsing performance for that\r\n * input.\r\n *\r\n * @see ParserATNSimulator#reportContextSensitivity\r\n * @see ParserErrorListener#reportContextSensitivity\r\n *\r\n * @since 4.3\r\n */\r\nexport class ContextSensitivityInfo extends DecisionEventInfo {\r\n\t/**\r\n\t * Constructs a new instance of the {@link ContextSensitivityInfo} class\r\n\t * with the specified detailed context sensitivity information.\r\n\t *\r\n\t * @param decision The decision number\r\n\t * @param state The final simulator state containing the unique\r\n\t * alternative identified by full-context prediction\r\n\t * @param input The input token stream\r\n\t * @param startIndex The start index for the current prediction\r\n\t * @param stopIndex The index at which the context sensitivity was\r\n\t * identified during full-context prediction\r\n\t */\r\n\tconstructor(\r\n\t\tdecision: number,\r\n\t\t@NotNull state: SimulatorState,\r\n\t\t@NotNull input: TokenStream,\r\n\t\tstartIndex: number,\r\n\t\tstopIndex: number) 
{\r\n\r\n\t\tsuper(decision, state, input, startIndex, stopIndex, true);\r\n\t}\r\n}\r\n", "/*!\r\n * Copyright 2016 The ANTLR Project. All rights reserved.\r\n * Licensed under the BSD-3-Clause license. See LICENSE file in the project root for license information.\r\n */\r\n\r\n// ConvertTo-TS run at 2016-10-04T11:26:28.3330673-07:00\r\n\r\nimport { AmbiguityInfo } from \"./AmbiguityInfo\";\r\nimport { ContextSensitivityInfo } from \"./ContextSensitivityInfo\";\r\nimport { ErrorInfo } from \"./ErrorInfo\";\r\nimport { LookaheadEventInfo } from \"./LookaheadEventInfo\";\r\nimport { Override } from \"../Decorators\";\r\nimport { PredicateEvalInfo } from \"./PredicateEvalInfo\";\r\n\r\n/**\r\n * This class contains profiling gathered for a particular decision.\r\n *\r\n * Parsing performance in ANTLR 4 is heavily influenced by both static factors\r\n * (e.g. the form of the rules in the grammar) and dynamic factors (e.g. the\r\n * choice of input and the state of the DFA cache at the time profiling\r\n * operations are started). For best results, gather and use aggregate\r\n * statistics from a large sample of inputs representing the inputs expected in\r\n * production before using the results to make changes in the grammar.\r\n *\r\n * @since 4.3\r\n */\r\nexport class DecisionInfo {\r\n\t/**\r\n\t * The decision number, which is an index into {@link ATN#decisionToState}.\r\n\t */\r\n\tpublic decision: number;\r\n\r\n\t/**\r\n\t * The total number of times {@link ParserATNSimulator#adaptivePredict} was\r\n\t * invoked for this decision.\r\n\t */\r\n\tpublic invocations: number = 0;\r\n\r\n\t/**\r\n\t * The total time spent in {@link ParserATNSimulator#adaptivePredict} for\r\n\t * this decision, in nanoseconds.\r\n\t *\r\n\t * The value of this field contains the sum of differential results obtained\r\n\t * by {@link System#nanoTime()}, and is not adjusted to compensate for JIT\r\n\t * and/or garbage collection overhead. 
For best accuracy, use a modern JVM\r\n\t * implementation that provides precise results from\r\n\t * {@link System#nanoTime()}, and perform profiling in a separate process\r\n\t * which is warmed up by parsing the input prior to profiling. If desired,\r\n\t * call {@link ATNSimulator#clearDFA} to reset the DFA cache to its initial\r\n\t * state before starting the profiling measurement pass.\r\n\t */\r\n\tpublic timeInPrediction: number = 0;\r\n\r\n\t/**\r\n\t * The sum of the lookahead required for SLL prediction for this decision.\r\n\t * Note that SLL prediction is used before LL prediction for performance\r\n\t * reasons even when {@link PredictionMode#LL} or\r\n\t * {@link PredictionMode#LL_EXACT_AMBIG_DETECTION} is used.\r\n\t */\r\n\tpublic SLL_TotalLook: number = 0;\r\n\r\n\t/**\r\n\t * Gets the minimum lookahead required for any single SLL prediction to\r\n\t * complete for this decision, by reaching a unique prediction, reaching an\r\n\t * SLL conflict state, or encountering a syntax error.\r\n\t */\r\n\tpublic SLL_MinLook: number = 0;\r\n\r\n\t/**\r\n\t * Gets the maximum lookahead required for any single SLL prediction to\r\n\t * complete for this decision, by reaching a unique prediction, reaching an\r\n\t * SLL conflict state, or encountering a syntax error.\r\n\t */\r\n\tpublic SLL_MaxLook: number = 0;\r\n\r\n\t/**\r\n\t * Gets the {@link LookaheadEventInfo} associated with the event where the\r\n\t * {@link #SLL_MaxLook} value was set.\r\n\t */\r\n\tpublic SLL_MaxLookEvent?: LookaheadEventInfo;\r\n\r\n\t/**\r\n\t * The sum of the lookahead required for LL prediction for this decision.\r\n\t * Note that LL prediction is only used when SLL prediction reaches a\r\n\t * conflict state.\r\n\t */\r\n\tpublic LL_TotalLook: number = 0;\r\n\r\n\t/**\r\n\t * Gets the minimum lookahead required for any single LL prediction to\r\n\t * complete for this decision. 
An LL prediction completes when the algorithm\r\n\t * reaches a unique prediction, a conflict state (for\r\n\t * {@link PredictionMode#LL}, an ambiguity state (for\r\n\t * {@link PredictionMode#LL_EXACT_AMBIG_DETECTION}, or a syntax error.\r\n\t */\r\n\tpublic LL_MinLook: number = 0;\r\n\r\n\t/**\r\n\t * Gets the maximum lookahead required for any single LL prediction to\r\n\t * complete for this decision. An LL prediction completes when the algorithm\r\n\t * reaches a unique prediction, a conflict state (for\r\n\t * {@link PredictionMode#LL}, an ambiguity state (for\r\n\t * {@link PredictionMode#LL_EXACT_AMBIG_DETECTION}, or a syntax error.\r\n\t */\r\n\tpublic LL_MaxLook: number = 0;\r\n\r\n\t/**\r\n\t * Gets the {@link LookaheadEventInfo} associated with the event where the\r\n\t * {@link #LL_MaxLook} value was set.\r\n\t */\r\n\tpublic LL_MaxLookEvent?: LookaheadEventInfo;\r\n\r\n\t/**\r\n\t * A collection of {@link ContextSensitivityInfo} instances describing the\r\n\t * context sensitivities encountered during LL prediction for this decision.\r\n\t *\r\n\t * @see ContextSensitivityInfo\r\n\t */\r\n\tpublic contextSensitivities: ContextSensitivityInfo[] = [];\r\n\r\n\t/**\r\n\t * A collection of {@link ErrorInfo} instances describing the parse errors\r\n\t * identified during calls to {@link ParserATNSimulator#adaptivePredict} for\r\n\t * this decision.\r\n\t *\r\n\t * @see ErrorInfo\r\n\t */\r\n\tpublic errors: ErrorInfo[] = [];\r\n\r\n\t/**\r\n\t * A collection of {@link AmbiguityInfo} instances describing the\r\n\t * ambiguities encountered during LL prediction for this decision.\r\n\t *\r\n\t * @see AmbiguityInfo\r\n\t */\r\n\tpublic ambiguities: AmbiguityInfo[] = [];\r\n\r\n\t/**\r\n\t * A collection of {@link PredicateEvalInfo} instances describing the\r\n\t * results of evaluating individual predicates during prediction for this\r\n\t * decision.\r\n\t *\r\n\t * @see PredicateEvalInfo\r\n\t */\r\n\tpublic predicateEvals: PredicateEvalInfo[] = 
[];\r\n\r\n\t/**\r\n\t * The total number of ATN transitions required during SLL prediction for\r\n\t * this decision. An ATN transition is determined by the number of times the\r\n\t * DFA does not contain an edge that is required for prediction, resulting\r\n\t * in on-the-fly computation of that edge.\r\n\t *\r\n\t * If DFA caching of SLL transitions is employed by the implementation, ATN\r\n\t * computation may cache the computed edge for efficient lookup during\r\n\t * future parsing of this decision. Otherwise, the SLL parsing algorithm\r\n\t * will use ATN transitions exclusively.\r\n\t *\r\n\t * @see #SLL_ATNTransitions\r\n\t * @see ParserATNSimulator#computeTargetState\r\n\t * @see LexerATNSimulator#computeTargetState\r\n\t */\r\n\tpublic SLL_ATNTransitions: number = 0;\r\n\r\n\t/**\r\n\t * The total number of DFA transitions required during SLL prediction for\r\n\t * this decision.\r\n\t *\r\n\t * If the ATN simulator implementation does not use DFA caching for SLL\r\n\t * transitions, this value will be 0.\r\n\t *\r\n\t * @see ParserATNSimulator#getExistingTargetState\r\n\t * @see LexerATNSimulator#getExistingTargetState\r\n\t */\r\n\tpublic SLL_DFATransitions: number = 0;\r\n\r\n\t/**\r\n\t * Gets the total number of times SLL prediction completed in a conflict\r\n\t * state, resulting in fallback to LL prediction.\r\n\t *\r\n\t * Note that this value is not related to whether or not\r\n\t * {@link PredictionMode#SLL} may be used successfully with a particular\r\n\t * grammar. If the ambiguity resolution algorithm applied to the SLL\r\n\t * conflicts for this decision produce the same result as LL prediction for\r\n\t * this decision, {@link PredictionMode#SLL} would produce the same overall\r\n\t * parsing result as {@link PredictionMode#LL}.\r\n\t */\r\n\tpublic LL_Fallback: number = 0;\r\n\r\n\t/**\r\n\t * The total number of ATN transitions required during LL prediction for\r\n\t * this decision. 
An ATN transition is determined by the number of times the\r\n\t * DFA does not contain an edge that is required for prediction, resulting\r\n\t * in on-the-fly computation of that edge.\r\n\t *\r\n\t * If DFA caching of LL transitions is employed by the implementation, ATN\r\n\t * computation may cache the computed edge for efficient lookup during\r\n\t * future parsing of this decision. Otherwise, the LL parsing algorithm will\r\n\t * use ATN transitions exclusively.\r\n\t *\r\n\t * @see #LL_DFATransitions\r\n\t * @see ParserATNSimulator#computeTargetState\r\n\t * @see LexerATNSimulator#computeTargetState\r\n\t */\r\n\tpublic LL_ATNTransitions: number = 0;\r\n\r\n\t/**\r\n\t * The total number of DFA transitions required during LL prediction for\r\n\t * this decision.\r\n\t *\r\n\t * If the ATN simulator implementation does not use DFA caching for LL\r\n\t * transitions, this value will be 0.\r\n\t *\r\n\t * @see ParserATNSimulator#getExistingTargetState\r\n\t * @see LexerATNSimulator#getExistingTargetState\r\n\t */\r\n\tpublic LL_DFATransitions: number = 0;\r\n\r\n\t/**\r\n\t * Constructs a new instance of the {@link DecisionInfo} class to contain\r\n\t * statistics for a particular decision.\r\n\t *\r\n\t * @param decision The decision number\r\n\t */\r\n\tconstructor(decision: number) {\r\n\t\tthis.decision = decision;\r\n\t}\r\n\r\n\t@Override\r\n\tpublic toString(): string {\r\n\t\treturn \"{\" +\r\n\t\t\t\"decision=\" + this.decision +\r\n\t\t\t\", contextSensitivities=\" + this.contextSensitivities.length +\r\n\t\t\t\", errors=\" + this.errors.length +\r\n\t\t\t\", ambiguities=\" + this.ambiguities.length +\r\n\t\t\t\", SLL_lookahead=\" + this.SLL_TotalLook +\r\n\t\t\t\", SLL_ATNTransitions=\" + this.SLL_ATNTransitions +\r\n\t\t\t\", SLL_DFATransitions=\" + this.SLL_DFATransitions +\r\n\t\t\t\", LL_Fallback=\" + this.LL_Fallback +\r\n\t\t\t\", LL_lookahead=\" + this.LL_TotalLook +\r\n\t\t\t\", LL_ATNTransitions=\" + this.LL_ATNTransitions 
+\r\n\t\t\t\"}\";\r\n\t}\r\n}\r\n", "/*!\r\n * Copyright 2016 The ANTLR Project. All rights reserved.\r\n * Licensed under the BSD-3-Clause license. See LICENSE file in the project root for license information.\r\n */\r\n\r\n// ConvertTo-TS run at 2016-10-04T11:26:28.7213647-07:00\r\n\r\nimport { DecisionEventInfo } from \"./DecisionEventInfo\";\r\nimport { NotNull } from \"../Decorators\";\r\nimport { SimulatorState } from \"./SimulatorState\";\r\nimport { TokenStream } from \"../TokenStream\";\r\n\r\n/**\r\n * This class represents profiling event information for a syntax error\r\n * identified during prediction. Syntax errors occur when the prediction\r\n * algorithm is unable to identify an alternative which would lead to a\r\n * successful parse.\r\n *\r\n * @see Parser#notifyErrorListeners(Token, String, RecognitionException)\r\n * @see ANTLRErrorListener#syntaxError\r\n *\r\n * @since 4.3\r\n */\r\nexport class ErrorInfo extends DecisionEventInfo {\r\n\t/**\r\n\t * Constructs a new instance of the {@link ErrorInfo} class with the\r\n\t * specified detailed syntax error information.\r\n\t *\r\n\t * @param decision The decision number\r\n\t * @param state The final simulator state reached during prediction\r\n\t * prior to reaching the {@link ATNSimulator#ERROR} state\r\n\t * @param input The input token stream\r\n\t * @param startIndex The start index for the current prediction\r\n\t * @param stopIndex The index at which the syntax error was identified\r\n\t */\r\n\tconstructor(\r\n\t\tdecision: number,\r\n\t\t@NotNull state: SimulatorState,\r\n\t\t@NotNull input: TokenStream,\r\n\t\tstartIndex: number,\r\n\t\tstopIndex: number) {\r\n\r\n\t\tsuper(decision, state, input, startIndex, stopIndex, state.useContext);\r\n\t}\r\n}\r\n", "/*!\r\n * Copyright 2016 The ANTLR Project. All rights reserved.\r\n * Licensed under the BSD-3-Clause license. 
See LICENSE file in the project root for license information.\r\n */\r\n\r\n// ConvertTo-TS run at 2016-10-04T11:26:30.6852565-07:00\r\n\r\nimport { DecisionEventInfo } from \"./DecisionEventInfo\";\r\nimport { NotNull } from \"../Decorators\";\r\nimport { SimulatorState } from \"./SimulatorState\";\r\nimport { TokenStream } from \"../TokenStream\";\r\n\r\n/**\r\n * This class represents profiling event information for tracking the lookahead\r\n * depth required in order to make a prediction.\r\n *\r\n * @since 4.3\r\n */\r\nexport class LookaheadEventInfo extends DecisionEventInfo {\r\n\t/** The alternative chosen by adaptivePredict(), not necessarily\r\n\t * the outermost alt shown for a rule; left-recursive rules have\r\n\t * user-level alts that differ from the rewritten rule with a (...) block\r\n\t * and a (..)* loop.\r\n\t */\r\n\tpublic predictedAlt: number;\r\n\r\n\t/**\r\n\t * Constructs a new instance of the {@link LookaheadEventInfo} class with\r\n\t * the specified detailed lookahead information.\r\n\t *\r\n\t * @param decision The decision number\r\n\t * @param state The final simulator state containing the necessary\r\n\t * information to determine the result of a prediction, or `undefined` if\r\n\t * the final state is not available\r\n\t * @param input The input token stream\r\n\t * @param startIndex The start index for the current prediction\r\n\t * @param stopIndex The index at which the prediction was finally made\r\n\t * @param fullCtx `true` if the current lookahead is part of an LL\r\n\t * prediction; otherwise, `false` if the current lookahead is part of\r\n\t * an SLL prediction\r\n\t */\r\n\tconstructor(\r\n\t\tdecision: number,\r\n\t\tstate: SimulatorState | undefined,\r\n\t\tpredictedAlt: number,\r\n\t\t@NotNull input: TokenStream,\r\n\t\tstartIndex: number,\r\n\t\tstopIndex: number,\r\n\t\tfullCtx: boolean) {\r\n\r\n\t\tsuper(decision, state, input, startIndex, stopIndex, fullCtx);\r\n\t\tthis.predictedAlt = 
predictedAlt;\r\n\t}\r\n}\r\n", "/*!\r\n * Copyright 2016 The ANTLR Project. All rights reserved.\r\n * Licensed under the BSD-3-Clause license. See LICENSE file in the project root for license information.\r\n */\r\n\r\n// ConvertTo-TS run at 2016-10-04T11:26:35.1914305-07:00\r\n\r\nimport { DecisionEventInfo } from \"./DecisionEventInfo\";\r\nimport { NotNull } from \"../Decorators\";\r\nimport { SemanticContext } from \"./SemanticContext\";\r\nimport { SimulatorState } from \"./SimulatorState\";\r\nimport { TokenStream } from \"../TokenStream\";\r\n\r\n/**\r\n * This class represents profiling event information for semantic predicate\r\n * evaluations which occur during prediction.\r\n *\r\n * @see ParserATNSimulator#evalSemanticContext\r\n *\r\n * @since 4.3\r\n */\r\nexport class PredicateEvalInfo extends DecisionEventInfo {\r\n\t/**\r\n\t * The semantic context which was evaluated.\r\n\t */\r\n\tpublic semctx: SemanticContext;\r\n\t/**\r\n\t * The alternative number for the decision which is guarded by the semantic\r\n\t * context {@link #semctx}. Note that other ATN\r\n\t * configurations may predict the same alternative which are guarded by\r\n\t * other semantic contexts and/or {@link SemanticContext#NONE}.\r\n\t */\r\n\tpublic predictedAlt: number;\r\n\t/**\r\n\t * The result of evaluating the semantic context {@link #semctx}.\r\n\t */\r\n\tpublic evalResult: boolean;\r\n\r\n\t/**\r\n\t * Constructs a new instance of the {@link PredicateEvalInfo} class with the\r\n\t * specified detailed predicate evaluation information.\r\n\t *\r\n\t * @param state The simulator state\r\n\t * @param decision The decision number\r\n\t * @param input The input token stream\r\n\t * @param startIndex The start index for the current prediction\r\n\t * @param stopIndex The index at which the predicate evaluation was\r\n\t * triggered. 
Note that the input stream may be reset to other positions for\r\n\t * the actual evaluation of individual predicates.\r\n\t * @param semctx The semantic context which was evaluated\r\n\t * @param evalResult The results of evaluating the semantic context\r\n\t * @param predictedAlt The alternative number for the decision which is\r\n\t * guarded by the semantic context `semctx`. See {@link #predictedAlt}\r\n\t * for more information.\r\n\t *\r\n\t * @see ParserATNSimulator#evalSemanticContext(SemanticContext, ParserRuleContext, int)\r\n\t * @see SemanticContext#eval(Recognizer, RuleContext)\r\n\t */\r\n\tconstructor(\r\n\t\t@NotNull state: SimulatorState,\r\n\t\tdecision: number,\r\n\t\t@NotNull input: TokenStream,\r\n\t\tstartIndex: number,\r\n\t\tstopIndex: number,\r\n\t\t@NotNull semctx: SemanticContext,\r\n\t\tevalResult: boolean,\r\n\t\tpredictedAlt: number) {\r\n\r\n\t\tsuper(decision, state, input, startIndex, stopIndex, state.useContext);\r\n\t\tthis.semctx = semctx;\r\n\t\tthis.evalResult = evalResult;\r\n\t\tthis.predictedAlt = predictedAlt;\r\n\t}\r\n}\r\n", "/*!\r\n * Copyright 2016 The ANTLR Project. All rights reserved.\r\n * Licensed under the BSD-3-Clause license. 
See LICENSE file in the project root for license information.\r\n */\r\n\r\n// ConvertTo-TS run at 2016-10-04T11:26:36.4188352-07:00\r\n\r\nimport { AmbiguityInfo } from \"./AmbiguityInfo\";\r\nimport { ATN } from \"./ATN\";\r\nimport { ATNConfigSet } from \"./ATNConfigSet\";\r\nimport { ATNSimulator } from \"./ATNSimulator\";\r\nimport { BitSet } from \"../misc/BitSet\";\r\nimport { ContextSensitivityInfo } from \"./ContextSensitivityInfo\";\r\nimport { DecisionInfo } from \"./DecisionInfo\";\r\nimport { DFA } from \"../dfa/DFA\";\r\nimport { DFAState } from \"../dfa/DFAState\";\r\nimport { ErrorInfo } from \"./ErrorInfo\";\r\nimport { NotNull, Override } from \"../Decorators\";\r\nimport { LookaheadEventInfo } from \"./LookaheadEventInfo\";\r\nimport { Parser } from \"../Parser\";\r\nimport { ParserATNSimulator } from \"./ParserATNSimulator\";\r\nimport { ParserRuleContext } from \"../ParserRuleContext\";\r\nimport { PredicateEvalInfo } from \"./PredicateEvalInfo\";\r\nimport { PredictionContextCache } from \"./PredictionContextCache\";\r\nimport { SemanticContext } from \"./SemanticContext\";\r\nimport { SimulatorState } from \"./SimulatorState\";\r\nimport { TokenStream } from \"../TokenStream\";\r\n\r\n/**\r\n * @since 4.3\r\n */\r\nexport class ProfilingATNSimulator extends ParserATNSimulator {\r\n\tprotected decisions: DecisionInfo[];\r\n\tprotected numDecisions: number;\r\n\r\n\tprotected _input: TokenStream | undefined;\r\n\tprotected _startIndex: number = 0;\r\n\tprotected _sllStopIndex: number = 0;\r\n\tprotected _llStopIndex: number = 0;\r\n\r\n\tprotected currentDecision: number = 0;\r\n\tprotected currentState: SimulatorState | undefined;\r\n\r\n\t/** At the point of LL failover, we record how SLL would resolve the conflict so that\r\n\t * we can determine whether or not a decision / input pair is context-sensitive.\r\n\t * If LL gives a different result than SLL's predicted alternative, we have a\r\n\t * context sensitivity for sure. 
The converse is not necessarily true, however.\r\n\t * It's possible that after conflict resolution chooses minimum alternatives,\r\n\t * SLL could get the same answer as LL. Regardless of whether or not the result indicates\r\n\t * an ambiguity, it is not treated as a context sensitivity because LL prediction\r\n\t * was not required in order to produce a correct prediction for this decision and input sequence.\r\n\t * It may in fact still be a context sensitivity but we don't know by looking at the\r\n\t * minimum alternatives for the current input.\r\n\t */\r\n\tprotected conflictingAltResolvedBySLL: number = 0;\r\n\r\n\tconstructor(parser: Parser) {\r\n\t\tsuper(parser.interpreter.atn, parser);\r\n\t\tthis.optimize_ll1 = false;\r\n\t\tthis.reportAmbiguities = true;\r\n\t\tthis.numDecisions = this.atn.decisionToState.length;\r\n\t\tthis.decisions = [];\r\n\t\tfor (let i = 0; i < this.numDecisions; i++) {\r\n\t\t\tthis.decisions.push(new DecisionInfo(i));\r\n\t\t}\r\n\t}\r\n\r\n\tpublic adaptivePredict(/*@NotNull*/ input: TokenStream, decision: number, outerContext: ParserRuleContext | undefined): number;\r\n\tpublic adaptivePredict(/*@NotNull*/ input: TokenStream, decision: number, outerContext: ParserRuleContext | undefined, useContext: boolean): number;\r\n\t@Override\r\n\tpublic adaptivePredict(\r\n\t\t@NotNull input: TokenStream,\r\n\t\tdecision: number,\r\n\t\touterContext: ParserRuleContext | undefined,\r\n\t\tuseContext?: boolean): number {\r\n\t\tif (useContext !== undefined) {\r\n\t\t\treturn super.adaptivePredict(input, decision, outerContext, useContext);\r\n\t\t}\r\n\r\n\t\ttry {\r\n\t\t\tthis._input = input;\r\n\t\t\tthis._startIndex = input.index;\r\n\t\t\t// it's possible for SLL to reach a conflict state without consuming any input\r\n\t\t\tthis._sllStopIndex = this._startIndex - 1;\r\n\t\t\tthis._llStopIndex = -1;\r\n\t\t\tthis.currentDecision = decision;\r\n\t\t\tthis.currentState = undefined;\r\n\t\t\tthis.conflictingAltResolvedBySLL = 
ATN.INVALID_ALT_NUMBER;\r\n\t\t\tlet start: number[] = process.hrtime();\r\n\t\t\tlet alt: number = super.adaptivePredict(input, decision, outerContext);\r\n\t\t\tlet stop: number[] = process.hrtime();\r\n\r\n\t\t\tlet nanoseconds: number = (stop[0] - start[0]) * 1000000000;\r\n\t\t\tif (nanoseconds === 0) {\r\n\t\t\t\tnanoseconds = stop[1] - start[1];\r\n\t\t\t} else {\r\n\t\t\t\t// Add nanoseconds from start to end of that second, plus start of the end second to end\r\n\t\t\t\tnanoseconds += (1000000000 - start[1]) + stop[1];\r\n\t\t\t}\r\n\r\n\t\t\tthis.decisions[decision].timeInPrediction += nanoseconds;\r\n\t\t\tthis.decisions[decision].invocations++;\r\n\r\n\t\t\tlet SLL_k: number = this._sllStopIndex - this._startIndex + 1;\r\n\t\t\tthis.decisions[decision].SLL_TotalLook += SLL_k;\r\n\t\t\tthis.decisions[decision].SLL_MinLook = this.decisions[decision].SLL_MinLook === 0 ? SLL_k : Math.min(this.decisions[decision].SLL_MinLook, SLL_k);\r\n\t\t\tif (SLL_k > this.decisions[decision].SLL_MaxLook) {\r\n\t\t\t\tthis.decisions[decision].SLL_MaxLook = SLL_k;\r\n\t\t\t\tthis.decisions[decision].SLL_MaxLookEvent =\r\n\t\t\t\t\tnew LookaheadEventInfo(decision, undefined, alt, input, this._startIndex, this._sllStopIndex, false);\r\n\t\t\t}\r\n\r\n\t\t\tif (this._llStopIndex >= 0) {\r\n\t\t\t\tlet LL_k: number = this._llStopIndex - this._startIndex + 1;\r\n\t\t\t\tthis.decisions[decision].LL_TotalLook += LL_k;\r\n\t\t\t\tthis.decisions[decision].LL_MinLook = this.decisions[decision].LL_MinLook === 0 ? 
LL_k : Math.min(this.decisions[decision].LL_MinLook, LL_k);\r\n\t\t\t\tif (LL_k > this.decisions[decision].LL_MaxLook) {\r\n\t\t\t\t\tthis.decisions[decision].LL_MaxLook = LL_k;\r\n\t\t\t\t\tthis.decisions[decision].LL_MaxLookEvent =\r\n\t\t\t\t\t\tnew LookaheadEventInfo(decision, undefined, alt, input, this._startIndex, this._llStopIndex, true);\r\n\t\t\t\t}\r\n\t\t\t}\r\n\r\n\t\t\treturn alt;\r\n\t\t}\r\n\t\tfinally {\r\n\t\t\tthis._input = undefined;\r\n\t\t\tthis.currentDecision = -1;\r\n\t\t}\r\n\t}\r\n\r\n\t@Override\r\n\tprotected getStartState(dfa: DFA, input: TokenStream, outerContext: ParserRuleContext, useContext: boolean): SimulatorState | undefined {\r\n\t\tlet state: SimulatorState | undefined = super.getStartState(dfa, input, outerContext, useContext);\r\n\t\tthis.currentState = state;\r\n\t\treturn state;\r\n\t}\r\n\r\n\t@Override\r\n\tprotected computeStartState(dfa: DFA, globalContext: ParserRuleContext, useContext: boolean): SimulatorState {\r\n\t\tlet state: SimulatorState = super.computeStartState(dfa, globalContext, useContext);\r\n\t\tthis.currentState = state;\r\n\t\treturn state;\r\n\t}\r\n\r\n\t@Override\r\n\tprotected computeReachSet(dfa: DFA, previous: SimulatorState, t: number, contextCache: PredictionContextCache): SimulatorState | undefined {\r\n\t\tif (this._input === undefined) {\r\n\t\t\tthrow new Error(\"Invalid state\");\r\n\t\t}\r\n\r\n\t\tlet reachState: SimulatorState | undefined = super.computeReachSet(dfa, previous, t, contextCache);\r\n\t\tif (reachState == null) {\r\n\t\t\t// no reach on current lookahead symbol. 
ERROR.\r\n\t\t\tthis.decisions[this.currentDecision].errors.push(\r\n\t\t\t\tnew ErrorInfo(this.currentDecision, previous, this._input, this._startIndex, this._input.index),\r\n\t\t\t);\r\n\t\t}\r\n\r\n\t\tthis.currentState = reachState;\r\n\t\treturn reachState;\r\n\t}\r\n\r\n\t@Override\r\n\tprotected getExistingTargetState(previousD: DFAState, t: number): DFAState | undefined {\r\n\t\tif (this.currentState === undefined || this._input === undefined) {\r\n\t\t\tthrow new Error(\"Invalid state\");\r\n\t\t}\r\n\r\n\t\t// this method is called after each time the input position advances\r\n\t\tif (this.currentState.useContext) {\r\n\t\t\tthis._llStopIndex = this._input.index;\r\n\t\t}\r\n\t\telse {\r\n\t\t\tthis._sllStopIndex = this._input.index;\r\n\t\t}\r\n\r\n\t\tlet existingTargetState: DFAState | undefined = super.getExistingTargetState(previousD, t);\r\n\t\tif (existingTargetState != null) {\r\n\t\t\t// this method is directly called by execDFA; must construct a SimulatorState\r\n\t\t\t// to represent the current state for this case\r\n\t\t\tthis.currentState = new SimulatorState(this.currentState.outerContext, existingTargetState, this.currentState.useContext, this.currentState.remainingOuterContext);\r\n\r\n\t\t\tif (this.currentState.useContext) {\r\n\t\t\t\tthis.decisions[this.currentDecision].LL_DFATransitions++;\r\n\t\t\t}\r\n\t\t\telse {\r\n\t\t\t\tthis.decisions[this.currentDecision].SLL_DFATransitions++; // count only if we transition over a DFA state\r\n\t\t\t}\r\n\r\n\t\t\tif (existingTargetState === ATNSimulator.ERROR) {\r\n\t\t\t\tlet state: SimulatorState = new SimulatorState(this.currentState.outerContext, previousD, this.currentState.useContext, this.currentState.remainingOuterContext);\r\n\t\t\t\tthis.decisions[this.currentDecision].errors.push(\r\n\t\t\t\t\tnew ErrorInfo(this.currentDecision, state, this._input, this._startIndex, this._input.index),\r\n\t\t\t\t);\r\n\t\t\t}\r\n\t\t}\r\n\r\n\t\treturn 
existingTargetState;\r\n\t}\r\n\r\n\t@Override\r\n\tprotected computeTargetState(dfa: DFA, s: DFAState, remainingGlobalContext: ParserRuleContext, t: number, useContext: boolean, contextCache: PredictionContextCache): [DFAState, ParserRuleContext | undefined] {\r\n\t\tlet targetState: [DFAState, ParserRuleContext | undefined] = super.computeTargetState(dfa, s, remainingGlobalContext, t, useContext, contextCache);\r\n\r\n\t\tif (useContext) {\r\n\t\t\tthis.decisions[this.currentDecision].LL_ATNTransitions++;\r\n\t\t}\r\n\t\telse {\r\n\t\t\tthis.decisions[this.currentDecision].SLL_ATNTransitions++;\r\n\t\t}\r\n\r\n\t\treturn targetState;\r\n\t}\r\n\r\n\t@Override\r\n\tprotected evalSemanticContextImpl(pred: SemanticContext, parserCallStack: ParserRuleContext, alt: number): boolean {\r\n\t\tif (this.currentState === undefined || this._input === undefined) {\r\n\t\t\tthrow new Error(\"Invalid state\");\r\n\t\t}\r\n\r\n\t\tlet result: boolean = super.evalSemanticContextImpl(pred, parserCallStack, alt);\r\n\t\tif (!(pred instanceof SemanticContext.PrecedencePredicate)) {\r\n\t\t\tlet fullContext: boolean = this._llStopIndex >= 0;\r\n\t\t\tlet stopIndex: number = fullContext ? 
this._llStopIndex : this._sllStopIndex;\r\n\t\t\tthis.decisions[this.currentDecision].predicateEvals.push(\r\n\t\t\t\tnew PredicateEvalInfo(this.currentState, this.currentDecision, this._input, this._startIndex, stopIndex, pred, result, alt),\r\n\t\t\t);\r\n\t\t}\r\n\r\n\t\treturn result;\r\n\t}\r\n\r\n\t@Override\r\n\tprotected reportContextSensitivity(dfa: DFA, prediction: number, acceptState: SimulatorState, startIndex: number, stopIndex: number): void {\r\n\t\tif (this._input === undefined) {\r\n\t\t\tthrow new Error(\"Invalid state\");\r\n\t\t}\r\n\r\n\t\tif (prediction !== this.conflictingAltResolvedBySLL) {\r\n\t\t\tthis.decisions[this.currentDecision].contextSensitivities.push(\r\n\t\t\t\tnew ContextSensitivityInfo(this.currentDecision, acceptState, this._input, startIndex, stopIndex),\r\n\t\t\t);\r\n\t\t}\r\n\t\tsuper.reportContextSensitivity(dfa, prediction, acceptState, startIndex, stopIndex);\r\n\t}\r\n\r\n\t@Override\r\n\tprotected reportAttemptingFullContext(dfa: DFA, conflictingAlts: BitSet, conflictState: SimulatorState, startIndex: number, stopIndex: number): void {\r\n\t\tif (conflictingAlts != null) {\r\n\t\t\tthis.conflictingAltResolvedBySLL = conflictingAlts.nextSetBit(0);\r\n\t\t}\r\n\t\telse {\r\n\t\t\tthis.conflictingAltResolvedBySLL = conflictState.s0.configs.getRepresentedAlternatives().nextSetBit(0);\r\n\t\t}\r\n\t\tthis.decisions[this.currentDecision].LL_Fallback++;\r\n\t\tsuper.reportAttemptingFullContext(dfa, conflictingAlts, conflictState, startIndex, stopIndex);\r\n\t}\r\n\r\n\t@Override\r\n\tprotected reportAmbiguity(@NotNull dfa: DFA, D: DFAState, startIndex: number, stopIndex: number, exact: boolean, @NotNull ambigAlts: BitSet, @NotNull configs: ATNConfigSet): void {\r\n\t\tif (this.currentState === undefined || this._input === undefined) {\r\n\t\t\tthrow new Error(\"Invalid state\");\r\n\t\t}\r\n\r\n\t\tlet prediction: number;\r\n\t\tif (ambigAlts != null) {\r\n\t\t\tprediction = ambigAlts.nextSetBit(0);\r\n\t\t}\r\n\t\telse 
{\r\n\t\t\tprediction = configs.getRepresentedAlternatives().nextSetBit(0);\r\n\t\t}\r\n\t\tif (this.conflictingAltResolvedBySLL !== ATN.INVALID_ALT_NUMBER && prediction !== this.conflictingAltResolvedBySLL) {\r\n\t\t\t// Even though this is an ambiguity we are reporting, we can\r\n\t\t\t// still detect some context sensitivities. Both SLL and LL\r\n\t\t\t// are showing a conflict, hence an ambiguity, but if they resolve\r\n\t\t\t// to different minimum alternatives we have also identified a\r\n\t\t\t// context sensitivity.\r\n\t\t\tthis.decisions[this.currentDecision].contextSensitivities.push(\r\n\t\t\t\tnew ContextSensitivityInfo(this.currentDecision, this.currentState, this._input, startIndex, stopIndex),\r\n\t\t\t);\r\n\t\t}\r\n\t\tthis.decisions[this.currentDecision].ambiguities.push(\r\n\t\t\tnew AmbiguityInfo(this.currentDecision, this.currentState, ambigAlts, this._input, startIndex, stopIndex),\r\n\t\t);\r\n\t\tsuper.reportAmbiguity(dfa, D, startIndex, stopIndex, exact, ambigAlts, configs);\r\n\t}\r\n\r\n\t// ---------------------------------------------------------------------\r\n\r\n\tpublic getDecisionInfo(): DecisionInfo[] {\r\n\t\treturn this.decisions;\r\n\t}\r\n\r\n\tpublic getCurrentState(): SimulatorState | undefined {\r\n\t\treturn this.currentState;\r\n\t}\r\n}\r\n", "/*!\r\n * Copyright 2016 The ANTLR Project. All rights reserved.\r\n * Licensed under the BSD-3-Clause license. 
See LICENSE file in the project root for license information.\r\n */\r\n\r\n// ConvertTo-TS run at 2016-10-04T11:26:52.4399193-07:00\r\n\r\nimport * as assert from \"assert\";\r\nimport * as Utils from \"./misc/Utils\";\r\n\r\nimport { ANTLRErrorListener } from \"./ANTLRErrorListener\";\r\nimport { ANTLRErrorStrategy } from \"./ANTLRErrorStrategy\";\r\nimport { ATN } from \"./atn/ATN\";\r\nimport { ATNDeserializationOptions } from \"./atn/ATNDeserializationOptions\";\r\nimport { ATNDeserializer } from \"./atn/ATNDeserializer\";\r\nimport { ATNSimulator } from \"./atn/ATNSimulator\";\r\nimport { ATNState } from \"./atn/ATNState\";\r\nimport { DefaultErrorStrategy } from \"./DefaultErrorStrategy\";\r\nimport { DFA } from \"./dfa/DFA\";\r\nimport { ErrorNode } from \"./tree/ErrorNode\";\r\nimport { IntegerStack } from \"./misc/IntegerStack\";\r\nimport { IntervalSet } from \"./misc/IntervalSet\";\r\nimport { IntStream } from \"./IntStream\";\r\nimport { Lexer } from \"./Lexer\";\r\nimport { Override, NotNull, Nullable } from \"./Decorators\";\r\nimport { ParseInfo } from \"./atn/ParseInfo\";\r\nimport { ParserATNSimulator } from \"./atn/ParserATNSimulator\";\r\nimport { ParserErrorListener } from \"./ParserErrorListener\";\r\nimport { ParserRuleContext } from \"./ParserRuleContext\";\r\nimport { ParseTreeListener } from \"./tree/ParseTreeListener\";\r\nimport { ParseTreePattern } from \"./tree/pattern/ParseTreePattern\";\r\nimport { ProxyParserErrorListener } from \"./ProxyParserErrorListener\";\r\nimport { RecognitionException } from \"./RecognitionException\";\r\nimport { Recognizer } from \"./Recognizer\";\r\nimport { RuleContext } from \"./RuleContext\";\r\nimport { RuleTransition } from \"./atn/RuleTransition\";\r\nimport { TerminalNode } from \"./tree/TerminalNode\";\r\nimport { Token } from \"./Token\";\r\nimport { TokenFactory } from \"./TokenFactory\";\r\nimport { TokenSource } from \"./TokenSource\";\r\nimport { TokenStream } from 
\"./TokenStream\";\r\n\r\nclass TraceListener implements ParseTreeListener {\r\n\tconstructor(private ruleNames: string[], private tokenStream: TokenStream) {\r\n\t}\r\n\r\n\t@Override\r\n\tpublic enterEveryRule(ctx: ParserRuleContext): void {\r\n\t\tconsole.log(\"enter \" + this.ruleNames[ctx.ruleIndex] +\r\n\t\t\t\", LT(1)=\" + this.tokenStream.LT(1).text);\r\n\t}\r\n\r\n\t@Override\r\n\tpublic exitEveryRule(ctx: ParserRuleContext): void {\r\n\t\tconsole.log(\"exit \" + this.ruleNames[ctx.ruleIndex] +\r\n\t\t\t\", LT(1)=\" + this.tokenStream.LT(1).text);\r\n\t}\r\n\r\n\t@Override\r\n\tpublic visitErrorNode(node: ErrorNode): void {\r\n\t\t// intentionally empty\r\n\t}\r\n\r\n\t@Override\r\n\tpublic visitTerminal(node: TerminalNode): void {\r\n\t\tlet parent = node.parent!.ruleContext;\r\n\t\tlet token: Token = node.symbol;\r\n\t\tconsole.log(\"consume \" + token + \" rule \" + this.ruleNames[parent.ruleIndex]);\r\n\t}\r\n}\r\n\r\n/** This is all the parsing support code essentially; most of it is error recovery stuff. */\r\nexport abstract class Parser extends Recognizer {\r\n\t/**\r\n\t * This field maps from the serialized ATN string to the deserialized {@link ATN} with\r\n\t * bypass alternatives.\r\n\t *\r\n\t * @see ATNDeserializationOptions.isGenerateRuleBypassTransitions\r\n\t */\r\n\tprivate static readonly bypassAltsAtnCache = new Map();\r\n\r\n\t/**\r\n\t * The error handling strategy for the parser. 
The default value is a new\r\n\t * instance of {@link DefaultErrorStrategy}.\r\n\t *\r\n\t * @see #getErrorHandler\r\n\t * @see #setErrorHandler\r\n\t */\r\n\t@NotNull\r\n\tprotected _errHandler: ANTLRErrorStrategy = new DefaultErrorStrategy();\r\n\r\n\t/**\r\n\t * The input stream.\r\n\t *\r\n\t * @see #getInputStream\r\n\t * @see #setInputStream\r\n\t */\r\n\tprotected _input!: TokenStream;\r\n\r\n\tprotected readonly _precedenceStack: IntegerStack = new IntegerStack();\r\n\r\n\t/**\r\n\t * The {@link ParserRuleContext} object for the currently executing rule.\r\n\t *\r\n\t * This is always non-undefined during the parsing process.\r\n\t */\r\n\tprotected _ctx!: ParserRuleContext;\r\n\r\n\t/**\r\n\t * Specifies whether or not the parser should construct a parse tree during\r\n\t * the parsing process. The default value is `true`.\r\n\t *\r\n\t * @see `buildParseTree`\r\n\t */\r\n\tprivate _buildParseTrees: boolean = true;\r\n\r\n\t/**\r\n\t * When {@link #setTrace}`(true)` is called, a reference to the\r\n\t * {@link TraceListener} is stored here so it can be easily removed in a\r\n\t * later call to {@link #setTrace}`(false)`. The listener itself is\r\n\t * implemented as a parser listener so this field is not directly used by\r\n\t * other parser methods.\r\n\t */\r\n\tprivate _tracer: TraceListener | undefined;\r\n\r\n\t/**\r\n\t * The list of {@link ParseTreeListener} listeners registered to receive\r\n\t * events during the parse.\r\n\t *\r\n\t * @see #addParseListener\r\n\t */\r\n\tprotected _parseListeners: ParseTreeListener[] = [];\r\n\r\n\t/**\r\n\t * The number of syntax errors reported during parsing. This value is\r\n\t * incremented each time {@link #notifyErrorListeners} is called.\r\n\t */\r\n\tprotected _syntaxErrors: number = 0;\r\n\r\n\t/** Indicates parser has match()ed EOF token. See {@link #exitRule()}. 
*/\r\n\tprotected matchedEOF: boolean = false;\r\n\r\n\tconstructor(input: TokenStream) {\r\n\t\tsuper();\r\n\t\tthis._precedenceStack.push(0);\r\n\t\tthis.inputStream = input;\r\n\t}\r\n\r\n\t/** reset the parser's state */\r\n\tpublic reset(): void;\r\n\tpublic reset(resetInput: boolean): void;\r\n\tpublic reset(resetInput?: boolean): void {\r\n\t\t// Note: this method executes when not parsing, so _ctx can be undefined\r\n\t\tif (resetInput === undefined || resetInput) {\r\n\t\t\tthis.inputStream.seek(0);\r\n\t\t}\r\n\r\n\t\tthis._errHandler.reset(this);\r\n\t\tthis._ctx = undefined as any;\r\n\t\tthis._syntaxErrors = 0;\r\n\t\tthis.matchedEOF = false;\r\n\t\tthis.isTrace = false;\r\n\t\tthis._precedenceStack.clear();\r\n\t\tthis._precedenceStack.push(0);\r\n\t\tlet interpreter: ATNSimulator = this.interpreter;\r\n\t\tif (interpreter != null) {\r\n\t\t\tinterpreter.reset();\r\n\t\t}\r\n\t}\r\n\r\n\t/**\r\n\t * Match current input symbol against `ttype`. If the symbol type\r\n\t * matches, {@link ANTLRErrorStrategy#reportMatch} and {@link #consume} are\r\n\t * called to complete the match process.\r\n\t *\r\n\t * If the symbol type does not match,\r\n\t * {@link ANTLRErrorStrategy#recoverInline} is called on the current error\r\n\t * strategy to attempt recovery. 
If {@link #getBuildParseTree} is\r\n\t * `true` and the token index of the symbol returned by\r\n\t * {@link ANTLRErrorStrategy#recoverInline} is -1, the symbol is added to\r\n\t * the parse tree by calling {@link #createErrorNode(ParserRuleContext, Token)} then\r\n\t * {@link ParserRuleContext#addErrorNode(ErrorNode)}.\r\n\t *\r\n\t * @param ttype the token type to match\r\n\t * @returns the matched symbol\r\n\t * @ if the current input symbol did not match\r\n\t * `ttype` and the error strategy could not recover from the\r\n\t * mismatched symbol\r\n\t */\r\n\t@NotNull\r\n\tpublic match(ttype: number): Token {\r\n\t\tlet t: Token = this.currentToken;\r\n\t\tif (t.type === ttype) {\r\n\t\t\tif (ttype === Token.EOF) {\r\n\t\t\t\tthis.matchedEOF = true;\r\n\t\t\t}\r\n\t\t\tthis._errHandler.reportMatch(this);\r\n\t\t\tthis.consume();\r\n\t\t}\r\n\t\telse {\r\n\t\t\tt = this._errHandler.recoverInline(this);\r\n\t\t\tif (this._buildParseTrees && t.tokenIndex === -1) {\r\n\t\t\t\t// we must have conjured up a new token during single token insertion\r\n\t\t\t\t// if it's not the current symbol\r\n\t\t\t\tthis._ctx.addErrorNode(this.createErrorNode(this._ctx, t));\r\n\t\t\t}\r\n\t\t}\r\n\t\treturn t;\r\n\t}\r\n\r\n\t/**\r\n\t * Match current input symbol as a wildcard. If the symbol type matches\r\n\t * (i.e. has a value greater than 0), {@link ANTLRErrorStrategy#reportMatch}\r\n\t * and {@link #consume} are called to complete the match process.\r\n\t *\r\n\t * If the symbol type does not match,\r\n\t * {@link ANTLRErrorStrategy#recoverInline} is called on the current error\r\n\t * strategy to attempt recovery. 
If {@link #getBuildParseTree} is\r\n\t * `true` and the token index of the symbol returned by\r\n\t * {@link ANTLRErrorStrategy#recoverInline} is -1, the symbol is added to\r\n\t * the parse tree by calling {@link Parser#createErrorNode(ParserRuleContext, Token)} then\r\n\t * {@link ParserRuleContext#addErrorNode(ErrorNode)}.\r\n\t *\r\n\t * @returns the matched symbol\r\n\t * @ if the current input symbol did not match\r\n\t * a wildcard and the error strategy could not recover from the mismatched\r\n\t * symbol\r\n\t */\r\n\t@NotNull\r\n\tpublic matchWildcard(): Token {\r\n\t\tlet t: Token = this.currentToken;\r\n\t\tif (t.type > 0) {\r\n\t\t\tthis._errHandler.reportMatch(this);\r\n\t\t\tthis.consume();\r\n\t\t}\r\n\t\telse {\r\n\t\t\tt = this._errHandler.recoverInline(this);\r\n\t\t\tif (this._buildParseTrees && t.tokenIndex === -1) {\r\n\t\t\t\t// we must have conjured up a new token during single token insertion\r\n\t\t\t\t// if it's not the current symbol\r\n\t\t\t\tthis._ctx.addErrorNode(this.createErrorNode(this._ctx, t));\r\n\t\t\t}\r\n\t\t}\r\n\r\n\t\treturn t;\r\n\t}\r\n\r\n\t/**\r\n\t * Track the {@link ParserRuleContext} objects during the parse and hook\r\n\t * them up using the {@link ParserRuleContext#children} list so that it\r\n\t * forms a parse tree. The {@link ParserRuleContext} returned from the start\r\n\t * rule represents the root of the parse tree.\r\n\t *\r\n\t * Note that if we are not building parse trees, rule contexts only point\r\n\t * upwards. When a rule exits, it returns the context but that gets garbage\r\n\t * collected if nobody holds a reference. It points upwards but nobody\r\n\t * points at it.\r\n\t *\r\n\t * When we build parse trees, we are adding all of these contexts to\r\n\t * {@link ParserRuleContext#children} list. 
Contexts are then not candidates\r\n\t * for garbage collection.\r\n\t */\r\n\tset buildParseTree(buildParseTrees: boolean) {\r\n\t\tthis._buildParseTrees = buildParseTrees;\r\n\t}\r\n\r\n\t/**\r\n\t * Gets whether or not a complete parse tree will be constructed while\r\n\t * parsing. This property is `true` for a newly constructed parser.\r\n\t *\r\n\t * @returns `true` if a complete parse tree will be constructed while\r\n\t * parsing, otherwise `false`\r\n\t */\r\n\tget buildParseTree(): boolean {\r\n\t\treturn this._buildParseTrees;\r\n\t}\r\n\r\n\t@NotNull\r\n\tpublic getParseListeners(): ParseTreeListener[] {\r\n\t\treturn this._parseListeners;\r\n\t}\r\n\r\n\t/**\r\n\t * Registers `listener` to receive events during the parsing process.\r\n\t *\r\n\t * To support output-preserving grammar transformations (including but not\r\n\t * limited to left-recursion removal, automated left-factoring, and\r\n\t * optimized code generation), calls to listener methods during the parse\r\n\t * may differ substantially from calls made by\r\n\t * {@link ParseTreeWalker#DEFAULT} used after the parse is complete. In\r\n\t * particular, rule entry and exit events may occur in a different order\r\n\t * during the parse than after the parser. In addition, calls to certain\r\n\t * rule entry methods may be omitted.\r\n\t *\r\n\t * With the following specific exceptions, calls to listener events are\r\n\t * *deterministic*, i.e. 
for identical input the calls to listener\r\n\t * methods will be the same.\r\n\t *\r\n\t * * Alterations to the grammar used to generate code may change the\r\n\t * behavior of the listener calls.\r\n\t * * Alterations to the command line options passed to ANTLR 4 when\r\n\t * generating the parser may change the behavior of the listener calls.\r\n\t * * Changing the version of the ANTLR Tool used to generate the parser\r\n\t * may change the behavior of the listener calls.\r\n\t *\r\n\t * @param listener the listener to add\r\n\t *\r\n\t * @throws {@link TypeError} if `listener` is `undefined`\r\n\t */\r\n\tpublic addParseListener(@NotNull listener: ParseTreeListener): void {\r\n\t\tif (listener == null) {\r\n\t\t\tthrow new TypeError(\"listener cannot be null\");\r\n\t\t}\r\n\r\n\t\tthis._parseListeners.push(listener);\r\n\t}\r\n\r\n\t/**\r\n\t * Remove `listener` from the list of parse listeners.\r\n\t *\r\n\t * If `listener` is `undefined` or has not been added as a parse\r\n\t * listener, this method does nothing.\r\n\t *\r\n\t * @see #addParseListener\r\n\t *\r\n\t * @param listener the listener to remove\r\n\t */\r\n\tpublic removeParseListener(listener: ParseTreeListener): void {\r\n\t\tlet index = this._parseListeners.findIndex((l) => l === listener);\r\n\t\tif (index !== -1) {\r\n\t\t\tthis._parseListeners.splice(index, 1);\r\n\t\t}\r\n\t}\r\n\r\n\r\n\t/**\r\n\t * Remove all parse listeners.\r\n\t *\r\n\t * @see #addParseListener\r\n\t */\r\n\tpublic removeParseListeners(): void {\r\n\t\tthis._parseListeners.length = 0;\r\n\t}\r\n\r\n\t/**\r\n\t * Notify any parse listeners of an enter rule event.\r\n\t *\r\n\t * @see #addParseListener\r\n\t */\r\n\tprotected triggerEnterRuleEvent(): void {\r\n\t\tfor (let listener of this._parseListeners) {\r\n\t\t\tif (listener.enterEveryRule) {\r\n\t\t\t\tlistener.enterEveryRule(this._ctx);\r\n\t\t\t}\r\n\r\n\t\t\tthis._ctx.enterRule(listener);\r\n\t\t}\r\n\t}\r\n\r\n\t/**\r\n\t * Notify any parse listeners of an exit 
rule event.\r\n\t *\r\n\t * @see #addParseListener\r\n\t */\r\n\tprotected triggerExitRuleEvent(): void {\r\n\t\t// reverse order walk of listeners\r\n\t\tfor (let i = this._parseListeners.length - 1; i >= 0; i--) {\r\n\t\t\tlet listener: ParseTreeListener = this._parseListeners[i];\r\n\t\t\tthis._ctx.exitRule(listener);\r\n\t\t\tif (listener.exitEveryRule) {\r\n\t\t\t\tlistener.exitEveryRule(this._ctx);\r\n\t\t\t}\r\n\t\t}\r\n\t}\r\n\r\n\t/**\r\n\t * Gets the number of syntax errors reported during parsing. This value is\r\n\t * incremented each time {@link #notifyErrorListeners} is called.\r\n\t *\r\n\t * @see #notifyErrorListeners\r\n\t */\r\n\tget numberOfSyntaxErrors(): number {\r\n\t\treturn this._syntaxErrors;\r\n\t}\r\n\r\n\tget tokenFactory(): TokenFactory {\r\n\t\treturn this._input.tokenSource.tokenFactory;\r\n\t}\r\n\r\n\t/**\r\n\t * The ATN with bypass alternatives is expensive to create so we create it\r\n\t * lazily.\r\n\t *\r\n\t * @ if the current parser does not\r\n\t * implement the `serializedATN` property.\r\n\t */\r\n\t@NotNull\r\n\tpublic getATNWithBypassAlts(): ATN {\r\n\t\tlet serializedAtn: string = this.serializedATN;\r\n\t\tif (serializedAtn == null) {\r\n\t\t\tthrow new Error(\"The current parser does not support an ATN with bypass alternatives.\");\r\n\t\t}\r\n\r\n\t\tlet result = Parser.bypassAltsAtnCache.get(serializedAtn);\r\n\t\tif (result == null) {\r\n\t\t\tlet deserializationOptions: ATNDeserializationOptions = new ATNDeserializationOptions();\r\n\t\t\tdeserializationOptions.isGenerateRuleBypassTransitions = true;\r\n\t\t\tresult = new ATNDeserializer(deserializationOptions).deserialize(Utils.toCharArray(serializedAtn));\r\n\t\t\tParser.bypassAltsAtnCache.set(serializedAtn, result);\r\n\t\t}\r\n\r\n\t\treturn result;\r\n\t}\r\n\r\n\t/**\r\n\t * The preferred method of getting a tree pattern. 
For example, here's a\r\n\t * sample use:\r\n\t *\r\n\t * ```\r\n\t * let t: ParseTree = parser.expr();\r\n\t * let p: ParseTreePattern = await parser.compileParseTreePattern(\"+0\", MyParser.RULE_expr);\r\n\t * let m: ParseTreeMatch = p.match(t);\r\n\t * let id: string = m.get(\"ID\");\r\n\t * ```\r\n\t */\r\n\tpublic compileParseTreePattern(pattern: string, patternRuleIndex: number): Promise;\r\n\r\n\t/**\r\n\t * The same as {@link #compileParseTreePattern(String, int)} but specify a\r\n\t * {@link Lexer} rather than trying to deduce it from this parser.\r\n\t */\r\n\tpublic compileParseTreePattern(pattern: string, patternRuleIndex: number, lexer?: Lexer): Promise;\r\n\r\n\tpublic async compileParseTreePattern(pattern: string, patternRuleIndex: number, lexer?: Lexer): Promise {\r\n\t\tif (!lexer) {\r\n\t\t\tif (this.inputStream) {\r\n\t\t\t\tlet tokenSource = this.inputStream.tokenSource;\r\n\t\t\t\tif (tokenSource instanceof Lexer) {\r\n\t\t\t\t\tlexer = tokenSource;\r\n\t\t\t\t}\r\n\t\t\t}\r\n\r\n\t\t\tif (!lexer) {\r\n\t\t\t\tthrow new Error(\"Parser can't discover a lexer to use\");\r\n\t\t\t}\r\n\t\t}\r\n\r\n\t\tlet currentLexer = lexer;\r\n\t\tlet m = await import(\"./tree/pattern/ParseTreePatternMatcher\");\r\n\t\tlet matcher = new m.ParseTreePatternMatcher(currentLexer, this);\r\n\t\treturn matcher.compile(pattern, patternRuleIndex);\r\n\t}\r\n\r\n\t@NotNull\r\n\tget errorHandler(): ANTLRErrorStrategy {\r\n\t\treturn this._errHandler;\r\n\t}\r\n\r\n\tset errorHandler(@NotNull handler: ANTLRErrorStrategy) {\r\n\t\tthis._errHandler = handler;\r\n\t}\r\n\r\n\t@Override\r\n\tget inputStream(): TokenStream {\r\n\t\treturn this._input;\r\n\t}\r\n\r\n\t/** Set the token stream and reset the parser. 
*/\r\n\tset inputStream(input: TokenStream) {\r\n\t\tthis.reset(false);\r\n\t\tthis._input = input;\r\n\t}\r\n\r\n\t/** Match needs to return the current input symbol, which gets put\r\n\t * into the label for the associated token ref; e.g., x=ID.\r\n\t */\r\n\t@NotNull\r\n\tget currentToken(): Token {\r\n\t\treturn this._input.LT(1);\r\n\t}\r\n\r\n\tpublic notifyErrorListeners(/*@NotNull*/ msg: string): void;\r\n\tpublic notifyErrorListeners(/*@NotNull*/ msg: string, /*@NotNull*/ offendingToken: Token | null, e: RecognitionException | undefined): void;\r\n\r\n\tpublic notifyErrorListeners(msg: string, offendingToken?: Token | null, e?: RecognitionException | undefined): void {\r\n\t\tif (offendingToken === undefined) {\r\n\t\t\toffendingToken = this.currentToken;\r\n\t\t} else if (offendingToken === null) {\r\n\t\t\toffendingToken = undefined;\r\n\t\t}\r\n\r\n\t\tthis._syntaxErrors++;\r\n\t\tlet line: number = -1;\r\n\t\tlet charPositionInLine: number = -1;\r\n\t\tif (offendingToken != null) {\r\n\t\t\tline = offendingToken.line;\r\n\t\t\tcharPositionInLine = offendingToken.charPositionInLine;\r\n\t\t}\r\n\r\n\t\tlet listener = this.getErrorListenerDispatch();\r\n\t\tif (listener.syntaxError) {\r\n\t\t\tlistener.syntaxError(this, offendingToken, line, charPositionInLine, msg, e);\r\n\t\t}\r\n\t}\r\n\r\n\t/**\r\n\t * Consume and return the [current symbol](`currentToken`).\r\n\t *\r\n\t * E.g., given the following input with `A` being the current\r\n\t * lookahead symbol, this function moves the cursor to `B` and returns\r\n\t * `A`.\r\n\t *\r\n\t * ```\r\n\t * A B\r\n\t * ^\r\n\t * ```\r\n\t *\r\n\t * If the parser is not in error recovery mode, the consumed symbol is added\r\n\t * to the parse tree using {@link ParserRuleContext#addChild(TerminalNode)}, and\r\n\t * {@link ParseTreeListener#visitTerminal} is called on any parse listeners.\r\n\t * If the parser *is* in error recovery mode, the consumed symbol is\r\n\t * added to the parse tree using {@link 
#createErrorNode(ParserRuleContext, Token)} then\r\n\t * {@link ParserRuleContext#addErrorNode(ErrorNode)} and\r\n\t * {@link ParseTreeListener#visitErrorNode} is called on any parse\r\n\t * listeners.\r\n\t */\r\n\tpublic consume(): Token {\r\n\t\tlet o: Token = this.currentToken;\r\n\t\tif (o.type !== Parser.EOF) {\r\n\t\t\tthis.inputStream.consume();\r\n\t\t}\r\n\t\tlet hasListener: boolean = this._parseListeners.length !== 0;\r\n\t\tif (this._buildParseTrees || hasListener) {\r\n\t\t\tif (this._errHandler.inErrorRecoveryMode(this)) {\r\n\t\t\t\tlet node: ErrorNode = this._ctx.addErrorNode(this.createErrorNode(this._ctx, o));\r\n\t\t\t\tif (hasListener) {\r\n\t\t\t\t\tfor (let listener of this._parseListeners) {\r\n\t\t\t\t\t\tif (listener.visitErrorNode) {\r\n\t\t\t\t\t\t\tlistener.visitErrorNode(node);\r\n\t\t\t\t\t\t}\r\n\t\t\t\t\t}\r\n\t\t\t\t}\r\n\t\t\t}\r\n\t\t\telse {\r\n\t\t\t\tlet node: TerminalNode = this.createTerminalNode(this._ctx, o);\r\n\t\t\t\tthis._ctx.addChild(node);\r\n\t\t\t\tif (hasListener) {\r\n\t\t\t\t\tfor (let listener of this._parseListeners) {\r\n\t\t\t\t\t\tif (listener.visitTerminal) {\r\n\t\t\t\t\t\t\tlistener.visitTerminal(node);\r\n\t\t\t\t\t\t}\r\n\t\t\t\t\t}\r\n\t\t\t\t}\r\n\t\t\t}\r\n\t\t}\r\n\t\treturn o;\r\n\t}\r\n\r\n\t/**\r\n\t * How to create a token leaf node associated with a parent.\r\n\t * Typically, the terminal node to create is not a function of the parent.\r\n\t *\r\n\t * @since 4.7\r\n\t */\r\n\tpublic createTerminalNode(parent: ParserRuleContext, t: Token): TerminalNode {\r\n\t\treturn new TerminalNode(t);\r\n\t}\r\n\r\n\t/**\r\n\t * How to create an error node, given a token, associated with a parent.\r\n\t * Typically, the error node to create is not a function of the parent.\r\n\t *\r\n\t * @since 4.7\r\n\t */\r\n\tpublic createErrorNode(parent: ParserRuleContext, t: Token): ErrorNode {\r\n\t\treturn new ErrorNode(t);\r\n\t}\r\n\r\n\tprotected addContextToParseTree(): void {\r\n\t\tlet parent = 
this._ctx._parent as ParserRuleContext | undefined;\r\n\t\t// add current context to parent if we have a parent\r\n\t\tif (parent != null) {\r\n\t\t\tparent.addChild(this._ctx);\r\n\t\t}\r\n\t}\r\n\r\n\t/**\r\n\t * Always called by generated parsers upon entry to a rule. Access field\r\n\t * {@link #_ctx} get the current context.\r\n\t */\r\n\tpublic enterRule(@NotNull localctx: ParserRuleContext, state: number, ruleIndex: number): void {\r\n\t\tthis.state = state;\r\n\t\tthis._ctx = localctx;\r\n\t\tthis._ctx._start = this._input.LT(1);\r\n\t\tif (this._buildParseTrees) {\r\n\t\t\tthis.addContextToParseTree();\r\n\t\t}\r\n\t\tthis.triggerEnterRuleEvent();\r\n\t}\r\n\r\n\tpublic enterLeftFactoredRule(localctx: ParserRuleContext, state: number, ruleIndex: number): void {\r\n\t\tthis.state = state;\r\n\t\tif (this._buildParseTrees) {\r\n\t\t\tlet factoredContext = this._ctx.getChild(this._ctx.childCount - 1) as ParserRuleContext;\r\n\t\t\tthis._ctx.removeLastChild();\r\n\t\t\tfactoredContext._parent = localctx;\r\n\t\t\tlocalctx.addChild(factoredContext);\r\n\t\t}\r\n\r\n\t\tthis._ctx = localctx;\r\n\t\tthis._ctx._start = this._input.LT(1);\r\n\t\tif (this._buildParseTrees) {\r\n\t\t\tthis.addContextToParseTree();\r\n\t\t}\r\n\r\n\t\tthis.triggerEnterRuleEvent();\r\n\t}\r\n\r\n\tpublic exitRule(): void {\r\n\t\tif (this.matchedEOF) {\r\n\t\t\t// if we have matched EOF, it cannot consume past EOF so we use LT(1) here\r\n\t\t\tthis._ctx._stop = this._input.LT(1); // LT(1) will be end of file\r\n\t\t}\r\n\t\telse {\r\n\t\t\tthis._ctx._stop = this._input.tryLT(-1); // stop node is what we just matched\r\n\t\t}\r\n\t\t// trigger event on _ctx, before it reverts to parent\r\n\t\tthis.triggerExitRuleEvent();\r\n\t\tthis.state = this._ctx.invokingState;\r\n\t\tthis._ctx = this._ctx._parent as ParserRuleContext;\r\n\t}\r\n\r\n\tpublic enterOuterAlt(localctx: ParserRuleContext, altNum: number): void {\r\n\t\tlocalctx.altNumber = altNum;\r\n\t\t// if we have new localctx, make 
sure we replace existing ctx\r\n\t\t// that is previous child of parse tree\r\n\t\tif (this._buildParseTrees && this._ctx !== localctx) {\r\n\t\t\tlet parent = this._ctx._parent as ParserRuleContext | undefined;\r\n\t\t\tif (parent != null) {\r\n\t\t\t\tparent.removeLastChild();\r\n\t\t\t\tparent.addChild(localctx);\r\n\t\t\t}\r\n\t\t}\r\n\t\tthis._ctx = localctx;\r\n\t}\r\n\r\n\t/**\r\n\t * Get the precedence level for the top-most precedence rule.\r\n\t *\r\n\t * @returns The precedence level for the top-most precedence rule, or -1 if\r\n\t * the parser context is not nested within a precedence rule.\r\n\t */\r\n\tget precedence(): number {\r\n\t\tif (this._precedenceStack.isEmpty) {\r\n\t\t\treturn -1;\r\n\t\t}\r\n\r\n\t\treturn this._precedenceStack.peek();\r\n\t}\r\n\r\n\tpublic enterRecursionRule(localctx: ParserRuleContext, state: number, ruleIndex: number, precedence: number): void {\r\n\t\tthis.state = state;\r\n\t\tthis._precedenceStack.push(precedence);\r\n\t\tthis._ctx = localctx;\r\n\t\tthis._ctx._start = this._input.LT(1);\r\n\t\tthis.triggerEnterRuleEvent(); // simulates rule entry for left-recursive rules\r\n\t}\r\n\r\n\t/** Like {@link #enterRule} but for recursive rules.\r\n\t * Make the current context the child of the incoming localctx.\r\n\t */\r\n\tpublic pushNewRecursionContext(localctx: ParserRuleContext, state: number, ruleIndex: number): void {\r\n\t\tlet previous: ParserRuleContext = this._ctx;\r\n\t\tprevious._parent = localctx;\r\n\t\tprevious.invokingState = state;\r\n\t\tprevious._stop = this._input.tryLT(-1);\r\n\r\n\t\tthis._ctx = localctx;\r\n\t\tthis._ctx._start = previous._start;\r\n\t\tif (this._buildParseTrees) {\r\n\t\t\tthis._ctx.addChild(previous);\r\n\t\t}\r\n\r\n\t\tthis.triggerEnterRuleEvent(); // simulates rule entry for left-recursive rules\r\n\t}\r\n\r\n\tpublic unrollRecursionContexts(_parentctx: ParserRuleContext): void {\r\n\t\tthis._precedenceStack.pop();\r\n\t\tthis._ctx._stop = this._input.tryLT(-1);\r\n\t\tlet 
retctx: ParserRuleContext = this._ctx; // save current ctx (return value)\r\n\r\n\t\t// unroll so _ctx is as it was before call to recursive method\r\n\t\tif (this._parseListeners.length > 0) {\r\n\t\t\twhile (this._ctx !== _parentctx) {\r\n\t\t\t\tthis.triggerExitRuleEvent();\r\n\t\t\t\tthis._ctx = this._ctx._parent as ParserRuleContext;\r\n\t\t\t}\r\n\t\t}\r\n\t\telse {\r\n\t\t\tthis._ctx = _parentctx;\r\n\t\t}\r\n\r\n\t\t// hook into tree\r\n\t\tretctx._parent = _parentctx;\r\n\r\n\t\tif (this._buildParseTrees && _parentctx != null) {\r\n\t\t\t// add return ctx into invoking rule's tree\r\n\t\t\t_parentctx.addChild(retctx);\r\n\t\t}\r\n\t}\r\n\r\n\tpublic getInvokingContext(ruleIndex: number): ParserRuleContext | undefined {\r\n\t\tlet p = this._ctx;\r\n\t\twhile (p && p.ruleIndex !== ruleIndex) {\r\n\t\t\tp = p._parent as ParserRuleContext;\r\n\t\t}\r\n\t\treturn p;\r\n\t}\r\n\r\n\tget context(): ParserRuleContext {\r\n\t\treturn this._ctx;\r\n\t}\r\n\r\n\tset context(ctx: ParserRuleContext) {\r\n\t\tthis._ctx = ctx;\r\n\t}\r\n\r\n\t@Override\r\n\tpublic precpred(@Nullable localctx: RuleContext, precedence: number): boolean {\r\n\t\treturn precedence >= this._precedenceStack.peek();\r\n\t}\r\n\r\n\t@Override\r\n\tpublic getErrorListenerDispatch(): ParserErrorListener {\r\n\t\treturn new ProxyParserErrorListener(this.getErrorListeners());\r\n\t}\r\n\r\n\tpublic inContext(context: string): boolean {\r\n\t\t// TODO: useful in parser?\r\n\t\treturn false;\r\n\t}\r\n\r\n\t/**\r\n\t * Checks whether or not `symbol` can follow the current state in the\r\n\t * ATN. 
The behavior of this method is equivalent to the following, but is\r\n\t * implemented such that the complete context-sensitive follow set does not\r\n\t * need to be explicitly constructed.\r\n\t *\r\n\t * ```\r\n\t * return getExpectedTokens().contains(symbol);\r\n\t * ```\r\n\t *\r\n\t * @param symbol the symbol type to check\r\n\t * @returns `true` if `symbol` can follow the current state in\r\n\t * the ATN, otherwise `false`.\r\n\t */\r\n\tpublic isExpectedToken(symbol: number): boolean {\r\n// \t\treturn interpreter.atn.nextTokens(_ctx);\r\n\t\tlet atn: ATN = this.interpreter.atn;\r\n\t\tlet ctx: ParserRuleContext = this._ctx;\r\n\t\tlet s: ATNState = atn.states[this.state];\r\n\t\tlet following: IntervalSet = atn.nextTokens(s);\r\n\t\tif (following.contains(symbol)) {\r\n\t\t\treturn true;\r\n\t\t}\r\n// System.out.println(\"following \"+s+\"=\"+following);\r\n\t\tif (!following.contains(Token.EPSILON)) {\r\n\t\t\treturn false;\r\n\t\t}\r\n\r\n\t\twhile (ctx != null && ctx.invokingState >= 0 && following.contains(Token.EPSILON)) {\r\n\t\t\tlet invokingState: ATNState = atn.states[ctx.invokingState];\r\n\t\t\tlet rt = invokingState.transition(0) as RuleTransition;\r\n\t\t\tfollowing = atn.nextTokens(rt.followState);\r\n\t\t\tif (following.contains(symbol)) {\r\n\t\t\t\treturn true;\r\n\t\t\t}\r\n\r\n\t\t\tctx = ctx._parent as ParserRuleContext;\r\n\t\t}\r\n\r\n\t\tif (following.contains(Token.EPSILON) && symbol === Token.EOF) {\r\n\t\t\treturn true;\r\n\t\t}\r\n\r\n\t\treturn false;\r\n\t}\r\n\r\n\tget isMatchedEOF(): boolean {\r\n\t\treturn this.matchedEOF;\r\n\t}\r\n\r\n\t/**\r\n\t * Computes the set of input symbols which could follow the current parser\r\n\t * state and context, as given by {@link #getState} and {@link #getContext},\r\n\t * respectively.\r\n\t *\r\n\t * @see ATN#getExpectedTokens(int, RuleContext)\r\n\t */\r\n\t@NotNull\r\n\tpublic getExpectedTokens(): IntervalSet {\r\n\t\treturn this.atn.getExpectedTokens(this.state, 
this.context);\r\n\t}\r\n\r\n\t@NotNull\r\n\tpublic getExpectedTokensWithinCurrentRule(): IntervalSet {\r\n\t\tlet atn: ATN = this.interpreter.atn;\r\n\t\tlet s: ATNState = atn.states[this.state];\r\n\t\treturn atn.nextTokens(s);\r\n\t}\r\n\r\n\t/** Get a rule's index (i.e., `RULE_ruleName` field) or -1 if not found. */\r\n\tpublic getRuleIndex(ruleName: string): number {\r\n\t\tlet ruleIndex = this.getRuleIndexMap().get(ruleName);\r\n\t\tif (ruleIndex != null) {\r\n\t\t\treturn ruleIndex;\r\n\t\t}\r\n\t\treturn -1;\r\n\t}\r\n\r\n\tget ruleContext(): ParserRuleContext { return this._ctx; }\r\n\r\n\t/** Return List<String> of the rule names in your parser instance\r\n\t * leading up to a call to the current rule. You could override if\r\n\t * you want more details such as the file/line info of where\r\n\t * in the ATN a rule is invoked.\r\n\t *\r\n\t * This is very useful for error messages.\r\n\t */\r\n\r\n\tpublic getRuleInvocationStack(ctx: RuleContext = this._ctx): string[] {\r\n\t\tlet p: RuleContext | undefined = ctx; \t\t// Workaround for Microsoft/TypeScript#14487\r\n\t\tlet ruleNames: string[] = this.ruleNames;\r\n\t\tlet stack: string[] = [];\r\n\t\twhile (p != null) {\r\n\t\t\t// compute what follows who invoked us\r\n\t\t\tlet ruleIndex: number = p.ruleIndex;\r\n\t\t\tif (ruleIndex < 0) {\r\n\t\t\t\tstack.push(\"n/a\");\r\n\t\t\t} else {\r\n\t\t\t\tstack.push(ruleNames[ruleIndex]);\r\n\t\t\t}\r\n\t\t\tp = p._parent as RuleContext;\r\n\t\t}\r\n\t\treturn stack;\r\n\t}\r\n\r\n\t/** For debugging and other purposes. */\r\n\tpublic getDFAStrings(): string[] {\r\n\t\tlet s: string[] = [];\r\n\t\tfor (let dfa of this._interp.atn.decisionToDFA) {\r\n\t\t\ts.push(dfa.toString(this.vocabulary, this.ruleNames));\r\n\t\t}\r\n\t\treturn s;\r\n\t}\r\n\r\n\t/** For debugging and other purposes. 
*/\r\n\tpublic dumpDFA(): void {\r\n\t\tlet seenOne: boolean = false;\r\n\t\tfor (let dfa of this._interp.atn.decisionToDFA) {\r\n\t\t\tif (!dfa.isEmpty) {\r\n\t\t\t\tif (seenOne) {\r\n\t\t\t\t\tconsole.log();\r\n\t\t\t\t}\r\n\t\t\t\tconsole.log(\"Decision \" + dfa.decision + \":\");\r\n\t\t\t\tprocess.stdout.write(dfa.toString(this.vocabulary, this.ruleNames));\r\n\t\t\t\tseenOne = true;\r\n\t\t\t}\r\n\t\t}\r\n\t}\r\n\r\n\tget sourceName(): string {\r\n\t\treturn this._input.sourceName;\r\n\t}\r\n\r\n\t@Override\r\n\tget parseInfo(): Promise {\r\n\t\treturn import(\"./atn/ProfilingATNSimulator\").then((m) => {\r\n\t\t\tlet interp: ParserATNSimulator = this.interpreter;\r\n\t\t\tif (interp instanceof m.ProfilingATNSimulator) {\r\n\t\t\t\treturn new ParseInfo(interp);\r\n\t\t\t}\r\n\r\n\t\t\treturn undefined;\r\n\t\t});\r\n\t}\r\n\r\n\t/**\r\n\t * @since 4.3\r\n\t */\r\n\tpublic async setProfile(profile: boolean): Promise {\r\n\t\tlet m = await import(\"./atn/ProfilingATNSimulator\");\r\n\t\tlet interp: ParserATNSimulator = this.interpreter;\r\n\t\tif (profile) {\r\n\t\t\tif (!(interp instanceof m.ProfilingATNSimulator)) {\r\n\t\t\t\tthis.interpreter = new m.ProfilingATNSimulator(this);\r\n\t\t\t}\r\n\t\t} else if (interp instanceof m.ProfilingATNSimulator) {\r\n\t\t\tthis.interpreter = new ParserATNSimulator(this.atn, this);\r\n\t\t}\r\n\r\n\t\tthis.interpreter.setPredictionMode(interp.getPredictionMode());\r\n\t}\r\n\r\n\t/** During a parse is sometimes useful to listen in on the rule entry and exit\r\n\t * events as well as token matches. 
This is for quick and dirty debugging.\r\n\t */\r\n\tset isTrace(trace: boolean) {\r\n\t\tif (!trace) {\r\n\t\t\tif (this._tracer) {\r\n\t\t\t\tthis.removeParseListener(this._tracer);\r\n\t\t\t\tthis._tracer = undefined;\r\n\t\t\t}\r\n\t\t}\r\n\t\telse {\r\n\t\t\tif (this._tracer) {\r\n\t\t\t\tthis.removeParseListener(this._tracer);\r\n\t\t\t} else {\r\n\t\t\t\tthis._tracer = new TraceListener(this.ruleNames, this._input);\r\n\t\t\t}\r\n\r\n\t\t\tthis.addParseListener(this._tracer);\r\n\t\t}\r\n\t}\r\n\r\n\t/**\r\n\t * Gets whether a {@link TraceListener} is registered as a parse listener\r\n\t * for the parser.\r\n\t */\r\n\tget isTrace(): boolean {\r\n\t\treturn this._tracer != null;\r\n\t}\r\n}\r\n", "/*!\r\n * Copyright 2016 The ANTLR Project. All rights reserved.\r\n * Licensed under the BSD-3-Clause license. See LICENSE file in the project root for license information.\r\n */\r\n\r\n// ConvertTo-TS run at 2016-10-04T11:26:52.3255548-07:00\r\n\r\nimport { ATNConfigSet } from \"./atn/ATNConfigSet\";\r\nimport { Parser } from \"./Parser\";\r\nimport { ParserRuleContext } from \"./ParserRuleContext\";\r\nimport { RecognitionException } from \"./RecognitionException\";\r\nimport { Recognizer } from \"./Recognizer\";\r\nimport { Token } from \"./Token\";\r\nimport { TokenStream } from \"./TokenStream\";\r\nimport { IntStream } from \"./IntStream\";\r\nimport { NotNull } from \"./Decorators\";\r\n\r\n/** Indicates that the parser could not decide which of two or more paths\r\n * to take based upon the remaining input. It tracks the starting token\r\n * of the offending input and also knows where the parser was\r\n * in the various paths when the error. Reported by reportNoViableAlternative()\r\n */\r\nexport class NoViableAltException extends RecognitionException {\r\n\t//private static serialVersionUID: number = 5096000008992867052L;\r\n\r\n\t/** Which configurations did we try at input.index that couldn't match input.LT(1)? 
*/\r\n\tprivate _deadEndConfigs?: ATNConfigSet;\r\n\r\n\t/** The token object at the start index; the input stream might\r\n\t * \tnot be buffering tokens so get a reference to it. (At the\r\n\t * time the error occurred, of course the stream needs to keep a\r\n\t * buffer all of the tokens but later we might not have access to those.)\r\n\t */\r\n\t@NotNull\r\n\tprivate _startToken: Token;\r\n\r\n\tconstructor(/*@NotNull*/ recognizer: Parser);\r\n\tconstructor(\r\n\t\t/*@NotNull*/\r\n\t\trecognizer: Recognizer,\r\n\t\t/*@NotNull*/\r\n\t\tinput: TokenStream,\r\n\t\t/*@NotNull*/\r\n\t\tstartToken: Token,\r\n\t\t/*@NotNull*/\r\n\t\toffendingToken: Token,\r\n\t\tdeadEndConfigs: ATNConfigSet | undefined,\r\n\t\t/*@NotNull*/\r\n\t\tctx: ParserRuleContext);\r\n\r\n\tconstructor(\r\n\t\trecognizer: Recognizer,\r\n\t\tinput?: TokenStream,\r\n\t\tstartToken?: Token,\r\n\t\toffendingToken?: Token,\r\n\t\tdeadEndConfigs?: ATNConfigSet,\r\n\t\tctx?: ParserRuleContext) {\r\n\t\tif (recognizer instanceof Parser) {\r\n\t\t\tif (input === undefined) {\r\n\t\t\t\tinput = recognizer.inputStream;\r\n\t\t\t}\r\n\r\n\t\t\tif (startToken === undefined) {\r\n\t\t\t\tstartToken = recognizer.currentToken;\r\n\t\t\t}\r\n\r\n\t\t\tif (offendingToken === undefined) {\r\n\t\t\t\toffendingToken = recognizer.currentToken;\r\n\t\t\t}\r\n\r\n\t\t\tif (ctx === undefined) {\r\n\t\t\t\tctx = recognizer.context;\r\n\t\t\t}\r\n\t\t}\r\n\r\n\t\tsuper(recognizer, input, ctx);\r\n\t\tthis._deadEndConfigs = deadEndConfigs;\r\n\t\tthis._startToken = startToken as Token;\r\n\t\tthis.setOffendingToken(recognizer, offendingToken);\r\n\t}\r\n\r\n\tget startToken(): Token {\r\n\t\treturn this._startToken;\r\n\t}\r\n\r\n\tget deadEndConfigs(): ATNConfigSet | undefined {\r\n\t\treturn this._deadEndConfigs;\r\n\t}\r\n\r\n}\r\n", "/*!\r\n * Copyright 2016 The ANTLR Project. All rights reserved.\r\n * Licensed under the BSD-3-Clause license. 
See LICENSE file in the project root for license information.\r\n */\r\n\r\n// ConvertTo-TS run at 2016-10-04T11:26:50.8290527-07:00\r\n\r\nimport { ANTLRErrorStrategy } from \"./ANTLRErrorStrategy\";\r\nimport { ATN } from \"./atn/ATN\";\r\nimport { ATNState } from \"./atn/ATNState\";\r\nimport { ATNStateType } from \"./atn/ATNStateType\";\r\nimport { FailedPredicateException } from \"./FailedPredicateException\";\r\nimport { InputMismatchException } from \"./InputMismatchException\";\r\nimport { IntervalSet } from \"./misc/IntervalSet\";\r\nimport { NoViableAltException } from \"./NoViableAltException\";\r\nimport { Parser } from \"./Parser\";\r\nimport { ParserRuleContext } from \"./ParserRuleContext\";\r\nimport { PredictionContext } from \"./atn/PredictionContext\";\r\nimport { RecognitionException } from \"./RecognitionException\";\r\nimport { RuleContext } from \"./RuleContext\";\r\nimport { RuleTransition } from \"./atn/RuleTransition\";\r\nimport { TokenStream } from \"./TokenStream\";\r\nimport { Token } from \"./Token\";\r\nimport { TokenFactory } from \"./TokenFactory\";\r\nimport { TokenSource } from \"./TokenSource\";\r\nimport { Vocabulary } from \"./Vocabulary\";\r\nimport { Override, NotNull } from \"./Decorators\";\r\n\r\n/**\r\n * This is the default implementation of {@link ANTLRErrorStrategy} used for\r\n * error reporting and recovery in ANTLR parsers.\r\n */\r\nexport class DefaultErrorStrategy implements ANTLRErrorStrategy {\r\n\t/**\r\n\t * Indicates whether the error strategy is currently \"recovering from an\r\n\t * error\". 
This is used to suppress reporting multiple error messages while\r\n\t * attempting to recover from a detected syntax error.\r\n\t *\r\n\t * @see #inErrorRecoveryMode\r\n\t */\r\n\tprotected errorRecoveryMode: boolean = false;\r\n\r\n\t/** The index into the input stream where the last error occurred.\r\n\t * \tThis is used to prevent infinite loops where an error is found\r\n\t * but no token is consumed during recovery...another error is found,\r\n\t * ad nauseum. This is a failsafe mechanism to guarantee that at least\r\n\t * one token/tree node is consumed for two errors.\r\n\t */\r\n\tprotected lastErrorIndex: number = -1;\r\n\r\n\tprotected lastErrorStates?: IntervalSet;\r\n\r\n\t/**\r\n\t * This field is used to propagate information about the lookahead following\r\n\t * the previous match. Since prediction prefers completing the current rule\r\n\t * to error recovery efforts, error reporting may occur later than the\r\n\t * original point where it was discoverable. The original context is used to\r\n\t * compute the true expected sets as though the reporting occurred as early\r\n\t * as possible.\r\n\t */\r\n\tprotected nextTokensContext?: ParserRuleContext;\r\n\r\n\t/**\r\n\t * @see #nextTokensContext\r\n\t */\r\n\tprotected nextTokensState: number = ATNState.INVALID_STATE_NUMBER;\r\n\r\n\t/**\r\n\t * {@inheritDoc}\r\n\t *\r\n\t * The default implementation simply calls {@link #endErrorCondition} to\r\n\t * ensure that the handler is not in error recovery mode.\r\n\t */\r\n\t@Override\r\n\tpublic reset(recognizer: Parser): void {\r\n\t\tthis.endErrorCondition(recognizer);\r\n\t}\r\n\r\n\t/**\r\n\t * This method is called to enter error recovery mode when a recognition\r\n\t * exception is reported.\r\n\t *\r\n\t * @param recognizer the parser instance\r\n\t */\r\n\tprotected beginErrorCondition(@NotNull recognizer: Parser): void {\r\n\t\tthis.errorRecoveryMode = true;\r\n\t}\r\n\r\n\t/**\r\n\t * {@inheritDoc}\r\n\t */\r\n\t@Override\r\n\tpublic 
inErrorRecoveryMode(recognizer: Parser): boolean {\r\n\t\treturn this.errorRecoveryMode;\r\n\t}\r\n\r\n\t/**\r\n\t * This method is called to leave error recovery mode after recovering from\r\n\t * a recognition exception.\r\n\t *\r\n\t * @param recognizer\r\n\t */\r\n\tprotected endErrorCondition(@NotNull recognizer: Parser): void {\r\n\t\tthis.errorRecoveryMode = false;\r\n\t\tthis.lastErrorStates = undefined;\r\n\t\tthis.lastErrorIndex = -1;\r\n\t}\r\n\r\n\t/**\r\n\t * {@inheritDoc}\r\n\t *\r\n\t * The default implementation simply calls {@link #endErrorCondition}.\r\n\t */\r\n\t@Override\r\n\tpublic reportMatch(recognizer: Parser): void {\r\n\t\tthis.endErrorCondition(recognizer);\r\n\t}\r\n\r\n\t/**\r\n\t * {@inheritDoc}\r\n\t *\r\n\t * The default implementation returns immediately if the handler is already\r\n\t * in error recovery mode. Otherwise, it calls {@link #beginErrorCondition}\r\n\t * and dispatches the reporting task based on the runtime type of `e`\r\n\t * according to the following table.\r\n\t *\r\n\t * * {@link NoViableAltException}: Dispatches the call to\r\n\t * {@link #reportNoViableAlternative}\r\n\t * * {@link InputMismatchException}: Dispatches the call to\r\n\t * {@link #reportInputMismatch}\r\n\t * * {@link FailedPredicateException}: Dispatches the call to\r\n\t * {@link #reportFailedPredicate}\r\n\t * * All other types: calls {@link Parser#notifyErrorListeners} to report\r\n\t * the exception\r\n\t */\r\n\t@Override\r\n\tpublic reportError(\r\n\t\trecognizer: Parser,\r\n\t\te: RecognitionException): void {\r\n\t\t// if we've already reported an error and have not matched a token\r\n\t\t// yet successfully, don't report any errors.\r\n\t\tif (this.inErrorRecoveryMode(recognizer)) {\r\n//\t\t\tSystem.err.print(\"[SPURIOUS] \");\r\n\t\t\treturn; // don't report spurious errors\r\n\t\t}\r\n\t\tthis.beginErrorCondition(recognizer);\r\n\t\tif (e instanceof NoViableAltException) {\r\n\t\t\tthis.reportNoViableAlternative(recognizer, 
e);\r\n\t\t}\r\n\t\telse if (e instanceof InputMismatchException) {\r\n\t\t\tthis.reportInputMismatch(recognizer, e);\r\n\t\t}\r\n\t\telse if (e instanceof FailedPredicateException) {\r\n\t\t\tthis.reportFailedPredicate(recognizer, e);\r\n\t\t}\r\n\t\telse {\r\n\t\t\tconsole.error(`unknown recognition error type: ${e}`);\r\n\t\t\tthis.notifyErrorListeners(recognizer, e.toString(), e);\r\n\t\t}\r\n\t}\r\n\r\n\tprotected notifyErrorListeners(@NotNull recognizer: Parser, message: string, e: RecognitionException): void {\r\n\t\tlet offendingToken: Token | null | undefined = e.getOffendingToken(recognizer);\r\n\t\tif (offendingToken === undefined) {\r\n\t\t\t// Pass null to notifyErrorListeners so it in turn calls the error listeners with undefined as the offending\r\n\t\t\t// token. If we passed undefined, it would instead call the listeners with currentToken from the parser.\r\n\t\t\toffendingToken = null;\r\n\t\t}\r\n\r\n\t\trecognizer.notifyErrorListeners(message, offendingToken, e);\r\n\t}\r\n\r\n\t/**\r\n\t * {@inheritDoc}\r\n\t *\r\n\t * The default implementation resynchronizes the parser by consuming tokens\r\n\t * until we find one in the resynchronization set--loosely the set of tokens\r\n\t * that can follow the current rule.\r\n\t */\r\n\t@Override\r\n\tpublic recover(recognizer: Parser, e: RecognitionException): void {\r\n//\t\tSystem.out.println(\"recover in \"+recognizer.getRuleInvocationStack()+\r\n//\t\t\t\t\t\t \" index=\"+recognizer.inputStream.index+\r\n//\t\t\t\t\t\t \", lastErrorIndex=\"+\r\n//\t\t\t\t\t\t lastErrorIndex+\r\n//\t\t\t\t\t\t \", states=\"+lastErrorStates);\r\n\t\tif (this.lastErrorIndex === recognizer.inputStream.index &&\r\n\t\t\tthis.lastErrorStates &&\r\n\t\t\tthis.lastErrorStates.contains(recognizer.state)) {\r\n\t\t\t// uh oh, another error at same token index and previously-visited\r\n\t\t\t// state in ATN; must be a case where LT(1) is in the recovery\r\n\t\t\t// token set so nothing got consumed. 
Consume a single token\r\n\t\t\t// at least to prevent an infinite loop; this is a failsafe.\r\n//\t\t\tSystem.err.println(\"seen error condition before index=\"+\r\n//\t\t\t\t\t\t\t lastErrorIndex+\", states=\"+lastErrorStates);\r\n//\t\t\tSystem.err.println(\"FAILSAFE consumes \"+recognizer.getTokenNames()[recognizer.inputStream.LA(1)]);\r\n\t\t\trecognizer.consume();\r\n\t\t}\r\n\t\tthis.lastErrorIndex = recognizer.inputStream.index;\r\n\t\tif (!this.lastErrorStates) {\r\n\t\t\tthis.lastErrorStates = new IntervalSet();\r\n\t\t}\r\n\t\tthis.lastErrorStates.add(recognizer.state);\r\n\t\tlet followSet: IntervalSet = this.getErrorRecoverySet(recognizer);\r\n\t\tthis.consumeUntil(recognizer, followSet);\r\n\t}\r\n\r\n\t/**\r\n\t * The default implementation of {@link ANTLRErrorStrategy#sync} makes sure\r\n\t * that the current lookahead symbol is consistent with what were expecting\r\n\t * at this point in the ATN. You can call this anytime but ANTLR only\r\n\t * generates code to check before subrules/loops and each iteration.\r\n\t *\r\n\t * Implements Jim Idle's magic sync mechanism in closures and optional\r\n\t * subrules. E.g.,\r\n\t *\r\n\t * ```antlr\r\n\t * a : sync ( stuff sync )* ;\r\n\t * sync : {consume to what can follow sync} ;\r\n\t * ```\r\n\t *\r\n\t * At the start of a sub rule upon error, {@link #sync} performs single\r\n\t * token deletion, if possible. If it can't do that, it bails on the current\r\n\t * rule and uses the default error recovery, which consumes until the\r\n\t * resynchronization set of the current rule.\r\n\t *\r\n\t * If the sub rule is optional (`(...)?`, `(...)*`, or block\r\n\t * with an empty alternative), then the expected set includes what follows\r\n\t * the subrule.\r\n\t *\r\n\t * During loop iteration, it consumes until it sees a token that can start a\r\n\t * sub rule or what follows loop. Yes, that is pretty aggressive. 
We opt to\r\n\t * stay in the loop as long as possible.\r\n\t *\r\n\t * **ORIGINS**\r\n\t *\r\n\t * Previous versions of ANTLR did a poor job of their recovery within loops.\r\n\t * A single mismatch token or missing token would force the parser to bail\r\n\t * out of the entire rules surrounding the loop. So, for rule\r\n\t *\r\n\t * ```antlr\r\n\t * classDef : 'class' ID '{' member* '}'\r\n\t * ```\r\n\t *\r\n\t * input with an extra token between members would force the parser to\r\n\t * consume until it found the next class definition rather than the next\r\n\t * member definition of the current class.\r\n\t *\r\n\t * This functionality cost a little bit of effort because the parser has to\r\n\t * compare token set at the start of the loop and at each iteration. If for\r\n\t * some reason speed is suffering for you, you can turn off this\r\n\t * functionality by simply overriding this method as a blank { }.\r\n\t */\r\n\t@Override\r\n\tpublic sync(recognizer: Parser): void {\r\n\t\tlet s: ATNState = recognizer.interpreter.atn.states[recognizer.state];\r\n//\t\tSystem.err.println(\"sync @ \"+s.stateNumber+\"=\"+s.getClass().getSimpleName());\r\n\t\t// If already recovering, don't try to sync\r\n\t\tif (this.inErrorRecoveryMode(recognizer)) {\r\n\t\t\treturn;\r\n\t\t}\r\n\r\n\t\tlet tokens: TokenStream = recognizer.inputStream;\r\n\t\tlet la: number = tokens.LA(1);\r\n\r\n\t\t// try cheaper subset first; might get lucky. 
seems to shave a wee bit off\r\n\t\tlet nextTokens: IntervalSet = recognizer.atn.nextTokens(s);\r\n\t\tif (nextTokens.contains(la)) {\r\n\t\t\t// We are sure the token matches\r\n\t\t\tthis.nextTokensContext = undefined;\r\n\t\t\tthis.nextTokensState = ATNState.INVALID_STATE_NUMBER;\r\n\t\t\treturn;\r\n\t\t}\r\n\r\n\t\tif (nextTokens.contains(Token.EPSILON)) {\r\n\t\t\tif (this.nextTokensContext === undefined) {\r\n\t\t\t\t// It's possible the next token won't match; information tracked\r\n\t\t\t\t// by sync is restricted for performance.\r\n\t\t\t\tthis.nextTokensContext = recognizer.context;\r\n\t\t\t\tthis.nextTokensState = recognizer.state;\r\n\t\t\t}\r\n\r\n\t\t\treturn;\r\n\t\t}\r\n\r\n\t\tswitch (s.stateType) {\r\n\t\tcase ATNStateType.BLOCK_START:\r\n\t\tcase ATNStateType.STAR_BLOCK_START:\r\n\t\tcase ATNStateType.PLUS_BLOCK_START:\r\n\t\tcase ATNStateType.STAR_LOOP_ENTRY:\r\n\t\t\t// report error and recover if possible\r\n\t\t\tif (this.singleTokenDeletion(recognizer)) {\r\n\t\t\t\treturn;\r\n\t\t\t}\r\n\r\n\t\t\tthrow new InputMismatchException(recognizer);\r\n\r\n\t\tcase ATNStateType.PLUS_LOOP_BACK:\r\n\t\tcase ATNStateType.STAR_LOOP_BACK:\r\n//\t\t\tSystem.err.println(\"at loop back: \"+s.getClass().getSimpleName());\r\n\t\t\tthis.reportUnwantedToken(recognizer);\r\n\t\t\tlet expecting: IntervalSet = recognizer.getExpectedTokens();\r\n\t\t\tlet whatFollowsLoopIterationOrRule: IntervalSet =\r\n\t\t\t\texpecting.or(this.getErrorRecoverySet(recognizer));\r\n\t\t\tthis.consumeUntil(recognizer, whatFollowsLoopIterationOrRule);\r\n\t\t\tbreak;\r\n\r\n\t\tdefault:\r\n\t\t\t// do nothing if we can't identify the exact kind of ATN state\r\n\t\t\tbreak;\r\n\t\t}\r\n\t}\r\n\r\n\t/**\r\n\t * This is called by {@link #reportError} when the exception is a\r\n\t * {@link NoViableAltException}.\r\n\t *\r\n\t * @see #reportError\r\n\t *\r\n\t * @param recognizer the parser instance\r\n\t * @param e the recognition exception\r\n\t */\r\n\tprotected 
reportNoViableAlternative(\r\n\t\t@NotNull recognizer: Parser,\r\n\t\t@NotNull e: NoViableAltException): void {\r\n\t\tlet tokens: TokenStream = recognizer.inputStream;\r\n\t\tlet input: string;\r\n\t\tif (tokens) {\r\n\t\t\tif (e.startToken.type === Token.EOF) {\r\n\t\t\t\tinput = \"\";\r\n\t\t\t} else {\r\n\t\t\t\tinput = tokens.getTextFromRange(e.startToken, e.getOffendingToken());\r\n\t\t\t}\r\n\t\t}\r\n\t\telse {\r\n\t\t\tinput = \"\";\r\n\t\t}\r\n\t\tlet msg: string = \"no viable alternative at input \" + this.escapeWSAndQuote(input);\r\n\t\tthis.notifyErrorListeners(recognizer, msg, e);\r\n\t}\r\n\r\n\t/**\r\n\t * This is called by {@link #reportError} when the exception is an\r\n\t * {@link InputMismatchException}.\r\n\t *\r\n\t * @see #reportError\r\n\t *\r\n\t * @param recognizer the parser instance\r\n\t * @param e the recognition exception\r\n\t */\r\n\tprotected reportInputMismatch(\r\n\t\t@NotNull recognizer: Parser,\r\n\t\t@NotNull e: InputMismatchException): void {\r\n\t\tlet expected = e.expectedTokens;\r\n\t\tlet expectedString = expected ? 
expected.toStringVocabulary(recognizer.vocabulary) : \"\";\r\n\t\tlet msg: string = \"mismatched input \" + this.getTokenErrorDisplay(e.getOffendingToken(recognizer)) +\r\n\t\t\t\" expecting \" + expectedString;\r\n\t\tthis.notifyErrorListeners(recognizer, msg, e);\r\n\t}\r\n\r\n\t/**\r\n\t * This is called by {@link #reportError} when the exception is a\r\n\t * {@link FailedPredicateException}.\r\n\t *\r\n\t * @see #reportError\r\n\t *\r\n\t * @param recognizer the parser instance\r\n\t * @param e the recognition exception\r\n\t */\r\n\tprotected reportFailedPredicate(\r\n\t\t@NotNull recognizer: Parser,\r\n\t\t@NotNull e: FailedPredicateException): void {\r\n\t\tlet ruleName: string = recognizer.ruleNames[recognizer.context.ruleIndex];\r\n\t\tlet msg: string = \"rule \" + ruleName + \" \" + e.message;\r\n\t\tthis.notifyErrorListeners(recognizer, msg, e);\r\n\t}\r\n\r\n\t/**\r\n\t * This method is called to report a syntax error which requires the removal\r\n\t * of a token from the input stream. At the time this method is called, the\r\n\t * erroneous symbol is current `LT(1)` symbol and has not yet been\r\n\t * removed from the input stream. When this method returns,\r\n\t * `recognizer` is in error recovery mode.\r\n\t *\r\n\t * This method is called when {@link #singleTokenDeletion} identifies\r\n\t * single-token deletion as a viable recovery strategy for a mismatched\r\n\t * input error.\r\n\t *\r\n\t * The default implementation simply returns if the handler is already in\r\n\t * error recovery mode. 
Otherwise, it calls {@link #beginErrorCondition} to\r\n\t * enter error recovery mode, followed by calling\r\n\t * {@link Parser#notifyErrorListeners}.\r\n\t *\r\n\t * @param recognizer the parser instance\r\n\t */\r\n\tprotected reportUnwantedToken(@NotNull recognizer: Parser): void {\r\n\t\tif (this.inErrorRecoveryMode(recognizer)) {\r\n\t\t\treturn;\r\n\t\t}\r\n\r\n\t\tthis.beginErrorCondition(recognizer);\r\n\r\n\t\tlet t: Token = recognizer.currentToken;\r\n\t\tlet tokenName: string = this.getTokenErrorDisplay(t);\r\n\t\tlet expecting: IntervalSet = this.getExpectedTokens(recognizer);\r\n\t\tlet msg: string = \"extraneous input \" + tokenName + \" expecting \" +\r\n\t\t\texpecting.toStringVocabulary(recognizer.vocabulary);\r\n\t\trecognizer.notifyErrorListeners(msg, t, undefined);\r\n\t}\r\n\r\n\t/**\r\n\t * This method is called to report a syntax error which requires the\r\n\t * insertion of a missing token into the input stream. At the time this\r\n\t * method is called, the missing token has not yet been inserted. When this\r\n\t * method returns, `recognizer` is in error recovery mode.\r\n\t *\r\n\t * This method is called when {@link #singleTokenInsertion} identifies\r\n\t * single-token insertion as a viable recovery strategy for a mismatched\r\n\t * input error.\r\n\t *\r\n\t * The default implementation simply returns if the handler is already in\r\n\t * error recovery mode. 
Otherwise, it calls {@link #beginErrorCondition} to\r\n\t * enter error recovery mode, followed by calling\r\n\t * {@link Parser#notifyErrorListeners}.\r\n\t *\r\n\t * @param recognizer the parser instance\r\n\t */\r\n\tprotected reportMissingToken(@NotNull recognizer: Parser): void {\r\n\t\tif (this.inErrorRecoveryMode(recognizer)) {\r\n\t\t\treturn;\r\n\t\t}\r\n\r\n\t\tthis.beginErrorCondition(recognizer);\r\n\r\n\t\tlet t: Token = recognizer.currentToken;\r\n\t\tlet expecting: IntervalSet = this.getExpectedTokens(recognizer);\r\n\t\tlet msg: string = \"missing \" + expecting.toStringVocabulary(recognizer.vocabulary) +\r\n\t\t\t\" at \" + this.getTokenErrorDisplay(t);\r\n\r\n\t\trecognizer.notifyErrorListeners(msg, t, undefined);\r\n\t}\r\n\r\n\t/**\r\n\t * {@inheritDoc}\r\n\t *\r\n\t * The default implementation attempts to recover from the mismatched input\r\n\t * by using single token insertion and deletion as described below. If the\r\n\t * recovery attempt fails, this method\r\n\t * {@link InputMismatchException}.\r\n\t *\r\n\t * **EXTRA TOKEN** (single token deletion)\r\n\t *\r\n\t * `LA(1)` is not what we are looking for. If `LA(2)` has the\r\n\t * right token, however, then assume `LA(1)` is some extra spurious\r\n\t * token and delete it. Then consume and return the next token (which was\r\n\t * the `LA(2)` token) as the successful result of the match operation.\r\n\t *\r\n\t * This recovery strategy is implemented by {@link #singleTokenDeletion}.\r\n\t *\r\n\t * **MISSING TOKEN** (single token insertion)\r\n\t *\r\n\t * If current token (at `LA(1)`) is consistent with what could come\r\n\t * after the expected `LA(1)` token, then assume the token is missing\r\n\t * and use the parser's {@link TokenFactory} to create it on the fly. 
The\r\n\t * \"insertion\" is performed by returning the created token as the successful\r\n\t * result of the match operation.\r\n\t *\r\n\t * This recovery strategy is implemented by {@link #singleTokenInsertion}.\r\n\t *\r\n\t * **EXAMPLE**\r\n\t *\r\n\t * For example, Input `i=(3;` is clearly missing the `')'`. When\r\n\t * the parser returns from the nested call to `expr`, it will have\r\n\t * call chain:\r\n\t *\r\n\t * ```\r\n\t * stat \u2192 expr \u2192 atom\r\n\t * ```\r\n\t *\r\n\t * and it will be trying to match the `')'` at this point in the\r\n\t * derivation:\r\n\t *\r\n\t * ```\r\n\t * => ID '=' '(' INT ')' ('+' atom)* ';'\r\n\t * ^\r\n\t * ```\r\n\t *\r\n\t * The attempt to match `')'` will fail when it sees `';'` and\r\n\t * call {@link #recoverInline}. To recover, it sees that `LA(1)==';'`\r\n\t * is in the set of tokens that can follow the `')'` token reference\r\n\t * in rule `atom`. It can assume that you forgot the `')'`.\r\n\t */\r\n\t@Override\r\n\tpublic recoverInline(recognizer: Parser): Token {\r\n\t\t// SINGLE TOKEN DELETION\r\n\t\tlet matchedSymbol = this.singleTokenDeletion(recognizer);\r\n\t\tif (matchedSymbol) {\r\n\t\t\t// we have deleted the extra token.\r\n\t\t\t// now, move past ttype token as if all were ok\r\n\t\t\trecognizer.consume();\r\n\t\t\treturn matchedSymbol;\r\n\t\t}\r\n\r\n\t\t// SINGLE TOKEN INSERTION\r\n\t\tif (this.singleTokenInsertion(recognizer)) {\r\n\t\t\treturn this.getMissingSymbol(recognizer);\r\n\t\t}\r\n\r\n\t\t// even that didn't work; must throw the exception\r\n\t\tif (this.nextTokensContext === undefined) {\r\n\t\t\tthrow new InputMismatchException(recognizer);\r\n\t\t} else {\r\n\t\t\tthrow new InputMismatchException(recognizer, this.nextTokensState, this.nextTokensContext);\r\n\t\t}\r\n\t}\r\n\r\n\t/**\r\n\t * This method implements the single-token insertion inline error recovery\r\n\t * strategy. 
It is called by {@link #recoverInline} if the single-token\r\n\t * deletion strategy fails to recover from the mismatched input. If this\r\n\t * method returns `true`, `recognizer` will be in error recovery\r\n\t * mode.\r\n\t *\r\n\t * This method determines whether or not single-token insertion is viable by\r\n\t * checking if the `LA(1)` input symbol could be successfully matched\r\n\t * if it were instead the `LA(2)` symbol. If this method returns\r\n\t * `true`, the caller is responsible for creating and inserting a\r\n\t * token with the correct type to produce this behavior.\r\n\t *\r\n\t * @param recognizer the parser instance\r\n\t * @returns `true` if single-token insertion is a viable recovery\r\n\t * strategy for the current mismatched input, otherwise `false`\r\n\t */\r\n\tprotected singleTokenInsertion(@NotNull recognizer: Parser): boolean {\r\n\t\tlet currentSymbolType: number = recognizer.inputStream.LA(1);\r\n\t\t// if current token is consistent with what could come after current\r\n\t\t// ATN state, then we know we're missing a token; error recovery\r\n\t\t// is free to conjure up and insert the missing token\r\n\t\tlet currentState = recognizer.interpreter.atn.states[recognizer.state];\r\n\t\tlet next: ATNState = currentState.transition(0).target;\r\n\t\tlet atn: ATN = recognizer.interpreter.atn;\r\n\t\tlet expectingAtLL2: IntervalSet = atn.nextTokens(next, PredictionContext.fromRuleContext(atn, recognizer.context));\r\n//\t\tconsole.warn(\"LT(2) set=\"+expectingAtLL2.toString(recognizer.getTokenNames()));\r\n\t\tif (expectingAtLL2.contains(currentSymbolType)) {\r\n\t\t\tthis.reportMissingToken(recognizer);\r\n\t\t\treturn true;\r\n\t\t}\r\n\t\treturn false;\r\n\t}\r\n\r\n\t/**\r\n\t * This method implements the single-token deletion inline error recovery\r\n\t * strategy. It is called by {@link #recoverInline} to attempt to recover\r\n\t * from mismatched input. 
If this method returns `undefined`, the parser and error\r\n\t * handler state will not have changed. If this method returns non-`undefined`,\r\n\t * `recognizer` will *not* be in error recovery mode since the\r\n\t * returned token was a successful match.\r\n\t *\r\n\t * If the single-token deletion is successful, this method calls\r\n\t * {@link #reportUnwantedToken} to report the error, followed by\r\n\t * {@link Parser#consume} to actually \"delete\" the extraneous token. Then,\r\n\t * before returning {@link #reportMatch} is called to signal a successful\r\n\t * match.\r\n\t *\r\n\t * @param recognizer the parser instance\r\n\t * @returns the successfully matched {@link Token} instance if single-token\r\n\t * deletion successfully recovers from the mismatched input, otherwise\r\n\t * `undefined`\r\n\t */\r\n\tprotected singleTokenDeletion(@NotNull recognizer: Parser): Token | undefined {\r\n\t\tlet nextTokenType: number = recognizer.inputStream.LA(2);\r\n\t\tlet expecting: IntervalSet = this.getExpectedTokens(recognizer);\r\n\t\tif (expecting.contains(nextTokenType)) {\r\n\t\t\tthis.reportUnwantedToken(recognizer);\r\n\t\t\t/*\r\n\t\t\tSystem.err.println(\"recoverFromMismatchedToken deleting \"+\r\n\t\t\t\t\t\t\t ((TokenStream)recognizer.inputStream).LT(1)+\r\n\t\t\t\t\t\t\t \" since \"+((TokenStream)recognizer.inputStream).LT(2)+\r\n\t\t\t\t\t\t\t \" is what we want\");\r\n\t\t\t*/\r\n\t\t\trecognizer.consume(); // simply delete extra token\r\n\t\t\t// we want to return the token we're actually matching\r\n\t\t\tlet matchedSymbol: Token = recognizer.currentToken;\r\n\t\t\tthis.reportMatch(recognizer); // we know current token is correct\r\n\t\t\treturn matchedSymbol;\r\n\t\t}\r\n\t\treturn undefined;\r\n\t}\r\n\r\n\t/** Conjure up a missing token during error recovery.\r\n\t *\r\n\t * The recognizer attempts to recover from single missing\r\n\t * symbols. But, actions might refer to that missing symbol.\r\n\t * For example, x=ID {f($x);}. 
The action clearly assumes\r\n\t * that there has been an identifier matched previously and that\r\n\t * $x points at that token. If that token is missing, but\r\n\t * the next token in the stream is what we want we assume that\r\n\t * this token is missing and we keep going. Because we\r\n\t * have to return some token to replace the missing token,\r\n\t * we have to conjure one up. This method gives the user control\r\n\t * over the tokens returned for missing tokens. Mostly,\r\n\t * you will want to create something special for identifier\r\n\t * tokens. For literals such as '{' and ',', the default\r\n\t * action in the parser or tree parser works. It simply creates\r\n\t * a CommonToken of the appropriate type. The text will be the token.\r\n\t * If you change what tokens must be created by the lexer,\r\n\t * override this method to create the appropriate tokens.\r\n\t */\r\n\t@NotNull\r\n\tprotected getMissingSymbol(@NotNull recognizer: Parser): Token {\r\n\t\tlet currentSymbol: Token = recognizer.currentToken;\r\n\t\tlet expecting: IntervalSet = this.getExpectedTokens(recognizer);\r\n\t\tlet expectedTokenType: number = Token.INVALID_TYPE;\r\n\t\tif (!expecting.isNil) {\r\n\t\t\t// get any element\r\n\t\t\texpectedTokenType = expecting.minElement;\r\n\t\t}\r\n\r\n\t\tlet tokenText: string;\r\n\t\tif (expectedTokenType === Token.EOF) {\r\n\t\t\ttokenText = \"\";\r\n\t\t} else {\r\n\t\t\ttokenText = \"\";\r\n\t\t}\r\n\t\tlet current: Token = currentSymbol;\r\n\t\tlet lookback = recognizer.inputStream.tryLT(-1);\r\n\t\tif (current.type === Token.EOF && lookback != null) {\r\n\t\t\tcurrent = lookback;\r\n\t\t}\r\n\r\n\t\treturn this.constructToken(recognizer.inputStream.tokenSource, expectedTokenType, tokenText, current);\r\n\t}\r\n\r\n\tprotected constructToken(\r\n\t\ttokenSource: TokenSource,\r\n\t\texpectedTokenType: number,\r\n\t\ttokenText: string,\r\n\t\tcurrent: Token): Token {\r\n\t\tlet factory: TokenFactory = tokenSource.tokenFactory;\r\n\t\tlet x = 
current.tokenSource;\r\n\t\tlet stream = x ? x.inputStream : undefined;\r\n\r\n\t\treturn factory.create(\r\n\t\t\t{ source: tokenSource, stream },\r\n\t\t\texpectedTokenType, tokenText,\r\n\t\t\tToken.DEFAULT_CHANNEL,\r\n\t\t\t-1, -1,\r\n\t\t\tcurrent.line, current.charPositionInLine);\r\n\t}\r\n\r\n\t@NotNull\r\n\tprotected getExpectedTokens(@NotNull recognizer: Parser): IntervalSet {\r\n\t\treturn recognizer.getExpectedTokens();\r\n\t}\r\n\r\n\t/** How should a token be displayed in an error message? The default\r\n\t * is to display just the text, but during development you might\r\n\t * want to have a lot of information spit out. Override in that case\r\n\t * to use t.toString() (which, for CommonToken, dumps everything about\r\n\t * the token). This is better than forcing you to override a method in\r\n\t * your token objects because you don't have to go modify your lexer\r\n\t * so that it creates a new Java type.\r\n\t */\r\n\tprotected getTokenErrorDisplay(t: Token | undefined): string {\r\n\t\tif (!t) {\r\n\t\t\treturn \"\";\r\n\t\t}\r\n\t\tlet s = this.getSymbolText(t);\r\n\t\tif (!s) {\r\n\t\t\tif (this.getSymbolType(t) === Token.EOF) {\r\n\t\t\t\ts = \"\";\r\n\t\t\t} else {\r\n\t\t\t\ts = `<${this.getSymbolType(t)}>`;\r\n\t\t\t}\r\n\t\t}\r\n\t\treturn this.escapeWSAndQuote(s);\r\n\t}\r\n\r\n\tprotected getSymbolText(@NotNull symbol: Token): string | undefined {\r\n\t\treturn symbol.text;\r\n\t}\r\n\r\n\tprotected getSymbolType(@NotNull symbol: Token): number {\r\n\t\treturn symbol.type;\r\n\t}\r\n\r\n\t@NotNull\r\n\tprotected escapeWSAndQuote(@NotNull s: string): string {\r\n//\t\tif ( s==null ) return s;\r\n\t\ts = s.replace(\"\\n\", \"\\\\n\");\r\n\t\ts = s.replace(\"\\r\", \"\\\\r\");\r\n\t\ts = s.replace(\"\\t\", \"\\\\t\");\r\n\t\treturn \"'\" + s + \"'\";\r\n\t}\r\n\r\n\t/* Compute the error recovery set for the current rule. 
During\r\n\t * rule invocation, the parser pushes the set of tokens that can\r\n\t * follow that rule reference on the stack; this amounts to\r\n\t * computing FIRST of what follows the rule reference in the\r\n\t * enclosing rule. See LinearApproximator.FIRST().\r\n\t * This local follow set only includes tokens\r\n\t * from within the rule; i.e., the FIRST computation done by\r\n\t * ANTLR stops at the end of a rule.\r\n\t *\r\n\t * EXAMPLE\r\n\t *\r\n\t * When you find a \"no viable alt exception\", the input is not\r\n\t * consistent with any of the alternatives for rule r. The best\r\n\t * thing to do is to consume tokens until you see something that\r\n\t * can legally follow a call to r *or* any rule that called r.\r\n\t * You don't want the exact set of viable next tokens because the\r\n\t * input might just be missing a token--you might consume the\r\n\t * rest of the input looking for one of the missing tokens.\r\n\t *\r\n\t * Consider grammar:\r\n\t *\r\n\t * a : '[' b ']'\r\n\t * | '(' b ')'\r\n\t * ;\r\n\t * b : c '^' INT ;\r\n\t * c : ID\r\n\t * | INT\r\n\t * ;\r\n\t *\r\n\t * At each rule invocation, the set of tokens that could follow\r\n\t * that rule is pushed on a stack. Here are the various\r\n\t * context-sensitive follow sets:\r\n\t *\r\n\t * FOLLOW(b1_in_a) = FIRST(']') = ']'\r\n\t * FOLLOW(b2_in_a) = FIRST(')') = ')'\r\n\t * FOLLOW(c_in_b) = FIRST('^') = '^'\r\n\t *\r\n\t * Upon erroneous input \"[]\", the call chain is\r\n\t *\r\n\t * a -> b -> c\r\n\t *\r\n\t * and, hence, the follow context stack is:\r\n\t *\r\n\t * depth follow set start of rule execution\r\n\t * 0 a (from main())\r\n\t * 1 ']' b\r\n\t * 2 '^' c\r\n\t *\r\n\t * Notice that ')' is not included, because b would have to have\r\n\t * been called from a different context in rule a for ')' to be\r\n\t * included.\r\n\t *\r\n\t * For error recovery, we cannot consider FOLLOW(c)\r\n\t * (context-sensitive or otherwise). 
We need the combined set of\r\n\t * all context-sensitive FOLLOW sets--the set of all tokens that\r\n\t * could follow any reference in the call chain. We need to\r\n\t * resync to one of those tokens. Note that FOLLOW(c)='^' and if\r\n\t * we resync'd to that token, we'd consume until EOF. We need to\r\n\t * sync to context-sensitive FOLLOWs for a, b, and c: {']','^'}.\r\n\t * In this case, for input \"[]\", LA(1) is ']' and in the set, so we would\r\n\t * not consume anything. After printing an error, rule c would\r\n\t * return normally. Rule b would not find the required '^' though.\r\n\t * At this point, it gets a mismatched token error and\r\n\t * exception (since LA(1) is not in the viable following token\r\n\t * set). The rule exception handler tries to recover, but finds\r\n\t * the same recovery set and doesn't consume anything. Rule b\r\n\t * exits normally returning to rule a. Now it finds the ']' (and\r\n\t * with the successful match exits errorRecovery mode).\r\n\t *\r\n\t * So, you can see that the parser walks up the call chain looking\r\n\t * for the token that was a member of the recovery set.\r\n\t *\r\n\t * Errors are not generated in errorRecovery mode.\r\n\t *\r\n\t * ANTLR's error recovery mechanism is based upon original ideas:\r\n\t *\r\n\t * \"Algorithms + Data Structures = Programs\" by Niklaus Wirth\r\n\t *\r\n\t * and\r\n\t *\r\n\t * \"A note on error recovery in recursive descent parsers\":\r\n\t * http://portal.acm.org/citation.cfm?id=947902.947905\r\n\t *\r\n\t * Later, Josef Grosch had some good ideas:\r\n\t *\r\n\t * \"Efficient and Comfortable Error Recovery in Recursive Descent\r\n\t * Parsers\":\r\n\t * ftp://www.cocolab.com/products/cocktail/doca4.ps/ell.ps.zip\r\n\t *\r\n\t * Like Grosch I implement context-sensitive FOLLOW sets that are combined\r\n\t * at run-time upon error to avoid overhead during parsing.\r\n\t */\r\n\t@NotNull\r\n\tprotected getErrorRecoverySet(@NotNull recognizer: Parser): IntervalSet {\r\n\t\tlet atn: 
ATN = recognizer.interpreter.atn;\r\n\t\tlet ctx: RuleContext | undefined = recognizer.context;\r\n\t\tlet recoverSet: IntervalSet = new IntervalSet();\r\n\t\twhile (ctx && ctx.invokingState >= 0) {\r\n\t\t\t// compute what follows who invoked us\r\n\t\t\tlet invokingState: ATNState = atn.states[ctx.invokingState];\r\n\t\t\tlet rt = invokingState.transition(0) as RuleTransition;\r\n\t\t\tlet follow: IntervalSet = atn.nextTokens(rt.followState);\r\n\t\t\trecoverSet.addAll(follow);\r\n\t\t\tctx = ctx._parent;\r\n\t\t}\r\n\t\trecoverSet.remove(Token.EPSILON);\r\n//\t\tSystem.out.println(\"recover set \"+recoverSet.toString(recognizer.getTokenNames()));\r\n\t\treturn recoverSet;\r\n\t}\r\n\r\n\t/** Consume tokens until one matches the given token set. */\r\n\tprotected consumeUntil(@NotNull recognizer: Parser, @NotNull set: IntervalSet): void {\r\n//\t\tSystem.err.println(\"consumeUntil(\"+set.toString(recognizer.getTokenNames())+\")\");\r\n\t\tlet ttype: number = recognizer.inputStream.LA(1);\r\n\t\twhile (ttype !== Token.EOF && !set.contains(ttype)) {\r\n\t\t\t//System.out.println(\"consume during recover LA(1)=\"+getTokenNames()[input.LA(1)]);\r\n//\t\t\trecognizer.inputStream.consume();\r\n\t\t\trecognizer.consume();\r\n\t\t\tttype = recognizer.inputStream.LA(1);\r\n\t\t}\r\n\t}\r\n}\r\n", "/*!\r\n * Copyright 2016 The ANTLR Project. All rights reserved.\r\n * Licensed under the BSD-3-Clause license. 
See LICENSE file in the project root for license information.\r\n */\r\n\r\n// ConvertTo-TS run at 2016-10-04T11:26:49.2855056-07:00\r\n\r\nimport { DefaultErrorStrategy } from \"./DefaultErrorStrategy\";\r\nimport { Parser } from \"./Parser\";\r\nimport { InputMismatchException } from \"./InputMismatchException\";\r\nimport { Override } from \"./Decorators\";\r\nimport { ParseCancellationException } from \"./misc/ParseCancellationException\";\r\nimport { ParserRuleContext } from \"./ParserRuleContext\";\r\nimport { RecognitionException } from \"./RecognitionException\";\r\nimport { Token } from \"./Token\";\r\n\r\n/**\r\n * This implementation of {@link ANTLRErrorStrategy} responds to syntax errors\r\n * by immediately canceling the parse operation with a\r\n * {@link ParseCancellationException}. The implementation ensures that the\r\n * {@link ParserRuleContext#exception} field is set for all parse tree nodes\r\n * that were not completed prior to encountering the error.\r\n *\r\n * This error strategy is useful in the following scenarios.\r\n *\r\n * * **Two-stage parsing:** This error strategy allows the first\r\n * stage of two-stage parsing to immediately terminate if an error is\r\n * encountered, and immediately fall back to the second stage. 
In addition to\r\n * avoiding wasted work by attempting to recover from errors here, the empty\r\n * implementation of {@link BailErrorStrategy#sync} improves the performance of\r\n * the first stage.\r\n * * **Silent validation:** When syntax errors are not being\r\n * reported or logged, and the parse result is simply ignored if errors occur,\r\n * the {@link BailErrorStrategy} avoids wasting work on recovering from errors\r\n * when the result will be ignored either way.\r\n *\r\n * ```\r\n * myparser.errorHandler = new BailErrorStrategy();\r\n * ```\r\n *\r\n * @see Parser.errorHandler\r\n */\r\nexport class BailErrorStrategy extends DefaultErrorStrategy {\r\n\t/** Instead of recovering from exception `e`, re-throw it wrapped\r\n\t * in a {@link ParseCancellationException} so it is not caught by the\r\n\t * rule function catches. Use {@link Exception#getCause()} to get the\r\n\t * original {@link RecognitionException}.\r\n\t */\r\n\t@Override\r\n\tpublic recover(recognizer: Parser, e: RecognitionException): void {\r\n\t\tfor (let context: ParserRuleContext | undefined = recognizer.context; context; context = context.parent) {\r\n\t\t\tcontext.exception = e;\r\n\t\t}\r\n\r\n\t\tthrow new ParseCancellationException(e);\r\n\t}\r\n\r\n\t/** Make sure we don't attempt to recover inline; if the parser\r\n\t * successfully recovers, it won't throw an exception.\r\n\t */\r\n\t@Override\r\n\tpublic recoverInline(recognizer: Parser): Token {\r\n\t\tlet e = new InputMismatchException(recognizer);\r\n\t\tfor (let context: ParserRuleContext | undefined = recognizer.context; context; context = context.parent) {\r\n\t\t\tcontext.exception = e;\r\n\t\t}\r\n\r\n\t\tthrow new ParseCancellationException(e);\r\n\t}\r\n\r\n\t/** Make sure we don't attempt to recover from problems in subrules. */\r\n\t@Override\r\n\tpublic sync(recognizer: Parser): void {\r\n\t\t// intentionally empty\r\n\t}\r\n}\r\n", "/*!\r\n * Copyright 2016 The ANTLR Project. 
All rights reserved.\r\n * Licensed under the BSD-3-Clause license. See LICENSE file in the project root for license information.\r\n */\r\n\r\n// ConvertTo-TS run at 2016-10-04T11:26:50.0659297-07:00\r\n\r\nimport { Interval } from \"./misc/Interval\";\r\nimport { IntStream } from \"./IntStream\";\r\n\r\n/** A source of characters for an ANTLR lexer. */\r\nexport interface CharStream extends IntStream {\r\n\t/**\r\n\t * This method returns the text for a range of characters within this input\r\n\t * stream. This method is guaranteed to not throw an exception if the\r\n\t * specified `interval` lies entirely within a marked range. For more\r\n\t * information about marked ranges, see {@link IntStream#mark}.\r\n\t *\r\n\t * @param interval an interval within the stream\r\n\t * @returns the text of the specified interval\r\n\t *\r\n\t * @throws NullPointerException if `interval` is `undefined`\r\n\t * @throws IllegalArgumentException if `interval.a < 0`, or if\r\n\t * `interval.b < interval.a - 1`, or if `interval.b` lies at or\r\n\t * past the end of the stream\r\n\t * @throws UnsupportedOperationException if the stream does not support\r\n\t * getting the text of the specified interval\r\n\t */\r\n\t//@NotNull\r\n\tgetText(/*@NotNull*/ interval: Interval): string;\r\n}\r\n", "/*!\r\n * Copyright 2016 The ANTLR Project. All rights reserved.\r\n * Licensed under the BSD-3-Clause license. 
See LICENSE file in the project root for license information.\r\n */\r\n\r\n// ConvertTo-TS run at 2016-10-04T11:26:51.1349829-07:00\r\n\r\n/**\r\n *\r\n * @author Sam Harwell\r\n */\r\nexport enum Dependents {\r\n\r\n\t/**\r\n\t * The element is dependent upon the specified rule.\r\n\t */\r\n\tSELF,\r\n\t/**\r\n\t * The element is dependent upon the set of the specified rule's parents\r\n\t * (rules which directly reference it).\r\n\t */\r\n\tPARENTS,\r\n\t/**\r\n\t * The element is dependent upon the set of the specified rule's children\r\n\t * (rules which it directly references).\r\n\t */\r\n\tCHILDREN,\r\n\t/**\r\n\t * The element is dependent upon the set of the specified rule's ancestors\r\n\t * (the transitive closure of `PARENTS` rules).\r\n\t */\r\n\tANCESTORS,\r\n\t/**\r\n\t * The element is dependent upon the set of the specified rule's descendants\r\n\t * (the transitive closure of `CHILDREN` rules).\r\n\t */\r\n\tDESCENDANTS,\r\n\t/**\r\n\t * The element is dependent upon the set of the specified rule's siblings\r\n\t * (the union of `CHILDREN` of its `PARENTS`).\r\n\t */\r\n\tSIBLINGS,\r\n\t/**\r\n\t * The element is dependent upon the set of the specified rule's preceeding\r\n\t * siblings (the union of `CHILDREN` of its `PARENTS` which\r\n\t * appear before a reference to the rule).\r\n\t */\r\n\tPRECEEDING_SIBLINGS,\r\n\t/**\r\n\t * The element is dependent upon the set of the specified rule's following\r\n\t * siblings (the union of `CHILDREN` of its `PARENTS` which\r\n\t * appear after a reference to the rule).\r\n\t */\r\n\tFOLLOWING_SIBLINGS,\r\n\t/**\r\n\t * The element is dependent upon the set of the specified rule's preceeding\r\n\t * elements (rules which might end before the start of the specified rule\r\n\t * while parsing). 
This is calculated by taking the\r\n\t * `PRECEEDING_SIBLINGS` of the rule and each of its\r\n\t * `ANCESTORS`, along with the `DESCENDANTS` of those\r\n\t * elements.\r\n\t */\r\n\tPRECEEDING,\r\n\t/**\r\n\t * The element is dependent upon the set of the specified rule's following\r\n\t * elements (rules which might start after the end of the specified rule\r\n\t * while parsing). This is calculated by taking the\r\n\t * `FOLLOWING_SIBLINGS` of the rule and each of its\r\n\t * `ANCESTORS`, along with the `DESCENDANTS` of those\r\n\t * elements.\r\n\t */\r\n\tFOLLOWING,\r\n}\r\n", "/*!\r\n * Copyright 2016 The ANTLR Project. All rights reserved.\r\n * Licensed under the BSD-3-Clause license. See LICENSE file in the project root for license information.\r\n */\r\n\r\n// ConvertTo-TS run at 2016-10-04T11:26:51.2133685-07:00\r\n\r\nimport { ATNConfig } from \"./atn/ATNConfig\";\r\nimport { ATNConfigSet } from \"./atn/ATNConfigSet\";\r\nimport { BitSet } from \"./misc/BitSet\";\r\nimport { DFA } from \"./dfa/DFA\";\r\nimport { Parser } from \"./Parser\";\r\nimport { ParserErrorListener } from \"./ParserErrorListener\";\r\nimport { RecognitionException } from \"./RecognitionException\";\r\nimport { Recognizer } from \"./Recognizer\";\r\nimport { SimulatorState } from \"./atn/SimulatorState\";\r\nimport { Token } from \"./Token\";\r\nimport { Override, NotNull } from \"./Decorators\";\r\nimport { Interval } from \"./misc/Interval\";\r\n\r\n/**\r\n * This implementation of {@link ANTLRErrorListener} can be used to identify\r\n * certain potential correctness and performance problems in grammars. 
\"Reports\"\r\n * are made by calling {@link Parser#notifyErrorListeners} with the appropriate\r\n * message.\r\n *\r\n * * **Ambiguities**: These are cases where more than one path through the\r\n * grammar can match the input.\r\n * * **Weak context sensitivity**: These are cases where full-context\r\n * prediction resolved an SLL conflict to a unique alternative which equaled the\r\n * minimum alternative of the SLL conflict.\r\n * * **Strong (forced) context sensitivity**: These are cases where the\r\n * full-context prediction resolved an SLL conflict to a unique alternative,\r\n * *and* the minimum alternative of the SLL conflict was found to not be\r\n * a truly viable alternative. Two-stage parsing cannot be used for inputs where\r\n * this situation occurs.\r\n *\r\n * @author Sam Harwell\r\n */\r\nexport class DiagnosticErrorListener implements ParserErrorListener {\r\n\r\n\t/**\r\n\t * Initializes a new instance of {@link DiagnosticErrorListener}, specifying\r\n\t * whether all ambiguities or only exact ambiguities are reported.\r\n\t *\r\n\t * @param exactOnly `true` to report only exact ambiguities, otherwise\r\n\t * `false` to report all ambiguities. 
Defaults to true.\r\n\t */\r\n\tconstructor(protected exactOnly: boolean = true) {\r\n\t\tthis.exactOnly = exactOnly;\r\n\t}\r\n\r\n\t@Override\r\n\tpublic syntaxError(\r\n\t\t/*@NotNull*/\r\n\t\trecognizer: Recognizer,\r\n\t\toffendingSymbol: T | undefined,\r\n\t\tline: number,\r\n\t\tcharPositionInLine: number,\r\n\t\t/*@NotNull*/\r\n\t\tmsg: string,\r\n\t\te: RecognitionException | undefined): void\r\n\t{\r\n\t\t// intentionally empty\r\n\t}\r\n\r\n\t@Override\r\n\tpublic reportAmbiguity(\r\n\t\t@NotNull recognizer: Parser,\r\n\t\t@NotNull dfa: DFA,\r\n\t\tstartIndex: number,\r\n\t\tstopIndex: number,\r\n\t\texact: boolean,\r\n\t\tambigAlts: BitSet | undefined,\r\n\t\t@NotNull configs: ATNConfigSet): void {\r\n\t\tif (this.exactOnly && !exact) {\r\n\t\t\treturn;\r\n\t\t}\r\n\r\n\t\tlet decision: string = this.getDecisionDescription(recognizer, dfa);\r\n\t\tlet conflictingAlts: BitSet = this.getConflictingAlts(ambigAlts, configs);\r\n\t\tlet text: string = recognizer.inputStream.getText(Interval.of(startIndex, stopIndex));\r\n\t\tlet message: string = `reportAmbiguity d=${decision}: ambigAlts=${conflictingAlts}, input='${text}'`;\r\n\t\trecognizer.notifyErrorListeners(message);\r\n\t}\r\n\r\n\t@Override\r\n\tpublic reportAttemptingFullContext(\r\n\t\t@NotNull recognizer: Parser,\r\n\t\t@NotNull dfa: DFA,\r\n\t\tstartIndex: number,\r\n\t\tstopIndex: number,\r\n\t\tconflictingAlts: BitSet | undefined,\r\n\t\t@NotNull conflictState: SimulatorState): void {\r\n\t\tlet format: string = \"reportAttemptingFullContext d=%s, input='%s'\";\r\n\t\tlet decision: string = this.getDecisionDescription(recognizer, dfa);\r\n\t\tlet text: string = recognizer.inputStream.getText(Interval.of(startIndex, stopIndex));\r\n\t\tlet message: string = `reportAttemptingFullContext d=${decision}, input='${text}'`;\r\n\t\trecognizer.notifyErrorListeners(message);\r\n\t}\r\n\r\n\t@Override\r\n\tpublic reportContextSensitivity(\r\n\t\t@NotNull recognizer: Parser,\r\n\t\t@NotNull dfa: 
DFA,\r\n\t\tstartIndex: number,\r\n\t\tstopIndex: number,\r\n\t\tprediction: number,\r\n\t\t@NotNull acceptState: SimulatorState): void {\r\n\t\tlet format: string = \"reportContextSensitivity d=%s, input='%s'\";\r\n\t\tlet decision: string = this.getDecisionDescription(recognizer, dfa);\r\n\t\tlet text: string = recognizer.inputStream.getText(Interval.of(startIndex, stopIndex));\r\n\t\tlet message: string = `reportContextSensitivity d=${decision}, input='${text}'`;\r\n\t\trecognizer.notifyErrorListeners(message);\r\n\t}\r\n\r\n\tprotected getDecisionDescription(\r\n\t\t@NotNull recognizer: Parser,\r\n\t\t@NotNull dfa: DFA): string {\r\n\t\tlet decision: number = dfa.decision;\r\n\t\tlet ruleIndex: number = dfa.atnStartState.ruleIndex;\r\n\r\n\t\tlet ruleNames: string[] = recognizer.ruleNames;\r\n\t\tif (ruleIndex < 0 || ruleIndex >= ruleNames.length) {\r\n\t\t\treturn decision.toString();\r\n\t\t}\r\n\r\n\t\tlet ruleName: string = ruleNames[ruleIndex];\r\n\t\tif (!ruleName) {\r\n\t\t\treturn decision.toString();\r\n\t\t}\r\n\r\n\t\treturn `${decision} (${ruleName})`;\r\n\t}\r\n\r\n\t/**\r\n\t * Computes the set of conflicting or ambiguous alternatives from a\r\n\t * configuration set, if that information was not already provided by the\r\n\t * parser.\r\n\t *\r\n\t * @param reportedAlts The set of conflicting or ambiguous alternatives, as\r\n\t * reported by the parser.\r\n\t * @param configs The conflicting or ambiguous configuration set.\r\n\t * @returns Returns `reportedAlts` if it is not `undefined`, otherwise\r\n\t * returns the set of alternatives represented in `configs`.\r\n\t */\r\n\t@NotNull\r\n\tprotected getConflictingAlts(reportedAlts: BitSet | undefined, @NotNull configs: ATNConfigSet): BitSet {\r\n\t\tif (reportedAlts != null) {\r\n\t\t\treturn reportedAlts;\r\n\t\t}\r\n\r\n\t\tlet result: BitSet = new BitSet();\r\n\t\tfor (let config of configs) {\r\n\t\t\tresult.set(config.alt);\r\n\t\t}\r\n\r\n\t\treturn result;\r\n\t}\r\n}\r\n", "/*!\r\n * 
Copyright 2016 The ANTLR Project. All rights reserved.\r\n * Licensed under the BSD-3-Clause license. See LICENSE file in the project root for license information.\r\n */\r\n\r\n// ConvertTo-TS run at 2016-10-04T11:26:51.9954566-07:00\r\n\r\nimport { ATN } from \"./atn/ATN\";\r\nimport { ATNType } from \"./atn/ATNType\";\r\nimport { CharStream } from \"./CharStream\";\r\nimport { Lexer } from \"./Lexer\";\r\nimport { LexerATNSimulator } from \"./atn/LexerATNSimulator\";\r\nimport { NotNull } from \"./Decorators\";\r\nimport { Override } from \"./Decorators\";\r\nimport { Vocabulary } from \"./Vocabulary\";\r\n\r\nexport class LexerInterpreter extends Lexer {\r\n\tprotected _grammarFileName: string;\r\n\tprotected _atn: ATN;\r\n\r\n\tprotected _ruleNames: string[];\r\n\tprotected _channelNames: string[];\r\n\tprotected _modeNames: string[];\r\n\t@NotNull\r\n\tprivate _vocabulary: Vocabulary;\r\n\r\n\tconstructor(grammarFileName: string, @NotNull vocabulary: Vocabulary, ruleNames: string[], channelNames: string[], modeNames: string[], atn: ATN, input: CharStream) {\r\n\t\tsuper(input);\r\n\r\n\t\tif (atn.grammarType !== ATNType.LEXER) {\r\n\t\t\tthrow new Error(\"IllegalArgumentException: The ATN must be a lexer ATN.\");\r\n\t\t}\r\n\r\n\t\tthis._grammarFileName = grammarFileName;\r\n\t\tthis._atn = atn;\r\n\r\n\t\tthis._ruleNames = ruleNames.slice(0);\r\n\t\tthis._channelNames = channelNames.slice(0);\r\n\t\tthis._modeNames = modeNames.slice(0);\r\n\t\tthis._vocabulary = vocabulary;\r\n\t\tthis._interp = new LexerATNSimulator(atn, this);\r\n\t}\r\n\r\n\t@Override\r\n\tget atn(): ATN {\r\n\t\treturn this._atn;\r\n\t}\r\n\r\n\t@Override\r\n\tget grammarFileName(): string {\r\n\t\treturn this._grammarFileName;\r\n\t}\r\n\r\n\t@Override\r\n\tget ruleNames(): string[] {\r\n\t\treturn this._ruleNames;\r\n\t}\r\n\r\n\t@Override\r\n\tget channelNames(): string[] {\r\n\t\treturn this._channelNames;\r\n\t}\r\n\r\n\t@Override\r\n\tget modeNames(): string[] {\r\n\t\treturn 
this._modeNames;\r\n\t}\r\n\r\n\t@Override\r\n\tget vocabulary(): Vocabulary {\r\n\t\treturn this._vocabulary;\r\n\t}\r\n}\r\n", "/*!\r\n * Copyright 2016 The ANTLR Project. All rights reserved.\r\n * Licensed under the BSD-3-Clause license. See LICENSE file in the project root for license information.\r\n */\r\n\r\n// ConvertTo-TS run at 2016-10-04T11:26:52.9471863-07:00\r\nimport { ANTLRErrorListener } from \"./ANTLRErrorListener\";\r\nimport { ATNConfigSet } from \"./atn/ATNConfigSet\";\r\nimport { BitSet } from \"./misc/BitSet\";\r\nimport { DFA } from \"./dfa/DFA\";\r\nimport { Parser } from \"./Parser\";\r\nimport { SimulatorState } from \"./atn/SimulatorState\";\r\nimport { Token } from \"./Token\";\r\nimport * as Stubs from \"./misc/Stubs\";\r\nimport * as Exception from \"./RecognitionException\";\r\n\r\n/** How to emit recognition errors for parsers.\r\n */\r\nexport interface ParserErrorListener extends ANTLRErrorListener {\r\n\t/**\r\n\t * This method is called by the parser when a full-context prediction\r\n\t * results in an ambiguity.\r\n\t *\r\n\t * Each full-context prediction which does not result in a syntax error\r\n\t * will call either {@link #reportContextSensitivity} or\r\n\t * {@link #reportAmbiguity}.\r\n\t *\r\n\t * When `ambigAlts` is not `undefined`, it contains the set of potentially\r\n\t * viable alternatives identified by the prediction algorithm. When\r\n\t * `ambigAlts` is `undefined`, use\r\n\t * {@link ATNConfigSet#getRepresentedAlternatives} to obtain the represented\r\n\t * alternatives from the `configs` argument.\r\n\t *\r\n\t * When `exact` is `true`, *all* of the potentially\r\n\t * viable alternatives are truly viable, i.e. this is reporting an exact\r\n\t * ambiguity. 
When `exact` is `false`, *at least two* of\r\n\t * the potentially viable alternatives are viable for the current input, but\r\n\t * the prediction algorithm terminated as soon as it determined that at\r\n\t * least the *minimum* potentially viable alternative is truly\r\n\t * viable.\r\n\t *\r\n\t * When the {@link PredictionMode#LL_EXACT_AMBIG_DETECTION} prediction\r\n\t * mode is used, the parser is required to identify exact ambiguities so\r\n\t * `exact` will always be `true`.\r\n\t *\r\n\t * @param recognizer the parser instance\r\n\t * @param dfa the DFA for the current decision\r\n\t * @param startIndex the input index where the decision started\r\n\t * @param stopIndex the input input where the ambiguity was identified\r\n\t * @param exact `true` if the ambiguity is exactly known, otherwise\r\n\t * `false`. This is always `true` when\r\n\t * {@link PredictionMode#LL_EXACT_AMBIG_DETECTION} is used.\r\n\t * @param ambigAlts the potentially ambiguous alternatives, or `undefined`\r\n\t * to indicate that the potentially ambiguous alternatives are the complete\r\n\t * set of represented alternatives in `configs`\r\n\t * @param configs the ATN configuration set where the ambiguity was\r\n\t * identified\r\n\t */\r\n\treportAmbiguity?: (\r\n\t\t/*@NotNull*/\r\n\t\trecognizer: Parser,\r\n\t\t/*@NotNull*/\r\n\t\tdfa: DFA,\r\n\t\tstartIndex: number,\r\n\t\tstopIndex: number,\r\n\t\texact: boolean,\r\n\t\tambigAlts: BitSet | undefined,\r\n\t\t/*@NotNull*/\r\n\t\tconfigs: ATNConfigSet) => void;\r\n\r\n\t/**\r\n\t * This method is called when an SLL conflict occurs and the parser is about\r\n\t * to use the full context information to make an LL decision.\r\n\t *\r\n\t * If one or more configurations in `configs` contains a semantic\r\n\t * predicate, the predicates are evaluated before this method is called. 
The\r\n\t * subset of alternatives which are still viable after predicates are\r\n\t * evaluated is reported in `conflictingAlts`.\r\n\t *\r\n\t * @param recognizer the parser instance\r\n\t * @param dfa the DFA for the current decision\r\n\t * @param startIndex the input index where the decision started\r\n\t * @param stopIndex the input index where the SLL conflict occurred\r\n\t * @param conflictingAlts The specific conflicting alternatives. If this is\r\n\t * `undefined`, the conflicting alternatives are all alternatives\r\n\t * represented in `configs`.\r\n\t * @param conflictState the simulator state when the SLL conflict was\r\n\t * detected\r\n\t */\r\n\treportAttemptingFullContext?: (\r\n\t\t/*@NotNull*/\r\n\t\trecognizer: Parser,\r\n\t\t/*@NotNull*/\r\n\t\tdfa: DFA,\r\n\t\tstartIndex: number,\r\n\t\tstopIndex: number,\r\n\t\tconflictingAlts: BitSet | undefined,\r\n\t\t/*@NotNull*/\r\n\t\tconflictState: SimulatorState) => void;\r\n\r\n\t/**\r\n\t * This method is called by the parser when a full-context prediction has a\r\n\t * unique result.\r\n\t *\r\n\t * Each full-context prediction which does not result in a syntax error\r\n\t * will call either {@link #reportContextSensitivity} or\r\n\t * {@link #reportAmbiguity}.\r\n\t *\r\n\t * For prediction implementations that only evaluate full-context\r\n\t * predictions when an SLL conflict is found (including the default\r\n\t * {@link ParserATNSimulator} implementation), this method reports cases\r\n\t * where SLL conflicts were resolved to unique full-context predictions,\r\n\t * i.e. the decision was context-sensitive. This report does not necessarily\r\n\t * indicate a problem, and it may appear even in completely unambiguous\r\n\t * grammars.\r\n\t *\r\n\t * `configs` may have more than one represented alternative if the\r\n\t * full-context prediction algorithm does not evaluate predicates before\r\n\t * beginning the full-context prediction. 
In all cases, the final prediction\r\n\t * is passed as the `prediction` argument.\r\n\t *\r\n\t * Note that the definition of \"context sensitivity\" in this method\r\n\t * differs from the concept in {@link DecisionInfo#contextSensitivities}.\r\n\t * This method reports all instances where an SLL conflict occurred but LL\r\n\t * parsing produced a unique result, whether or not that unique result\r\n\t * matches the minimum alternative in the SLL conflicting set.\r\n\t *\r\n\t * @param recognizer the parser instance\r\n\t * @param dfa the DFA for the current decision\r\n\t * @param startIndex the input index where the decision started\r\n\t * @param stopIndex the input index where the context sensitivity was\r\n\t * finally determined\r\n\t * @param prediction the unambiguous result of the full-context prediction\r\n\t * @param acceptState the simulator state when the unambiguous prediction\r\n\t * was determined\r\n\t */\r\n\treportContextSensitivity?: (\r\n\t\t/*@NotNull*/\r\n\t\trecognizer: Parser,\r\n\t\t/*@NotNull*/\r\n\t\tdfa: DFA,\r\n\t\tstartIndex: number,\r\n\t\tstopIndex: number,\r\n\t\tprediction: number,\r\n\t\t/*@NotNull*/\r\n\t\tacceptState: SimulatorState) => void;\r\n}\r\n", "/*!\r\n * Copyright 2016 The ANTLR Project. All rights reserved.\r\n * Licensed under the BSD-3-Clause license. 
See LICENSE file in the project root for license information.\r\n */\r\n\r\n// ConvertTo-TS run at 2016-10-04T11:26:57.4741196-07:00\r\n\r\nimport { ATN } from \"./atn/ATN\";\r\nimport { Override } from \"./Decorators\";\r\nimport { ParserRuleContext } from \"./ParserRuleContext\";\r\n\r\n/** A handy class for use with\r\n *\r\n * options {contextSuperClass=org.antlr.v4.runtime.RuleContextWithAltNum;}\r\n *\r\n * that provides a backing field / impl for the outer alternative number\r\n * matched for an internal parse tree node.\r\n *\r\n * I'm only putting into Java runtime as I'm certain I'm the only one that\r\n * will really every use this.\r\n */\r\nexport class RuleContextWithAltNum extends ParserRuleContext {\r\n\tprivate _altNumber: number;\r\n\r\n\tconstructor();\r\n\tconstructor(parent: ParserRuleContext | undefined, invokingStateNumber: number);\r\n\tconstructor(parent?: ParserRuleContext, invokingStateNumber?: number) {\r\n\t\tif (invokingStateNumber !== undefined) {\r\n\t\t\tsuper(parent, invokingStateNumber);\r\n\t\t} else {\r\n\t\t\tsuper();\r\n\t\t}\r\n\r\n\t\tthis._altNumber = ATN.INVALID_ALT_NUMBER;\r\n\t}\r\n\r\n\t@Override\r\n\tget altNumber(): number {\r\n\t\treturn this._altNumber;\r\n\t}\r\n\r\n\t// @Override\r\n\tset altNumber(altNum: number) {\r\n\t\tthis._altNumber = altNum;\r\n\t}\r\n}\r\n", "/*!\r\n * Copyright 2016 The ANTLR Project. All rights reserved.\r\n * Licensed under the BSD-3-Clause license. 
See LICENSE file in the project root for license information.\r\n */\r\n\r\n// ConvertTo-TS run at 2016-10-04T11:26:57.6271221-07:00\r\n\r\nimport { Dependents } from \"./Dependents\";\r\nimport { Parser } from \"./Parser\";\r\n\r\n/**\r\n * Declares a dependency upon a grammar rule, along with a set of zero or more dependent rules.\r\n *\r\n * Version numbers within a grammar should be assigned on a monotonically increasing basis to allow for accurate\r\n * tracking of dependent rules.\r\n *\r\n * @author Sam Harwell\r\n */\r\nexport function RuleDependency(dependency: DependencySpecification) {\r\n\treturn (target: object, propertyKey: PropertyKey, propertyDescriptor: PropertyDescriptor) => {\r\n\t\t// intentionally empty\r\n\t};\r\n}\r\n\r\nexport interface DependencySpecification {\r\n\treadonly recognizer: { new (...args: any[]): Parser; };\r\n\r\n\treadonly rule: number;\r\n\r\n\treadonly version: number;\r\n\r\n\t/**\r\n\t * Specifies the set of grammar rules related to `rule` which the annotated element depends on. Even when absent\r\n\t * from this set, the annotated element is implicitly dependent upon the explicitly specified `rule`, which\r\n\t * corresponds to the `Dependents.SELF` element.\r\n\t *\r\n\t * By default, the annotated element is dependent upon the specified `rule` and its `Dependents.PARENTS`, i.e. the\r\n\t * rule within one level of context information. The parents are included since the most frequent assumption about a\r\n\t * rule is where it's used in the grammar.\r\n\t */\r\n\treadonly dependents?: Dependents[];\r\n}\r\n", "/*!\r\n * Copyright 2016 The ANTLR Project. All rights reserved.\r\n * Licensed under the BSD-3-Clause license. 
See LICENSE file in the project root for license information.\r\n */\r\n\r\n// ConvertTo-TS run at 2016-10-04T11:26:57.7170027-07:00\r\n\r\nimport { Parser } from \"./Parser\";\r\nimport { ParserRuleContext } from \"./ParserRuleContext\";\r\n\r\n/**\r\n *\r\n * @author Sam Harwell\r\n */\r\nexport function RuleVersion(version: number) {\r\n\r\n\treturn (target: Parser, propertyKey: PropertyKey, propertyDescriptor: TypedPropertyDescriptor<(...args: any[]) => T>) => {\r\n\t\t// intentionally empty\r\n\t};\r\n\r\n}\r\n", "/*!\r\n * Copyright 2016 The ANTLR Project. All rights reserved.\r\n * Licensed under the BSD-3-Clause license. See LICENSE file in the project root for license information.\r\n */\r\n\r\n// ConvertTo-TS run at 2016-10-04T11:26:57.8783640-07:00\r\n\r\nimport { CharStream } from \"./CharStream\";\r\nimport { Token } from \"./Token\";\r\nimport { TokenSource } from \"./TokenSource\";\r\n\r\n/** The default mechanism for creating tokens. It's used by default in Lexer and\r\n * the error handling strategy (to create missing tokens). Notifying the parser\r\n * of a new factory means that it notifies its token source and error strategy.\r\n */\r\nexport interface TokenFactory {\r\n\t/** This is the method used to create tokens in the lexer and in the\r\n\t * error handling strategy. If text!=undefined, than the start and stop positions\r\n\t * are wiped to -1 in the text override is set in the CommonToken.\r\n\t */\r\n\t//@NotNull\r\n\tcreate(\r\n\t\t/*@NotNull*/\r\n\t\tsource: { source?: TokenSource, stream?: CharStream },\r\n\t\ttype: number,\r\n\t\ttext: string | undefined,\r\n\t\tchannel: number,\r\n\t\tstart: number,\r\n\t\tstop: number,\r\n\t\tline: number,\r\n\t\tcharPositionInLine: number): Token;\r\n\r\n\t/** Generically useful */\r\n\t//@NotNull\r\n\tcreateSimple(type: number, text: string): Token;\r\n}\r\n", "/*!\r\n * Copyright 2016 The ANTLR Project. All rights reserved.\r\n * Licensed under the BSD-3-Clause license. 
See LICENSE file in the project root for license information.\r\n */\r\n\r\n// ConvertTo-TS run at 2016-10-04T11:26:57.9604200-07:00\r\n\r\nimport { CharStream } from \"./CharStream\";\r\nimport { Token } from \"./Token\";\r\nimport { TokenFactory } from \"./TokenFactory\";\r\n\r\n/**\r\n * A source of tokens must provide a sequence of tokens via {@link #nextToken()}\r\n * and also must reveal it's source of characters; {@link CommonToken}'s text is\r\n * computed from a {@link CharStream}; it only store indices into the char\r\n * stream.\r\n *\r\n * Errors from the lexer are never passed to the parser. Either you want to keep\r\n * going or you do not upon token recognition error. If you do not want to\r\n * continue lexing then you do not want to continue parsing. Just throw an\r\n * exception not under {@link RecognitionException} and Java will naturally toss\r\n * you all the way out of the recognizers. If you want to continue lexing then\r\n * you should not throw an exception to the parser--it has already requested a\r\n * token. Keep lexing until you get a valid one. Just report errors and keep\r\n * going, looking for a valid token.\r\n */\r\nexport interface TokenSource {\r\n\t/**\r\n\t * Return a {@link Token} object from your input stream (usually a\r\n\t * {@link CharStream}). Do not fail/return upon lexing error; keep chewing\r\n\t * on the characters until you get a good one; errors are not passed through\r\n\t * to the parser.\r\n\t */\r\n\t//@NotNull\r\n\tnextToken(): Token;\r\n\r\n\t/**\r\n\t * Get the line number for the current position in the input stream. The\r\n\t * first line in the input is line 1.\r\n\t *\r\n\t * @returns The line number for the current position in the input stream, or\r\n\t * 0 if the current token source does not track line numbers.\r\n\t */\r\n\treadonly line: number;\r\n\r\n\t/**\r\n\t * Get the index into the current line for the current position in the input\r\n\t * stream. 
The first character on a line has position 0.\r\n\t *\r\n\t * @returns The line number for the current position in the input stream, or\r\n\t * -1 if the current token source does not track character positions.\r\n\t */\r\n\treadonly charPositionInLine: number;\r\n\r\n\t/**\r\n\t * Get the {@link CharStream} from which this token source is currently\r\n\t * providing tokens.\r\n\t *\r\n\t * @returns The {@link CharStream} associated with the current position in\r\n\t * the input, or `undefined` if no input stream is available for the token\r\n\t * source.\r\n\t */\r\n\treadonly inputStream: CharStream | undefined;\r\n\r\n\t/**\r\n\t * Gets the name of the underlying input source. This method returns a\r\n\t * non-undefined, non-empty string. If such a name is not known, this method\r\n\t * returns {@link IntStream#UNKNOWN_SOURCE_NAME}.\r\n\t */\r\n\t//@NotNull\r\n\treadonly sourceName: string;\r\n\r\n\t/**\r\n\t * Gets or sets the `TokenFactory` this token source is currently using for\r\n\t * creating `Token` objects from the input.\r\n\t */\r\n\t//@NotNull\r\n\ttokenFactory: TokenFactory;\r\n}\r\n", "/*!\r\n * Copyright 2016 The ANTLR Project. All rights reserved.\r\n * Licensed under the BSD-3-Clause license. See LICENSE file in the project root for license information.\r\n */\r\n\r\n// ConvertTo-TS run at 2016-10-04T11:26:58.0433998-07:00\r\n\r\nimport { Interval } from \"./misc/Interval\";\r\nimport { IntStream } from \"./IntStream\";\r\nimport { RuleContext } from \"./RuleContext\";\r\nimport { Token } from \"./Token\";\r\nimport { TokenSource } from \"./TokenSource\";\r\n\r\n/**\r\n * An {@link IntStream} whose symbols are {@link Token} instances.\r\n */\r\nexport interface TokenStream extends IntStream {\r\n\t/**\r\n\t * Get the `Token` instance associated with the value returned by `LA(k)`. This method has the same pre- and\r\n\t * post-conditions as `IntStream.LA`. 
In addition, when the preconditions of this method are met, the return value\r\n\t * is non-undefined and the value of `LT(k).type === LA(k)`.\r\n\t *\r\n\t * A `RangeError` is thrown if `k<0` and fewer than `-k` calls to `consume()` have occurred from the beginning of\r\n\t * the stream before calling this method.\r\n\t *\r\n\t * See `IntStream.LA`\r\n\t */\r\n\tLT(k: number): Token;\r\n\r\n\t/**\r\n\t * Get the `Token` instance associated with the value returned by `LA(k)`. This method has the same pre- and\r\n\t * post-conditions as `IntStream.LA`. In addition, when the preconditions of this method are met, the return value\r\n\t * is non-undefined and the value of `tryLT(k).type === LA(k)`.\r\n\t *\r\n\t * The return value is `undefined` if `k<0` and fewer than `-k` calls to `consume()` have occurred from the\r\n\t * beginning of the stream before calling this method.\r\n\t *\r\n\t * See `IntStream.LA`\r\n\t */\r\n\ttryLT(k: number): Token | undefined;\r\n\r\n\t/**\r\n\t * Gets the {@link Token} at the specified `index` in the stream. When\r\n\t * the preconditions of this method are met, the return value is non-undefined.\r\n\t *\r\n\t * The preconditions for this method are the same as the preconditions of\r\n\t * {@link IntStream#seek}. If the behavior of `seek(index)` is\r\n\t * unspecified for the current state and given `index`, then the\r\n\t * behavior of this method is also unspecified.\r\n\t *\r\n\t * The symbol referred to by `index` differs from `seek()` only\r\n\t * in the case of filtering streams where `index` lies before the end\r\n\t * of the stream. 
Unlike `seek()`, this method does not adjust\r\n\t * `index` to point to a non-ignored symbol.\r\n\t *\r\n\t * @throws IllegalArgumentException if {code index} is less than 0\r\n\t * @throws UnsupportedOperationException if the stream does not support\r\n\t * retrieving the token at the specified index\r\n\t */\r\n\t//@NotNull\r\n\tget(i: number): Token;\r\n\r\n\t/**\r\n\t * Gets the underlying {@link TokenSource} which provides tokens for this\r\n\t * stream.\r\n\t */\r\n\t//@NotNull\r\n\treadonly tokenSource: TokenSource;\r\n\r\n\t/**\r\n\t * Return the text of all tokens within the specified `interval`. This\r\n\t * method behaves like the following code (including potential exceptions\r\n\t * for violating preconditions of {@link #get}, but may be optimized by the\r\n\t * specific implementation.\r\n\t *\r\n\t * ```\r\n\t * TokenStream stream = ...;\r\n\t * String text = \"\";\r\n\t * for (int i = interval.a; i <= interval.b; i++) {\r\n\t * text += stream.get(i).text;\r\n\t * }\r\n\t * ```\r\n\t *\r\n\t * @param interval The interval of tokens within this stream to get text\r\n\t * for.\r\n\t * @returns The text of all tokens within the specified interval in this\r\n\t * stream.\r\n\t *\r\n\t * @throws NullPointerException if `interval` is `undefined`\r\n\t */\r\n\t//@NotNull\r\n\tgetText(/*@NotNull*/ interval: Interval): string;\r\n\r\n\t/**\r\n\t * Return the text of all tokens in the stream. 
This method behaves like the\r\n\t * following code, including potential exceptions from the calls to\r\n\t * {@link IntStream#size} and {@link #getText(Interval)}, but may be\r\n\t * optimized by the specific implementation.\r\n\t *\r\n\t * ```\r\n\t * TokenStream stream = ...;\r\n\t * String text = stream.getText(new Interval(0, stream.size));\r\n\t * ```\r\n\t *\r\n\t * @returns The text of all tokens in the stream.\r\n\t */\r\n\t//@NotNull\r\n\tgetText(): string;\r\n\r\n\t/**\r\n\t * Return the text of all tokens in the source interval of the specified\r\n\t * context. This method behaves like the following code, including potential\r\n\t * exceptions from the call to {@link #getText(Interval)}, but may be\r\n\t * optimized by the specific implementation.\r\n\t *\r\n\t * If `ctx.sourceInterval` does not return a valid interval of\r\n\t * tokens provided by this stream, the behavior is unspecified.\r\n\t *\r\n\t * ```\r\n\t * TokenStream stream = ...;\r\n\t * String text = stream.getText(ctx.sourceInterval);\r\n\t * ```\r\n\t *\r\n\t * @param ctx The context providing the source interval of tokens to get\r\n\t * text for.\r\n\t * @returns The text of all tokens within the source interval of `ctx`.\r\n\t */\r\n\t//@NotNull\r\n\tgetText(/*@NotNull*/ ctx: RuleContext): string;\r\n\r\n\t/**\r\n\t * Return the text of all tokens in this stream between `start` and\r\n\t * `stop` (inclusive).\r\n\t *\r\n\t * If the specified `start` or `stop` token was not provided by\r\n\t * this stream, or if the `stop` occurred before the `start`}\r\n\t * token, the behavior is unspecified.\r\n\t *\r\n\t * For streams which ensure that the `Token.tokenIndex` method is\r\n\t * accurate for all of its provided tokens, this method behaves like the\r\n\t * following code. 
Other streams may implement this method in other ways\r\n\t * provided the behavior is consistent with this at a high level.\r\n\t *\r\n\t * ```\r\n\t * TokenStream stream = ...;\r\n\t * String text = \"\";\r\n\t * for (int i = start.tokenIndex; i <= stop.tokenIndex; i++) {\r\n\t * text += stream.get(i).text;\r\n\t * }\r\n\t * ```\r\n\t *\r\n\t * @param start The first token in the interval to get text for.\r\n\t * @param stop The last token in the interval to get text for (inclusive).\r\n\t * @returns The text of all tokens lying between the specified `start`\r\n\t * and `stop` tokens.\r\n\t *\r\n\t * @throws UnsupportedOperationException if this stream does not support\r\n\t * this method for the specified tokens\r\n\t */\r\n\t//@NotNull\r\n\tgetTextFromRange(start: any, stop: any): string;\r\n}\r\n", "/*!\r\n * Copyright 2016 The ANTLR Project. All rights reserved.\r\n * Licensed under the BSD-3-Clause license. See LICENSE file in the project root for license information.\r\n */\r\n\r\n// ConvertTo-TS run at 2016-10-04T11:26:58.1768850-07:00\r\n\r\nimport { Interval } from \"./misc/Interval\";\r\nimport { Override } from \"./Decorators\";\r\nimport { Token } from \"./Token\";\r\nimport { TokenStream } from \"./TokenStream\";\r\n\r\nimport * as Utils from \"./misc/Utils\";\r\n\r\n/**\r\n * Useful for rewriting out a buffered input token stream after doing some\r\n * augmentation or other manipulations on it.\r\n *\r\n * You can insert stuff, replace, and delete chunks. Note that the operations\r\n * are done lazily--only if you convert the buffer to a {@link String} with\r\n * {@link TokenStream#getText()}. This is very efficient because you are not\r\n * moving data around all the time. As the buffer of tokens is converted to\r\n * strings, the {@link #getText()} method(s) scan the input token stream and\r\n * check to see if there is an operation at the current index. 
If so, the\r\n * operation is done and then normal {@link String} rendering continues on the\r\n * buffer. This is like having multiple Turing machine instruction streams\r\n * (programs) operating on a single input tape. :)\r\n *\r\n * This rewriter makes no modifications to the token stream. It does not ask the\r\n * stream to fill itself up nor does it advance the input cursor. The token\r\n * stream `TokenStream.index` will return the same value before and\r\n * after any {@link #getText()} call.\r\n *\r\n * The rewriter only works on tokens that you have in the buffer and ignores the\r\n * current input cursor. If you are buffering tokens on-demand, calling\r\n * {@link #getText()} halfway through the input will only do rewrites for those\r\n * tokens in the first half of the file.\r\n *\r\n * Since the operations are done lazily at {@link #getText}-time, operations do\r\n * not screw up the token index values. That is, an insert operation at token\r\n * index `i` does not change the index values for tokens\r\n * `i`+1..n-1.\r\n *\r\n * Because operations never actually alter the buffer, you may always get the\r\n * original token stream back without undoing anything. Since the instructions\r\n * are queued up, you can easily simulate transactions and roll back any changes\r\n * if there is an error just by removing instructions. 
For example,\r\n *\r\n * ```\r\n * CharStream input = new ANTLRFileStream(\"input\");\r\n * TLexer lex = new TLexer(input);\r\n * CommonTokenStream tokens = new CommonTokenStream(lex);\r\n * T parser = new T(tokens);\r\n * TokenStreamRewriter rewriter = new TokenStreamRewriter(tokens);\r\n * parser.startRule();\r\n * ```\r\n *\r\n * Then in the rules, you can execute (assuming rewriter is visible):\r\n *\r\n * ```\r\n * Token t,u;\r\n * ...\r\n * rewriter.insertAfter(t, \"text to put after t\");}\r\n * rewriter.insertAfter(u, \"text after u\");}\r\n * System.out.println(rewriter.getText());\r\n * ```\r\n *\r\n * You can also have multiple \"instruction streams\" and get multiple rewrites\r\n * from a single pass over the input. Just name the instruction streams and use\r\n * that name again when printing the buffer. This could be useful for generating\r\n * a C file and also its header file--all from the same buffer:\r\n *\r\n * ```\r\n * rewriter.insertAfter(\"pass1\", t, \"text to put after t\");}\r\n * rewriter.insertAfter(\"pass2\", u, \"text after u\");}\r\n * System.out.println(rewriter.getText(\"pass1\"));\r\n * System.out.println(rewriter.getText(\"pass2\"));\r\n * ```\r\n *\r\n * If you don't use named rewrite streams, a \"default\" stream is used as the\r\n * first example shows.\r\n */\r\nexport class TokenStreamRewriter {\r\n\tpublic static readonly DEFAULT_PROGRAM_NAME: string = \"default\";\r\n\tpublic static readonly PROGRAM_INIT_SIZE: number = 100;\r\n\tpublic static readonly MIN_TOKEN_INDEX: number = 0;\r\n\r\n\t/** Our source stream */\r\n\tprotected tokens: TokenStream;\r\n\r\n\t/** You may have multiple, named streams of rewrite operations.\r\n\t * I'm calling these things \"programs.\"\r\n\t * Maps String (name) → rewrite (List)\r\n\t */\r\n\tprotected programs: Map;\r\n\r\n\t/** Map String (program name) → Integer index */\r\n\tprotected lastRewriteTokenIndexes: Map;\r\n\r\n\tconstructor(tokens: TokenStream) {\r\n\t\tthis.tokens = 
tokens;\r\n\t\tthis.programs = new Map();\r\n\t\tthis.programs.set(TokenStreamRewriter.DEFAULT_PROGRAM_NAME, []);\r\n\t\tthis.lastRewriteTokenIndexes = new Map();\r\n\t}\r\n\r\n\tpublic getTokenStream(): TokenStream {\r\n\t\treturn this.tokens;\r\n\t}\r\n\r\n\tpublic rollback(instructionIndex: number): void;\r\n\t/** Rollback the instruction stream for a program so that\r\n\t * the indicated instruction (via instructionIndex) is no\r\n\t * longer in the stream. UNTESTED!\r\n\t */\r\n\tpublic rollback(instructionIndex: number, programName: string): void;\r\n\tpublic rollback(instructionIndex: number, programName: string = TokenStreamRewriter.DEFAULT_PROGRAM_NAME): void {\r\n\t\tlet is: RewriteOperation[] | undefined = this.programs.get(programName);\r\n\t\tif ( is != null ) {\r\n\t\t\tthis.programs.set(programName, is.slice(TokenStreamRewriter.MIN_TOKEN_INDEX, instructionIndex));\r\n\t\t}\r\n\t}\r\n\r\n\tpublic deleteProgram(): void;\r\n\r\n\t/** Reset the program so that no instructions exist */\r\n\tpublic deleteProgram(programName: string): void;\r\n\tpublic deleteProgram(programName: string = TokenStreamRewriter.DEFAULT_PROGRAM_NAME): void {\r\n\t\tthis.rollback(TokenStreamRewriter.MIN_TOKEN_INDEX, programName);\r\n\t}\r\n\r\n\tpublic insertAfter(t: Token, text: {}): void;\r\n\tpublic insertAfter(index: number, text: {}): void;\r\n\tpublic insertAfter(t: Token, text: {}, programName: string): void;\r\n\tpublic insertAfter(index: number, text: {}, programName: string): void;\r\n\tpublic insertAfter(tokenOrIndex: Token | number, text: {}, programName: string = TokenStreamRewriter.DEFAULT_PROGRAM_NAME): void {\r\n\t\tlet index: number;\r\n\t\tif (typeof tokenOrIndex === \"number\") {\r\n\t\t\tindex = tokenOrIndex;\r\n\t\t} else {\r\n\t\t\tindex = tokenOrIndex.tokenIndex;\r\n\t\t}\r\n\r\n\t\t// to insert after, just insert before next index (even if past end)\r\n\t\tlet rewrites: RewriteOperation[] = this.getProgram(programName);\r\n\t\tlet op = new 
InsertAfterOp(this.tokens, index, rewrites.length, text);\r\n\t\trewrites.push(op);\r\n\t}\r\n\r\n\tpublic insertBefore(t: Token, text: {}): void;\r\n\tpublic insertBefore(index: number, text: {}): void;\r\n\tpublic insertBefore(t: Token, text: {}, programName: string): void;\r\n\tpublic insertBefore(index: number, text: {}, programName: string): void;\r\n\tpublic insertBefore(tokenOrIndex: Token | number, text: {}, programName: string = TokenStreamRewriter.DEFAULT_PROGRAM_NAME): void {\r\n\t\tlet index: number;\r\n\t\tif (typeof tokenOrIndex === \"number\") {\r\n\t\t\tindex = tokenOrIndex;\r\n\t\t} else {\r\n\t\t\tindex = tokenOrIndex.tokenIndex;\r\n\t\t}\r\n\r\n\t\tlet rewrites: RewriteOperation[] = this.getProgram(programName);\r\n\t\tlet op: RewriteOperation = new InsertBeforeOp(this.tokens, index, rewrites.length, text);\r\n\t\trewrites.push(op);\r\n\t}\r\n\r\n\tpublic replaceSingle(index: number, text: {}): void;\r\n\tpublic replaceSingle(indexT: Token, text: {}): void;\r\n\tpublic replaceSingle(index: Token | number, text: {}): void {\r\n\t\tif (typeof index === \"number\") {\r\n\t\t\tthis.replace(index, index, text);\r\n\t\t} else {\r\n\t\t\tthis.replace(index, index, text);\r\n\t\t}\r\n\t}\r\n\r\n\tpublic replace(from: number, to: number, text: {}): void;\r\n\r\n\tpublic replace(from: Token, to: Token, text: {}): void;\r\n\r\n\tpublic replace(from: number, to: number, text: {}, programName: string): void;\r\n\r\n\tpublic replace(from: Token, to: Token, text: {}, programName: string): void;\r\n\r\n\tpublic replace(from: Token | number, to: Token | number, text: {}, programName: string = TokenStreamRewriter.DEFAULT_PROGRAM_NAME): void {\r\n\t\tif (typeof from !== \"number\") {\r\n\t\t\tfrom = from.tokenIndex;\r\n\t\t}\r\n\r\n\t\tif (typeof to !== \"number\") {\r\n\t\t\tto = to.tokenIndex;\r\n\t\t}\r\n\r\n\t\tif ( from > to || from < 0 || to < 0 || to >= this.tokens.size ) {\r\n\t\t\tthrow new RangeError(`replace: range invalid: 
${from}..${to}(size=${this.tokens.size})`);\r\n\t\t}\r\n\r\n\t\tlet rewrites: RewriteOperation[] = this.getProgram(programName);\r\n\t\tlet op: RewriteOperation = new ReplaceOp(this.tokens, from, to, rewrites.length, text);\r\n\t\trewrites.push(op);\r\n\t}\r\n\r\n\tpublic delete(index: number): void;\r\n\r\n\tpublic delete(from: number, to: number): void;\r\n\r\n\tpublic delete(indexT: Token): void;\r\n\r\n\tpublic delete(from: Token, to: Token): void;\r\n\r\n\tpublic delete(from: number, to: number, programName: string): void;\r\n\r\n\tpublic delete(from: Token, to: Token, programName: string): void;\r\n\r\n\tpublic delete(from: Token | number, to?: Token | number, programName: string = TokenStreamRewriter.DEFAULT_PROGRAM_NAME): void {\r\n\t\tif (to === undefined) {\r\n\t\t\tto = from;\r\n\t\t}\r\n\r\n\t\tif (typeof from === \"number\") {\r\n\t\t\tthis.replace(from, to as number, \"\", programName);\r\n\t\t} else {\r\n\t\t\tthis.replace(from, to as Token, \"\", programName);\r\n\t\t}\r\n\t}\r\n\r\n\tprotected getLastRewriteTokenIndex(): number;\r\n\r\n\tprotected getLastRewriteTokenIndex(programName: string): number;\r\n\r\n\tprotected getLastRewriteTokenIndex(programName: string = TokenStreamRewriter.DEFAULT_PROGRAM_NAME): number {\r\n\t\tlet I: number | undefined = this.lastRewriteTokenIndexes.get(programName);\r\n\t\tif ( I == null ) {\r\n\t\t\treturn -1;\r\n\t\t}\r\n\r\n\t\treturn I;\r\n\t}\r\n\r\n\tprotected setLastRewriteTokenIndex(programName: string, i: number): void {\r\n\t\tthis.lastRewriteTokenIndexes.set(programName, i);\r\n\t}\r\n\r\n\tprotected getProgram(name: string): RewriteOperation[] {\r\n\t\tlet is: RewriteOperation[] | undefined = this.programs.get(name);\r\n\t\tif ( is == null ) {\r\n\t\t\tis = this.initializeProgram(name);\r\n\t\t}\r\n\r\n\t\treturn is;\r\n\t}\r\n\r\n\tprivate initializeProgram(name: string): RewriteOperation[] {\r\n\t\tlet is: RewriteOperation[] = [];\r\n\t\tthis.programs.set(name, is);\r\n\t\treturn is;\r\n\t}\r\n\r\n\t/** 
Return the text from the original tokens altered per the\r\n\t * instructions given to this rewriter.\r\n\t */\r\n\tpublic getText(): string;\r\n\r\n\t/** Return the text from the original tokens altered per the\r\n\t * instructions given to this rewriter in programName.\r\n\t *\r\n\t * @since 4.5\r\n\t */\r\n\tpublic getText(programName: string): string;\r\n\r\n\t/** Return the text associated with the tokens in the interval from the\r\n\t * original token stream but with the alterations given to this rewriter.\r\n\t * The interval refers to the indexes in the original token stream.\r\n\t * We do not alter the token stream in any way, so the indexes\r\n\t * and intervals are still consistent. Includes any operations done\r\n\t * to the first and last token in the interval. So, if you did an\r\n\t * insertBefore on the first token, you would get that insertion.\r\n\t * The same is true if you do an insertAfter the stop token.\r\n\t */\r\n\tpublic getText(interval: Interval): string;\r\n\r\n\tpublic getText(interval: Interval, programName: string): string;\r\n\r\n\tpublic getText(intervalOrProgram?: Interval | string, programName: string = TokenStreamRewriter.DEFAULT_PROGRAM_NAME): string {\r\n\t\tlet interval: Interval;\r\n\t\tif (intervalOrProgram instanceof Interval) {\r\n\t\t\tinterval = intervalOrProgram;\r\n\t\t} else {\r\n\t\t\tinterval = Interval.of(0, this.tokens.size - 1);\r\n\t\t}\r\n\r\n\t\tif (typeof intervalOrProgram === \"string\") {\r\n\t\t\tprogramName = intervalOrProgram;\r\n\t\t}\r\n\r\n\t\tlet rewrites: RewriteOperation[] | undefined = this.programs.get(programName);\r\n\t\tlet start: number = interval.a;\r\n\t\tlet stop: number = interval.b;\r\n\r\n\t\t// ensure start/end are in range\r\n\t\tif ( stop > this.tokens.size - 1 ) {\r\n\t\t\tstop = this.tokens.size - 1;\r\n\t\t}\r\n\t\tif ( start < 0 ) {\r\n\t\t\tstart = 0;\r\n\t\t}\r\n\r\n\t\tif ( rewrites == null || rewrites.length === 0 ) {\r\n\t\t\treturn this.tokens.getText(interval); // no 
instructions to execute\r\n\t\t}\r\n\r\n\t\tlet buf: string[] = [];\r\n\r\n\t\t// First, optimize instruction stream\r\n\t\tlet indexToOp: Map = this.reduceToSingleOperationPerIndex(rewrites);\r\n\r\n\t\t// Walk buffer, executing instructions and emitting tokens\r\n\t\tlet i: number = start;\r\n\t\twhile ( i <= stop && i < this.tokens.size ) {\r\n\t\t\tlet op: RewriteOperation | undefined = indexToOp.get(i);\r\n\t\t\tindexToOp.delete(i); // remove so any left have index size-1\r\n\t\t\tlet t: Token = this.tokens.get(i);\r\n\t\t\tif ( op == null ) {\r\n\t\t\t\t// no operation at that index, just dump token\r\n\t\t\t\tif ( t.type !== Token.EOF ) {\r\n\t\t\t\t\tbuf.push(String(t.text));\r\n\t\t\t\t}\r\n\t\t\t\ti++; // move to next token\r\n\t\t\t}\r\n\t\t\telse {\r\n\t\t\t\ti = op.execute(buf); // execute operation and skip\r\n\t\t\t}\r\n\t\t}\r\n\r\n\t\t// include stuff after end if it's last index in buffer\r\n\t\t// So, if they did an insertAfter(lastValidIndex, \"foo\"), include\r\n\t\t// foo if end==lastValidIndex.\r\n\t\tif ( stop === this.tokens.size - 1 ) {\r\n\t\t\t// Scan any remaining operations after last token\r\n\t\t\t// should be included (they will be inserts).\r\n\t\t\tfor (let op of indexToOp.values()) {\r\n\t\t\t\tif ( op.index >= this.tokens.size - 1 ) {\r\n\t\t\t\t\tbuf.push(op.text.toString());\r\n\t\t\t\t}\r\n\t\t\t}\r\n\t\t}\r\n\r\n\t\treturn buf.join(\"\");\r\n\t}\r\n\r\n\t/** We need to combine operations and report invalid operations (like\r\n\t * overlapping replaces that are not completed nested). Inserts to\r\n\t * same index need to be combined etc... 
Here are the cases:\r\n\t *\r\n\t * I.i.u I.j.v\t\t\t\t\t\t\t\tleave alone, nonoverlapping\r\n\t * I.i.u I.i.v\t\t\t\t\t\t\t\tcombine: Iivu\r\n\t *\r\n\t * R.i-j.u R.x-y.v\t| i-j in x-y\t\t\tdelete first R\r\n\t * R.i-j.u R.i-j.v\t\t\t\t\t\t\tdelete first R\r\n\t * R.i-j.u R.x-y.v\t| x-y in i-j\t\t\tERROR\r\n\t * R.i-j.u R.x-y.v\t| boundaries overlap\tERROR\r\n\t *\r\n\t * Delete special case of replace (text==undefined):\r\n\t * D.i-j.u D.x-y.v\t| boundaries overlap\tcombine to max(min)..max(right)\r\n\t *\r\n\t * I.i.u R.x-y.v | i in (x+1)-y\t\t\tdelete I (since insert before\r\n\t * \t\t\t\t\t\t\t\t\t\t\twe're not deleting i)\r\n\t * I.i.u R.x-y.v | i not in (x+1)-y\t\tleave alone, nonoverlapping\r\n\t * R.x-y.v I.i.u | i in x-y\t\t\t\tERROR\r\n\t * R.x-y.v I.x.u \t\t\t\t\t\t\tR.x-y.uv (combine, delete I)\r\n\t * R.x-y.v I.i.u | i not in x-y\t\t\tleave alone, nonoverlapping\r\n\t *\r\n\t * I.i.u = insert u before op @ index i\r\n\t * R.x-y.u = replace x-y indexed tokens with u\r\n\t *\r\n\t * First we need to examine replaces. For any replace op:\r\n\t *\r\n\t * \t\t1. wipe out any insertions before op within that range.\r\n\t * \t\t2. Drop any replace op before that is contained completely within\r\n\t * \t that range.\r\n\t * \t\t3. Throw exception upon boundary overlap with any previous replace.\r\n\t *\r\n\t * Then we can deal with inserts:\r\n\t *\r\n\t * \t\t1. for any inserts to same index, combine even if not adjacent.\r\n\t * \t\t2. for any prior replace with same left boundary, combine this\r\n\t * \t insert with replace and delete this replace.\r\n\t * \t\t3. throw exception if index in same range as previous replace\r\n\t *\r\n\t * Don't actually delete; make op undefined in list. Easier to walk list.\r\n\t * Later we can throw as we add to index → op map.\r\n\t *\r\n\t * Note that I.2 R.2-2 will wipe out I.2 even though, technically, the\r\n\t * inserted stuff would be before the replace range. 
But, if you\r\n\t * add tokens in front of a method body '{' and then delete the method\r\n\t * body, I think the stuff before the '{' you added should disappear too.\r\n\t *\r\n\t * Return a map from token index to operation.\r\n\t */\r\n\tprotected reduceToSingleOperationPerIndex(rewrites: Array): Map {\r\n\t\t// console.log(`rewrites=[${Utils.join(rewrites, \", \")}]`);\r\n\r\n\t\t// WALK REPLACES\r\n\t\tfor (let i = 0; i < rewrites.length; i++) {\r\n\t\t\tlet op: RewriteOperation | undefined = rewrites[i];\r\n\t\t\tif ( op == null ) {\r\n\t\t\t\tcontinue;\r\n\t\t\t}\r\n\t\t\tif ( !(op instanceof ReplaceOp) ) {\r\n\t\t\t\tcontinue;\r\n\t\t\t}\r\n\t\t\tlet rop: ReplaceOp = op;\r\n\t\t\t// Wipe prior inserts within range\r\n\t\t\tlet inserts: InsertBeforeOp[] = this.getKindOfOps(rewrites, InsertBeforeOp, i);\r\n\t\t\tfor (let iop of inserts) {\r\n\t\t\t\tif ( iop.index === rop.index ) {\r\n\t\t\t\t\t// E.g., insert before 2, delete 2..2; update replace\r\n\t\t\t\t\t// text to include insert before, kill insert\r\n\t\t\t\t\trewrites[iop.instructionIndex] = undefined;\r\n\t\t\t\t\trop.text = iop.text.toString() + (rop.text != null ? 
rop.text.toString() : \"\");\r\n\t\t\t\t}\r\n\t\t\t\telse if ( iop.index > rop.index && iop.index <= rop.lastIndex ) {\r\n\t\t\t\t\t// delete insert as it's a no-op.\r\n\t\t\t\t\trewrites[iop.instructionIndex] = undefined;\r\n\t\t\t\t}\r\n\t\t\t}\r\n\t\t\t// Drop any prior replaces contained within\r\n\t\t\tlet prevReplaces: ReplaceOp[] = this.getKindOfOps(rewrites, ReplaceOp, i);\r\n\t\t\tfor (let prevRop of prevReplaces) {\r\n\t\t\t\tif ( prevRop.index >= rop.index && prevRop.lastIndex <= rop.lastIndex ) {\r\n\t\t\t\t\t// delete replace as it's a no-op.\r\n\t\t\t\t\trewrites[prevRop.instructionIndex] = undefined;\r\n\t\t\t\t\tcontinue;\r\n\t\t\t\t}\r\n\t\t\t\t// throw exception unless disjoint or identical\r\n\t\t\t\tlet disjoint: boolean =\r\n\t\t\t\t\tprevRop.lastIndex < rop.index || prevRop.index > rop.lastIndex;\r\n\t\t\t\t// Delete special case of replace (text==null):\r\n\t\t\t\t// D.i-j.u D.x-y.v\t| boundaries overlap\tcombine to max(min)..max(right)\r\n\t\t\t\tif ( prevRop.text == null && rop.text == null && !disjoint ) {\r\n\t\t\t\t\t// console.log(`overlapping deletes: ${prevRop}, ${rop}`);\r\n\t\t\t\t\trewrites[prevRop.instructionIndex] = undefined; // kill first delete\r\n\t\t\t\t\trop.index = Math.min(prevRop.index, rop.index);\r\n\t\t\t\t\trop.lastIndex = Math.max(prevRop.lastIndex, rop.lastIndex);\r\n\t\t\t\t\t// console.log(`new rop ${rop}`);\r\n\t\t\t\t}\r\n\t\t\t\telse if ( !disjoint ) {\r\n\t\t\t\t\tthrow new Error(`replace op boundaries of ${rop} overlap with previous ${prevRop}`);\r\n\t\t\t\t}\r\n\t\t\t}\r\n\t\t}\r\n\r\n\t\t// WALK INSERTS\r\n\t\tfor (let i = 0; i < rewrites.length; i++) {\r\n\t\t\tlet op: RewriteOperation | undefined = rewrites[i];\r\n\t\t\tif ( op == null ) {\r\n\t\t\t\tcontinue;\r\n\t\t\t}\r\n\t\t\tif ( !(op instanceof InsertBeforeOp) ) {\r\n\t\t\t\tcontinue;\r\n\t\t\t}\r\n\t\t\tlet iop: InsertBeforeOp = op;\r\n\t\t\t// combine current insert with prior if any at same index\r\n\t\t\tlet prevInserts: InsertBeforeOp[] = 
this.getKindOfOps(rewrites, InsertBeforeOp, i);\r\n\t\t\tfor (let prevIop of prevInserts) {\r\n\t\t\t\tif ( prevIop.index === iop.index ) {\r\n\t\t\t\t\tif (prevIop instanceof InsertAfterOp) {\r\n\t\t\t\t\t\tiop.text = this.catOpText(prevIop.text, iop.text);\r\n\t\t\t\t\t\trewrites[prevIop.instructionIndex] = undefined;\r\n\t\t\t\t\t}\r\n\t\t\t\t\telse if (prevIop instanceof InsertBeforeOp) { // combine objects\r\n\t\t\t\t\t\t// convert to strings...we're in process of toString'ing\r\n\t\t\t\t\t\t// whole token buffer so no lazy eval issue with any templates\r\n\t\t\t\t\t\tiop.text = this.catOpText(iop.text, prevIop.text);\r\n\t\t\t\t\t\t// delete redundant prior insert\r\n\t\t\t\t\t\trewrites[prevIop.instructionIndex] = undefined;\r\n\t\t\t\t\t}\r\n\t\t\t\t}\r\n\t\t\t}\r\n\t\t\t// look for replaces where iop.index is in range; error\r\n\t\t\tlet prevReplaces: ReplaceOp[] = this.getKindOfOps(rewrites, ReplaceOp, i);\r\n\t\t\tfor (let rop of prevReplaces) {\r\n\t\t\t\tif ( iop.index === rop.index ) {\r\n\t\t\t\t\trop.text = this.catOpText(iop.text, rop.text);\r\n\t\t\t\t\trewrites[i] = undefined;\t// delete current insert\r\n\t\t\t\t\tcontinue;\r\n\t\t\t\t}\r\n\t\t\t\tif ( iop.index >= rop.index && iop.index <= rop.lastIndex ) {\r\n\t\t\t\t\tthrow new Error(`insert op ${iop} within boundaries of previous ${rop}`);\r\n\t\t\t\t}\r\n\t\t\t}\r\n\t\t}\r\n\t\t// console.log(`rewrites after=[${Utils.join(rewrites, \", \")}]`);\r\n\t\tlet m: Map = new Map();\r\n\t\tfor (let op of rewrites) {\r\n\t\t\tif ( op == null ) {\r\n\t\t\t\t// ignore deleted ops\r\n\t\t\t\tcontinue;\r\n\t\t\t}\r\n\t\t\tif ( m.get(op.index) != null ) {\r\n\t\t\t\tthrow new Error(\"should only be one op per index\");\r\n\t\t\t}\r\n\t\t\tm.set(op.index, op);\r\n\t\t}\r\n\t\t// console.log(`index to op: ${m}`);\r\n\t\treturn m;\r\n\t}\r\n\r\n\tprotected catOpText(a: {}, b: {}): string {\r\n\t\tlet x: string = \"\";\r\n\t\tlet y: string = \"\";\r\n\t\tif ( a != null ) {\r\n\t\t\tx = 
a.toString();\r\n\t\t}\r\n\t\tif ( b != null ) {\r\n\t\t\ty = b.toString();\r\n\t\t}\r\n\t\treturn x + y;\r\n\t}\r\n\r\n\t/** Get all operations before an index of a particular kind */\r\n\tprotected getKindOfOps(rewrites: Array, kind: {new(...args: any[]): T}, before: number): T[] {\r\n\t\tlet ops: T[] = [];\r\n\t\tfor (let i = 0; i < before && i < rewrites.length; i++) {\r\n\t\t\tlet op: RewriteOperation | undefined = rewrites[i];\r\n\t\t\tif ( op == null ) {\r\n\t\t\t\t// ignore deleted\r\n\t\t\t\tcontinue;\r\n\t\t\t}\r\n\t\t\tif ( op instanceof kind ) {\r\n\t\t\t\tops.push(op);\r\n\t\t\t}\r\n\t\t}\r\n\t\treturn ops;\r\n\t}\r\n}\r\n\r\n// Define the rewrite operation hierarchy\r\n\r\nexport class RewriteOperation {\r\n\tprotected readonly tokens: TokenStream;\r\n\t/** What index into rewrites List are we? */\r\n\tpublic readonly instructionIndex: number;\r\n\t/** Token buffer index. */\r\n\tpublic index: number;\r\n\tpublic text: {};\r\n\r\n\tconstructor(tokens: TokenStream, index: number, instructionIndex: number);\r\n\tconstructor(tokens: TokenStream, index: number, instructionIndex: number, text: {});\r\n\tconstructor(tokens: TokenStream, index: number, instructionIndex: number, text?: {}) {\r\n\t\tthis.tokens = tokens;\r\n\t\tthis.instructionIndex = instructionIndex;\r\n\t\tthis.index = index;\r\n\t\tthis.text = text === undefined ? 
\"\" : text;\r\n\t}\r\n\r\n\t/** Execute the rewrite operation by possibly adding to the buffer.\r\n\t * Return the index of the next token to operate on.\r\n\t */\r\n\tpublic execute(buf: string[]): number {\r\n\t\treturn this.index;\r\n\t}\r\n\r\n\t@Override\r\n\tpublic toString(): string {\r\n\t\tlet opName: string = this.constructor.name;\r\n\t\tlet $index = opName.indexOf(\"$\");\r\n\t\topName = opName.substring($index + 1, opName.length);\r\n\t\treturn \"<\" + opName + \"@\" + this.tokens.get(this.index) +\r\n\t\t\t\t\":\\\"\" + this.text + \"\\\">\";\r\n\t}\r\n}\r\n\r\nclass InsertBeforeOp extends RewriteOperation {\r\n\tconstructor(tokens: TokenStream, index: number, instructionIndex: number, text: {}) {\r\n\t\tsuper(tokens, index, instructionIndex, text);\r\n\t}\r\n\r\n\t@Override\r\n\tpublic execute(buf: string[]): number {\r\n\t\tbuf.push(this.text.toString());\r\n\t\tif ( this.tokens.get(this.index).type !== Token.EOF ) {\r\n\t\t\tbuf.push(String(this.tokens.get(this.index).text));\r\n\t\t}\r\n\t\treturn this.index + 1;\r\n\t}\r\n}\r\n\r\n/** Distinguish between insert after/before to do the \"insert afters\"\r\n * first and then the \"insert befores\" at same index. 
Implementation\r\n * of \"insert after\" is \"insert before index+1\".\r\n */\r\nclass InsertAfterOp extends InsertBeforeOp {\r\n\tconstructor(tokens: TokenStream, index: number, instructionIndex: number, text: {}) {\r\n\t\tsuper(tokens, index + 1, instructionIndex, text); // insert after is insert before index+1\r\n\t}\r\n}\r\n\r\n/** I'm going to try replacing range from x..y with (y-x)+1 ReplaceOp\r\n * instructions.\r\n */\r\nclass ReplaceOp extends RewriteOperation {\r\n\tpublic lastIndex: number;\r\n\tconstructor(tokens: TokenStream, from: number, to: number, instructionIndex: number, text: {}) {\r\n\t\tsuper(tokens, from, instructionIndex, text);\r\n\t\tthis.lastIndex = to;\r\n\t}\r\n\r\n\t@Override\r\n\tpublic execute(buf: string[]): number {\r\n\t\tif ( this.text != null ) {\r\n\t\t\tbuf.push(this.text.toString());\r\n\t\t}\r\n\t\treturn this.lastIndex + 1;\r\n\t}\r\n\r\n\t@Override\r\n\tpublic toString(): string {\r\n\t\tif ( this.text == null ) {\r\n\t\t\treturn \"\";\r\n\t\t}\r\n\t\treturn \"\";\r\n\t}\r\n}\r\n", "/*!\r\n * Copyright 2016 The ANTLR Project. All rights reserved.\r\n * Licensed under the BSD-3-Clause license. See LICENSE file in the project root for license information.\r\n */\r\n\r\n// ConvertTo-TS run at 2016-10-04T11:26:59.4986610-07:00\r\n\r\n/**\r\n * This interface provides information about the vocabulary used by a\r\n * recognizer.\r\n *\r\n * @see Recognizer.vocabulary\r\n * @author Sam Harwell\r\n */\r\nexport interface Vocabulary {\r\n\r\n\t/**\r\n\t * Returns the highest token type value. It can be used to iterate from\r\n\t * zero to that number, inclusively, thus querying all stored entries.\r\n\t * @returns the highest token type value\r\n\t */\r\n\treadonly maxTokenType: number;\r\n\r\n\t/**\r\n\t * Gets the string literal associated with a token type. 
The string returned\r\n\t * by this method, when not `undefined`, can be used unaltered in a parser\r\n\t * grammar to represent this token type.\r\n\t *\r\n\t * The following table shows examples of lexer rules and the literal\r\n\t * names assigned to the corresponding token types.\r\n\t *\r\n\t * \r\n\t * \r\n\t * \r\n\t * \r\n\t * \r\n\t * \r\n\t * \r\n\t * \r\n\t * \r\n\t * \r\n\t * \r\n\t * \r\n\t * \r\n\t * \r\n\t * \r\n\t * \r\n\t * \r\n\t * \r\n\t * \r\n\t * \r\n\t * \r\n\t *
RuleLiteral NameJava String Literal
`THIS : 'this';``'this'``\"'this'\"`
`SQUOTE : '\\'';``'\\''``\"'\\\\''\"`
`ID : [A-Z]+;`n/a`undefined`
\r\n\t *\r\n\t * @param tokenType The token type.\r\n\t *\r\n\t * @returns The string literal associated with the specified token type, or\r\n\t * `undefined` if no string literal is associated with the type.\r\n\t */\r\n\tgetLiteralName(tokenType: number): string | undefined;\r\n\r\n\t/**\r\n\t * Gets the symbolic name associated with a token type. The string returned\r\n\t * by this method, when not `undefined`, can be used unaltered in a parser\r\n\t * grammar to represent this token type.\r\n\t *\r\n\t * This method supports token types defined by any of the following\r\n\t * methods:\r\n\t *\r\n\t * * Tokens created by lexer rules.\r\n\t * * Tokens defined in a `tokens{}` block in a lexer or parser\r\n\t * grammar.\r\n\t * * The implicitly defined `EOF` token, which has the token type\r\n\t * {@link Token#EOF}.\r\n\t *\r\n\t * The following table shows examples of lexer rules and the literal\r\n\t * names assigned to the corresponding token types.\r\n\t *\r\n\t * \r\n\t * \r\n\t * \r\n\t * \r\n\t * \r\n\t * \r\n\t * \r\n\t * \r\n\t * \r\n\t * \r\n\t * \r\n\t * \r\n\t * \r\n\t * \r\n\t * \r\n\t * \r\n\t * \r\n\t *
RuleSymbolic Name
`THIS : 'this';``THIS`
`SQUOTE : '\\'';``SQUOTE`
`ID : [A-Z]+;``ID`
\r\n\t *\r\n\t * @param tokenType The token type.\r\n\t *\r\n\t * @returns The symbolic name associated with the specified token type, or\r\n\t * `undefined` if no symbolic name is associated with the type.\r\n\t */\r\n\tgetSymbolicName(tokenType: number): string | undefined;\r\n\r\n\t/**\r\n\t * Gets the display name of a token type.\r\n\t *\r\n\t * ANTLR provides a default implementation of this method, but\r\n\t * applications are free to override the behavior in any manner which makes\r\n\t * sense for the application. The default implementation returns the first\r\n\t * result from the following list which produces a non-`undefined`\r\n\t * result.\r\n\t *\r\n\t * 1. The result of {@link #getLiteralName}\r\n\t * 1. The result of {@link #getSymbolicName}\r\n\t * 1. The result of {@link Integer#toString}\r\n\t *\r\n\t * @param tokenType The token type.\r\n\t *\r\n\t * @returns The display name of the token type, for use in error reporting or\r\n\t * other user-visible messages which reference specific token types.\r\n\t */\r\n\t//@NotNull\r\n\tgetDisplayName(tokenType: number): string;\r\n\r\n}\r\n", "/*!\r\n * Copyright 2016 The ANTLR Project. All rights reserved.\r\n * Licensed under the BSD-3-Clause license. See LICENSE file in the project root for license information.\r\n */\r\n\r\n// ConvertTo-TS run at 2016-10-04T11:26:59.7015751-07:00\r\n\r\nimport { Token } from \"./Token\";\r\n\r\nexport interface WritableToken extends Token {\r\n\ttext: string | undefined;\r\n\r\n\ttype: number;\r\n\r\n\tline: number;\r\n\r\n\tcharPositionInLine: number;\r\n\r\n\tchannel: number;\r\n\r\n\ttokenIndex: number;\r\n}\r\n", "/*!\r\n * Copyright 2016 The ANTLR Project. All rights reserved.\r\n * Licensed under the BSD-3-Clause license. 
See LICENSE file in the project root for license information.\r\n */\r\n\r\nexport * from \"./ANTLRErrorListener\";\r\nexport * from \"./ANTLRErrorStrategy\";\r\n// export * from \"./ANTLRFileStream\";\r\nexport * from \"./ANTLRInputStream\";\r\nexport * from \"./BailErrorStrategy\";\r\nexport * from \"./BufferedTokenStream\";\r\nexport * from \"./CharStream\";\r\nexport * from \"./CharStreams\";\r\nexport * from \"./CodePointBuffer\";\r\nexport * from \"./CodePointCharStream\";\r\nexport * from \"./CommonToken\";\r\nexport * from \"./CommonTokenFactory\";\r\nexport * from \"./CommonTokenStream\";\r\nexport * from \"./ConsoleErrorListener\";\r\nexport * from \"./DefaultErrorStrategy\";\r\nexport * from \"./Dependents\";\r\nexport * from \"./DiagnosticErrorListener\";\r\nexport * from \"./FailedPredicateException\";\r\nexport * from \"./InputMismatchException\";\r\nexport * from \"./InterpreterRuleContext\";\r\nexport * from \"./IntStream\";\r\nexport * from \"./Lexer\";\r\nexport * from \"./LexerInterpreter\";\r\nexport * from \"./LexerNoViableAltException\";\r\nexport * from \"./ListTokenSource\";\r\nexport * from \"./NoViableAltException\";\r\nexport * from \"./Parser\";\r\nexport * from \"./ParserErrorListener\";\r\nexport * from \"./ParserInterpreter\";\r\nexport * from \"./ParserRuleContext\";\r\nexport * from \"./ProxyErrorListener\";\r\nexport * from \"./ProxyParserErrorListener\";\r\nexport * from \"./RecognitionException\";\r\nexport * from \"./Recognizer\";\r\nexport * from \"./RuleContext\";\r\nexport * from \"./RuleContextWithAltNum\";\r\nexport * from \"./RuleDependency\";\r\nexport * from \"./RuleVersion\";\r\nexport * from \"./Token\";\r\nexport * from \"./TokenFactory\";\r\nexport * from \"./TokenSource\";\r\nexport * from \"./TokenStream\";\r\nexport * from \"./TokenStreamRewriter\";\r\n// export * from \"./UnbufferedCharStream\";\r\n// export * from \"./UnbufferedTokenStream\";\r\nexport * from \"./Vocabulary\";\r\nexport * from 
\"./VocabularyImpl\";\r\nexport * from \"./WritableToken\";\r\n", "/*!\r\n * Copyright 2016 The ANTLR Project. All rights reserved.\r\n * Licensed under the BSD-3-Clause license. See LICENSE file in the project root for license information.\r\n */\r\n\r\n// ConvertTo-TS run at 2016-10-04T11:26:47.3092279-07:00\r\n\r\nimport { ErrorNode } from \"./ErrorNode\";\r\nimport { NotNull, Override } from \"../Decorators\";\r\nimport { ParseTree } from \"./ParseTree\";\r\nimport { ParseTreeVisitor } from \"./ParseTreeVisitor\";\r\nimport { RuleNode } from \"./RuleNode\";\r\nimport { TerminalNode } from \"./TerminalNode\";\r\n\r\nexport abstract class AbstractParseTreeVisitor implements ParseTreeVisitor {\r\n\t/**\r\n\t * {@inheritDoc}\r\n\t *\r\n\t * The default implementation calls {@link ParseTree#accept} on the\r\n\t * specified tree.\r\n\t */\r\n\t@Override\r\n\tpublic visit(@NotNull tree: ParseTree): Result {\r\n\t\treturn tree.accept(this);\r\n\t}\r\n\r\n\t/**\r\n\t * {@inheritDoc}\r\n\t *\r\n\t * The default implementation initializes the aggregate result to\r\n\t * {@link #defaultResult defaultResult()}. Before visiting each child, it\r\n\t * calls {@link #shouldVisitNextChild shouldVisitNextChild}; if the result\r\n\t * is `false` no more children are visited and the current aggregate\r\n\t * result is returned. After visiting a child, the aggregate result is\r\n\t * updated by calling {@link #aggregateResult aggregateResult} with the\r\n\t * previous aggregate result and the result of visiting the child.\r\n\t *\r\n\t * The default implementation is not safe for use in visitors that modify\r\n\t * the tree structure. 
Visitors that modify the tree should override this\r\n\t * method to behave properly in respect to the specific algorithm in use.\r\n\t */\r\n\t@Override\r\n\tpublic visitChildren(@NotNull node: RuleNode): Result {\r\n\t\tlet result: Result = this.defaultResult();\r\n\t\tlet n: number = node.childCount;\r\n\t\tfor (let i = 0; i < n; i++) {\r\n\t\t\tif (!this.shouldVisitNextChild(node, result)) {\r\n\t\t\t\tbreak;\r\n\t\t\t}\r\n\r\n\t\t\tlet c: ParseTree = node.getChild(i);\r\n\t\t\tlet childResult: Result = c.accept(this);\r\n\t\t\tresult = this.aggregateResult(result, childResult);\r\n\t\t}\r\n\r\n\t\treturn result;\r\n\t}\r\n\r\n\t/**\r\n\t * {@inheritDoc}\r\n\t *\r\n\t * The default implementation returns the result of\r\n\t * {@link #defaultResult defaultResult}.\r\n\t */\r\n\t@Override\r\n\tpublic visitTerminal(@NotNull node: TerminalNode): Result {\r\n\t\treturn this.defaultResult();\r\n\t}\r\n\r\n\t/**\r\n\t * {@inheritDoc}\r\n\t *\r\n\t * The default implementation returns the result of\r\n\t * {@link #defaultResult defaultResult}.\r\n\t */\r\n\t@Override\r\n\tpublic visitErrorNode(@NotNull node: ErrorNode): Result {\r\n\t\treturn this.defaultResult();\r\n\t}\r\n\r\n\t/**\r\n\t * Gets the default value returned by visitor methods. This value is\r\n\t * returned by the default implementations of\r\n\t * {@link #visitTerminal visitTerminal}, {@link #visitErrorNode visitErrorNode}.\r\n\t * The default implementation of {@link #visitChildren visitChildren}\r\n\t * initializes its aggregate result to this value.\r\n\t *\r\n\t * @returns The default value returned by visitor methods.\r\n\t */\r\n\tprotected abstract defaultResult(): Result;\r\n\r\n\t/**\r\n\t * Aggregates the results of visiting multiple children of a node. 
After\r\n\t * either all children are visited or {@link #shouldVisitNextChild} returns\r\n\t * `false`, the aggregate value is returned as the result of\r\n\t * {@link #visitChildren}.\r\n\t *\r\n\t * The default implementation returns `nextResult`, meaning\r\n\t * {@link #visitChildren} will return the result of the last child visited\r\n\t * (or return the initial value if the node has no children).\r\n\t *\r\n\t * @param aggregate The previous aggregate value. In the default\r\n\t * implementation, the aggregate value is initialized to\r\n\t * {@link #defaultResult}, which is passed as the `aggregate` argument\r\n\t * to this method after the first child node is visited.\r\n\t * @param nextResult The result of the immediately preceeding call to visit\r\n\t * a child node.\r\n\t *\r\n\t * @returns The updated aggregate result.\r\n\t */\r\n\tprotected aggregateResult(aggregate: Result, nextResult: Result): Result {\r\n\t\treturn nextResult;\r\n\t}\r\n\r\n\t/**\r\n\t * This method is called after visiting each child in\r\n\t * {@link #visitChildren}. This method is first called before the first\r\n\t * child is visited; at that point `currentResult` will be the initial\r\n\t * value (in the default implementation, the initial value is returned by a\r\n\t * call to {@link #defaultResult}. 
This method is not called after the last\r\n\t * child is visited.\r\n\t *\r\n\t * The default implementation always returns `true`, indicating that\r\n\t * `visitChildren` should only return after all children are visited.\r\n\t * One reason to override this method is to provide a \"short circuit\"\r\n\t * evaluation option for situations where the result of visiting a single\r\n\t * child has the potential to determine the result of the visit operation as\r\n\t * a whole.\r\n\t *\r\n\t * @param node The {@link RuleNode} whose children are currently being\r\n\t * visited.\r\n\t * @param currentResult The current aggregate result of the children visited\r\n\t * to the current point.\r\n\t *\r\n\t * @returns `true` to continue visiting children. Otherwise return\r\n\t * `false` to stop visiting children and immediately return the\r\n\t * current aggregate result from {@link #visitChildren}.\r\n\t */\r\n\tprotected shouldVisitNextChild(@NotNull node: RuleNode, currentResult: Result): boolean {\r\n\t\treturn true;\r\n\t}\r\n}\r\n", "/* Copyright (c) 2012-2017 The ANTLR Project. All rights reserved.\n * Use of this file is governed by the BSD 3-clause license that\n * can be found in the LICENSE.txt file in the project root.\n */\n\nfunction arrayToString(a) {\n return Array.isArray(a) ? 
(\"[\" + a.join(\", \") + \"]\") : \"null\";\n}\n\nString.prototype.seed = String.prototype.seed || Math.round(Math.random() * Math.pow(2, 32));\n\nString.prototype.hashCode = function () {\n const key = this.toString();\n let h1b, k1;\n\n const remainder = key.length & 3; // key.length % 4\n const bytes = key.length - remainder;\n let h1 = String.prototype.seed;\n const c1 = 0xcc9e2d51;\n const c2 = 0x1b873593;\n let i = 0;\n\n while (i < bytes) {\n k1 =\n ((key.charCodeAt(i) & 0xff)) |\n ((key.charCodeAt(++i) & 0xff) << 8) |\n ((key.charCodeAt(++i) & 0xff) << 16) |\n ((key.charCodeAt(++i) & 0xff) << 24);\n ++i;\n\n k1 = ((((k1 & 0xffff) * c1) + ((((k1 >>> 16) * c1) & 0xffff) << 16))) & 0xffffffff;\n k1 = (k1 << 15) | (k1 >>> 17);\n k1 = ((((k1 & 0xffff) * c2) + ((((k1 >>> 16) * c2) & 0xffff) << 16))) & 0xffffffff;\n\n h1 ^= k1;\n h1 = (h1 << 13) | (h1 >>> 19);\n h1b = ((((h1 & 0xffff) * 5) + ((((h1 >>> 16) * 5) & 0xffff) << 16))) & 0xffffffff;\n h1 = (((h1b & 0xffff) + 0x6b64) + ((((h1b >>> 16) + 0xe654) & 0xffff) << 16));\n }\n\n k1 = 0;\n\n switch (remainder) {\n case 3:\n k1 ^= (key.charCodeAt(i + 2) & 0xff) << 16;\n case 2:\n k1 ^= (key.charCodeAt(i + 1) & 0xff) << 8;\n case 1:\n k1 ^= (key.charCodeAt(i) & 0xff);\n\n k1 = (((k1 & 0xffff) * c1) + ((((k1 >>> 16) * c1) & 0xffff) << 16)) & 0xffffffff;\n k1 = (k1 << 15) | (k1 >>> 17);\n k1 = (((k1 & 0xffff) * c2) + ((((k1 >>> 16) * c2) & 0xffff) << 16)) & 0xffffffff;\n h1 ^= k1;\n }\n\n h1 ^= key.length;\n\n h1 ^= h1 >>> 16;\n h1 = (((h1 & 0xffff) * 0x85ebca6b) + ((((h1 >>> 16) * 0x85ebca6b) & 0xffff) << 16)) & 0xffffffff;\n h1 ^= h1 >>> 13;\n h1 = ((((h1 & 0xffff) * 0xc2b2ae35) + ((((h1 >>> 16) * 0xc2b2ae35) & 0xffff) << 16))) & 0xffffffff;\n h1 ^= h1 >>> 16;\n\n return h1 >>> 0;\n};\n\nfunction standardEqualsFunction(a, b) {\n return a ? a.equals(b) : a==b;\n}\n\nfunction standardHashCodeFunction(a) {\n return a ? 
a.hashCode() : -1;\n}\n\nclass Set {\n constructor(hashFunction, equalsFunction) {\n this.data = {};\n this.hashFunction = hashFunction || standardHashCodeFunction;\n this.equalsFunction = equalsFunction || standardEqualsFunction;\n }\n\n add(value) {\n const hash = this.hashFunction(value);\n const key = \"hash_\" + hash;\n if (key in this.data) {\n const values = this.data[key];\n for (let i = 0; i < values.length; i++) {\n if (this.equalsFunction(value, values[i])) {\n return values[i];\n }\n }\n values.push(value);\n return value;\n } else {\n this.data[key] = [value];\n return value;\n }\n }\n\n contains(value) {\n return this.get(value) != null;\n }\n\n get(value) {\n const hash = this.hashFunction(value);\n const key = \"hash_\" + hash;\n if (key in this.data) {\n const values = this.data[key];\n for (let i = 0; i < values.length; i++) {\n if (this.equalsFunction(value, values[i])) {\n return values[i];\n }\n }\n }\n return null;\n }\n\n values() {\n let l = [];\n for (const key in this.data) {\n if (key.indexOf(\"hash_\") === 0) {\n l = l.concat(this.data[key]);\n }\n }\n return l;\n }\n\n toString() {\n return arrayToString(this.values());\n }\n\n get length(){\n let l = 0;\n for (const key in this.data) {\n if (key.indexOf(\"hash_\") === 0) {\n l = l + this.data[key].length;\n }\n }\n return l;\n }\n}\n\n\nclass BitSet {\n constructor() {\n this.data = [];\n }\n\n add(value) {\n this.data[value] = true;\n }\n\n or(set) {\n const bits = this;\n Object.keys(set.data).map(function (alt) {\n bits.add(alt);\n });\n }\n\n remove(value) {\n delete this.data[value];\n }\n\n contains(value) {\n return this.data[value] === true;\n }\n\n values() {\n return Object.keys(this.data);\n }\n\n minValue() {\n return Math.min.apply(null, this.values());\n }\n\n hashCode() {\n const hash = new Hash();\n hash.update(this.values());\n return hash.finish();\n }\n\n equals(other) {\n if (!(other instanceof BitSet)) {\n return false;\n }\n return this.hashCode() === 
other.hashCode();\n }\n\n toString() {\n return \"{\" + this.values().join(\", \") + \"}\";\n }\n\n get length(){\n return this.values().length;\n }\n}\n\n\nclass Map {\n constructor(hashFunction, equalsFunction) {\n this.data = {};\n this.hashFunction = hashFunction || standardHashCodeFunction;\n this.equalsFunction = equalsFunction || standardEqualsFunction;\n }\n\n put(key, value) {\n const hashKey = \"hash_\" + this.hashFunction(key);\n if (hashKey in this.data) {\n const entries = this.data[hashKey];\n for (let i = 0; i < entries.length; i++) {\n const entry = entries[i];\n if (this.equalsFunction(key, entry.key)) {\n const oldValue = entry.value;\n entry.value = value;\n return oldValue;\n }\n }\n entries.push({key:key, value:value});\n return value;\n } else {\n this.data[hashKey] = [{key:key, value:value}];\n return value;\n }\n }\n\n containsKey(key) {\n const hashKey = \"hash_\" + this.hashFunction(key);\n if(hashKey in this.data) {\n const entries = this.data[hashKey];\n for (let i = 0; i < entries.length; i++) {\n const entry = entries[i];\n if (this.equalsFunction(key, entry.key))\n return true;\n }\n }\n return false;\n }\n\n get(key) {\n const hashKey = \"hash_\" + this.hashFunction(key);\n if(hashKey in this.data) {\n const entries = this.data[hashKey];\n for (let i = 0; i < entries.length; i++) {\n const entry = entries[i];\n if (this.equalsFunction(key, entry.key))\n return entry.value;\n }\n }\n return null;\n }\n\n entries() {\n let l = [];\n for (const key in this.data) {\n if (key.indexOf(\"hash_\") === 0) {\n l = l.concat(this.data[key]);\n }\n }\n return l;\n }\n\n getKeys() {\n return this.entries().map(function(e) {\n return e.key;\n });\n }\n\n getValues() {\n return this.entries().map(function(e) {\n return e.value;\n });\n }\n\n toString() {\n const ss = this.entries().map(function(entry) {\n return '{' + entry.key + ':' + entry.value + '}';\n });\n return '[' + ss.join(\", \") + ']';\n }\n\n get length(){\n let l = 0;\n for (const 
hashKey in this.data) {\n if (hashKey.indexOf(\"hash_\") === 0) {\n l = l + this.data[hashKey].length;\n }\n }\n return l;\n }\n}\n\n\nclass AltDict {\n constructor() {\n this.data = {};\n }\n\n get(key) {\n key = \"k-\" + key;\n if (key in this.data) {\n return this.data[key];\n } else {\n return null;\n }\n }\n\n put(key, value) {\n key = \"k-\" + key;\n this.data[key] = value;\n }\n\n values() {\n const data = this.data;\n const keys = Object.keys(this.data);\n return keys.map(function (key) {\n return data[key];\n });\n }\n}\n\n\nclass DoubleDict {\n constructor(defaultMapCtor) {\n this.defaultMapCtor = defaultMapCtor || Map;\n this.cacheMap = new this.defaultMapCtor();\n }\n\n get(a, b) {\n const d = this.cacheMap.get(a) || null;\n return d === null ? null : (d.get(b) || null);\n }\n\n set(a, b, o) {\n let d = this.cacheMap.get(a) || null;\n if (d === null) {\n d = new this.defaultMapCtor();\n this.cacheMap.put(a, d);\n }\n d.put(b, o);\n }\n}\n\nclass Hash {\n constructor() {\n this.count = 0;\n this.hash = 0;\n }\n\n update() {\n for(let i=0;i>> (32 - 15));\n k = k * 0x1B873593;\n this.count = this.count + 1;\n let hash = this.hash ^ k;\n hash = (hash << 13) | (hash >>> (32 - 13));\n hash = hash * 5 + 0xE6546B64;\n this.hash = hash;\n }\n }\n }\n\n finish() {\n let hash = this.hash ^ (this.count * 4);\n hash = hash ^ (hash >>> 16);\n hash = hash * 0x85EBCA6B;\n hash = hash ^ (hash >>> 13);\n hash = hash * 0xC2B2AE35;\n hash = hash ^ (hash >>> 16);\n return hash;\n }\n}\n\nfunction hashStuff() {\n const hash = new Hash();\n hash.update.apply(hash, arguments);\n return hash.finish();\n}\n\n\nfunction escapeWhitespace(s, escapeSpaces) {\n s = s.replace(/\\t/g, \"\\\\t\")\n .replace(/\\n/g, \"\\\\n\")\n .replace(/\\r/g, \"\\\\r\");\n if (escapeSpaces) {\n s = s.replace(/ /g, \"\\u00B7\");\n }\n return s;\n}\n\nfunction titleCase(str) {\n return str.replace(/\\w\\S*/g, function (txt) {\n return txt.charAt(0).toUpperCase() + txt.substr(1);\n });\n}\n\nfunction 
equalArrays(a, b) {\n if (!Array.isArray(a) || !Array.isArray(b))\n return false;\n if (a === b)\n return true;\n if (a.length !== b.length)\n return false;\n for (let i = 0; i < a.length; i++) {\n if (a[i] === b[i])\n continue;\n if (!a[i].equals || !a[i].equals(b[i]))\n return false;\n }\n return true;\n}\n\nmodule.exports = {\n Hash,\n Set,\n Map,\n BitSet,\n AltDict,\n DoubleDict,\n hashStuff,\n escapeWhitespace,\n arrayToString,\n titleCase,\n equalArrays\n}\n", "/* Copyright (c) 2012-2017 The ANTLR Project. All rights reserved.\n * Use of this file is governed by the BSD 3-clause license that\n * can be found in the LICENSE.txt file in the project root.\n */\n\n/**\n * A token has properties: text, type, line, character position in the line\n * (so we can ignore tabs), token channel, index, and source from which\n * we obtained this token.\n */\nclass Token {\n\tconstructor() {\n\t\tthis.source = null;\n\t\tthis.type = null; // token type of the token\n\t\tthis.channel = null; // The parser ignores everything not on DEFAULT_CHANNEL\n\t\tthis.start = null; // optional; return -1 if not implemented.\n\t\tthis.stop = null; // optional; return -1 if not implemented.\n\t\tthis.tokenIndex = null; // from 0..n-1 of the token object in the input stream\n\t\tthis.line = null; // line=1..n of the 1st character\n\t\tthis.column = null; // beginning of the line at which it occurs, 0..n-1\n\t\tthis._text = null; // text of the token.\n\t}\n\n\tgetTokenSource() {\n\t\treturn this.source[0];\n\t}\n\n\tgetInputStream() {\n\t\treturn this.source[1];\n\t}\n\n\tget text(){\n\t\treturn this._text;\n\t}\n\n\tset text(text) {\n\t\tthis._text = text;\n\t}\n}\n\nToken.INVALID_TYPE = 0;\n\n/**\n * During lookahead operations, this \"token\" signifies we hit rule end ATN state\n * and did not follow it despite needing to.\n */\nToken.EPSILON = -2;\n\nToken.MIN_USER_TOKEN_TYPE = 1;\n\nToken.EOF = -1;\n\n/**\n * All tokens go to the parser (unless skip() is called in that rule)\n * on a 
particular \"channel\". The parser tunes to a particular channel\n * so that whitespace etc... can go to the parser on a \"hidden\" channel.\n */\nToken.DEFAULT_CHANNEL = 0;\n\n/**\n * Anything on different channel than DEFAULT_CHANNEL is not parsed\n * by parser.\n */\nToken.HIDDEN_CHANNEL = 1;\n\n\nclass CommonToken extends Token {\n\tconstructor(source, type, channel, start, stop) {\n\t\tsuper();\n\t\tthis.source = source !== undefined ? source : CommonToken.EMPTY_SOURCE;\n\t\tthis.type = type !== undefined ? type : null;\n\t\tthis.channel = channel !== undefined ? channel : Token.DEFAULT_CHANNEL;\n\t\tthis.start = start !== undefined ? start : -1;\n\t\tthis.stop = stop !== undefined ? stop : -1;\n\t\tthis.tokenIndex = -1;\n\t\tif (this.source[0] !== null) {\n\t\t\tthis.line = source[0].line;\n\t\t\tthis.column = source[0].column;\n\t\t} else {\n\t\t\tthis.column = -1;\n\t\t}\n\t}\n\n\t/**\n\t * Constructs a new {@link CommonToken} as a copy of another {@link Token}.\n\t *\n\t *

\n\t * If {@code oldToken} is also a {@link CommonToken} instance, the newly\n\t * constructed token will share a reference to the {@link //text} field and\n\t * the {@link Pair} stored in {@link //source}. Otherwise, {@link //text} will\n\t * be assigned the result of calling {@link //getText}, and {@link //source}\n\t * will be constructed from the result of {@link Token//getTokenSource} and\n\t * {@link Token//getInputStream}.

\n\t *\n\t * @param oldToken The token to copy.\n\t */\n\tclone() {\n\t\tconst t = new CommonToken(this.source, this.type, this.channel, this.start, this.stop);\n\t\tt.tokenIndex = this.tokenIndex;\n\t\tt.line = this.line;\n\t\tt.column = this.column;\n\t\tt.text = this.text;\n\t\treturn t;\n\t}\n\n\ttoString() {\n\t\tlet txt = this.text;\n\t\tif (txt !== null) {\n\t\t\ttxt = txt.replace(/\\n/g, \"\\\\n\").replace(/\\r/g, \"\\\\r\").replace(/\\t/g, \"\\\\t\");\n\t\t} else {\n\t\t\ttxt = \"\";\n\t\t}\n\t\treturn \"[@\" + this.tokenIndex + \",\" + this.start + \":\" + this.stop + \"='\" +\n\t\t\t\ttxt + \"',<\" + this.type + \">\" +\n\t\t\t\t(this.channel > 0 ? \",channel=\" + this.channel : \"\") + \",\" +\n\t\t\t\tthis.line + \":\" + this.column + \"]\";\n\t}\n\n\tget text(){\n\t\tif (this._text !== null) {\n\t\t\treturn this._text;\n\t\t}\n\t\tconst input = this.getInputStream();\n\t\tif (input === null) {\n\t\t\treturn null;\n\t\t}\n\t\tconst n = input.size;\n\t\tif (this.start < n && this.stop < n) {\n\t\t\treturn input.getText(this.start, this.stop);\n\t\t} else {\n\t\t\treturn \"\";\n\t\t}\n\t}\n\n\tset text(text) {\n\t\tthis._text = text;\n\t}\n}\n\n/**\n * An empty {@link Pair} which is used as the default value of\n * {@link //source} for tokens that do not have a source.\n */\nCommonToken.EMPTY_SOURCE = [ null, null ];\n\nmodule.exports = {\n\tToken,\n\tCommonToken\n}\n", "/* Copyright (c) 2012-2017 The ANTLR Project. All rights reserved.\n * Use of this file is governed by the BSD 3-clause license that\n * can be found in the LICENSE.txt file in the project root.\n */\n\nconst INITIAL_NUM_TRANSITIONS = 4;\n\n/**\n * The following images show the relation of states and\n * {@link ATNState//transitions} for various grammar constructs.\n *\n *
    \n *\n *
  • Solid edges marked with an &//0949; indicate a required\n * {@link EpsilonTransition}.
  • \n *\n *
  • Dashed edges indicate locations where any transition derived from\n * {@link Transition} might appear.
  • \n *\n *
  • Dashed nodes are place holders for either a sequence of linked\n * {@link BasicState} states or the inclusion of a block representing a nested\n * construct in one of the forms below.
  • \n *\n *
  • Nodes showing multiple outgoing alternatives with a {@code ...} support\n * any number of alternatives (one or more). Nodes without the {@code ...} only\n * support the exact number of alternatives shown in the diagram.
  • \n *\n *
\n *\n *

Basic Blocks

\n *\n *

Rule

\n *\n * \n *\n *

Block of 1 or more alternatives

\n *\n * \n *\n *

Greedy Loops

\n *\n *

Greedy Closure: {@code (...)*}

\n *\n * \n *\n *

Greedy Positive Closure: {@code (...)+}

\n *\n * \n *\n *

Greedy Optional: {@code (...)?}

\n *\n * \n *\n *

Non-Greedy Loops

\n *\n *

Non-Greedy Closure: {@code (...)*?}

\n *\n * \n *\n *

Non-Greedy Positive Closure: {@code (...)+?}

\n *\n * \n *\n *

Non-Greedy Optional: {@code (...)??}

\n *\n * \n */\nclass ATNState {\n constructor() {\n // Which ATN are we in?\n this.atn = null;\n this.stateNumber = ATNState.INVALID_STATE_NUMBER;\n this.stateType = null;\n this.ruleIndex = 0; // at runtime, we don't have Rule objects\n this.epsilonOnlyTransitions = false;\n // Track the transitions emanating from this ATN state.\n this.transitions = [];\n // Used to cache lookahead during parsing, not used during construction\n this.nextTokenWithinRule = null;\n }\n\n toString() {\n return this.stateNumber;\n }\n\n equals(other) {\n if (other instanceof ATNState) {\n return this.stateNumber===other.stateNumber;\n } else {\n return false;\n }\n }\n\n isNonGreedyExitState() {\n return false;\n }\n\n addTransition(trans, index) {\n if(index===undefined) {\n index = -1;\n }\n if (this.transitions.length===0) {\n this.epsilonOnlyTransitions = trans.isEpsilon;\n } else if(this.epsilonOnlyTransitions !== trans.isEpsilon) {\n this.epsilonOnlyTransitions = false;\n }\n if (index===-1) {\n this.transitions.push(trans);\n } else {\n this.transitions.splice(index, 1, trans);\n }\n }\n}\n\n// constants for serialization\nATNState.INVALID_TYPE = 0;\nATNState.BASIC = 1;\nATNState.RULE_START = 2;\nATNState.BLOCK_START = 3;\nATNState.PLUS_BLOCK_START = 4;\nATNState.STAR_BLOCK_START = 5;\nATNState.TOKEN_START = 6;\nATNState.RULE_STOP = 7;\nATNState.BLOCK_END = 8;\nATNState.STAR_LOOP_BACK = 9;\nATNState.STAR_LOOP_ENTRY = 10;\nATNState.PLUS_LOOP_BACK = 11;\nATNState.LOOP_END = 12;\n\nATNState.serializationNames = [\n \"INVALID\",\n \"BASIC\",\n \"RULE_START\",\n \"BLOCK_START\",\n \"PLUS_BLOCK_START\",\n \"STAR_BLOCK_START\",\n \"TOKEN_START\",\n \"RULE_STOP\",\n \"BLOCK_END\",\n \"STAR_LOOP_BACK\",\n \"STAR_LOOP_ENTRY\",\n \"PLUS_LOOP_BACK\",\n \"LOOP_END\" ];\n\nATNState.INVALID_STATE_NUMBER = -1;\n\n\nclass BasicState extends ATNState {\n constructor() {\n super();\n this.stateType = ATNState.BASIC;\n }\n}\n\nclass DecisionState extends ATNState {\n constructor() {\n super();\n 
this.decision = -1;\n this.nonGreedy = false;\n return this;\n }\n}\n\n/**\n * The start of a regular {@code (...)} block\n */\nclass BlockStartState extends DecisionState {\n constructor() {\n super();\n this.endState = null;\n return this;\n }\n}\n\nclass BasicBlockStartState extends BlockStartState {\n constructor() {\n super();\n this.stateType = ATNState.BLOCK_START;\n return this;\n }\n}\n\n/**\n * Terminal node of a simple {@code (a|b|c)} block\n */\nclass BlockEndState extends ATNState {\n constructor() {\n super();\n this.stateType = ATNState.BLOCK_END;\n this.startState = null;\n return this;\n }\n}\n\n/**\n * The last node in the ATN for a rule, unless that rule is the start symbol.\n * In that case, there is one transition to EOF. Later, we might encode\n * references to all calls to this rule to compute FOLLOW sets for\n * error handling\n */\nclass RuleStopState extends ATNState {\n constructor() {\n super();\n this.stateType = ATNState.RULE_STOP;\n return this;\n }\n}\n\nclass RuleStartState extends ATNState {\n constructor() {\n super();\n this.stateType = ATNState.RULE_START;\n this.stopState = null;\n this.isPrecedenceRule = false;\n return this;\n }\n}\n\n/**\n * Decision state for {@code A+} and {@code (A|B)+}. It has two transitions:\n * one to the loop back to start of the block and one to exit.\n */\nclass PlusLoopbackState extends DecisionState {\n constructor() {\n super();\n this.stateType = ATNState.PLUS_LOOP_BACK;\n return this;\n }\n}\n\n/**\n * Start of {@code (A|B|...)+} loop. Technically a decision state, but\n * we don't use for code generation; somebody might need it, so I'm defining\n * it for completeness. 
In reality, the {@link PlusLoopbackState} node is the\n * real decision-making note for {@code A+}\n */\nclass PlusBlockStartState extends BlockStartState {\n constructor() {\n super();\n this.stateType = ATNState.PLUS_BLOCK_START;\n this.loopBackState = null;\n return this;\n }\n}\n\n/**\n * The block that begins a closure loop\n */\nclass StarBlockStartState extends BlockStartState {\n constructor() {\n super();\n this.stateType = ATNState.STAR_BLOCK_START;\n return this;\n }\n}\n\nclass StarLoopbackState extends ATNState {\n constructor() {\n super();\n this.stateType = ATNState.STAR_LOOP_BACK;\n return this;\n }\n}\n\nclass StarLoopEntryState extends DecisionState {\n constructor() {\n super();\n this.stateType = ATNState.STAR_LOOP_ENTRY;\n this.loopBackState = null;\n // Indicates whether this state can benefit from a precedence DFA during SLL decision making.\n this.isPrecedenceDecision = null;\n return this;\n }\n}\n\n/**\n * Mark the end of a * or + loop\n */\nclass LoopEndState extends ATNState {\n constructor() {\n super();\n this.stateType = ATNState.LOOP_END;\n this.loopBackState = null;\n return this;\n }\n}\n\n/**\n * The Tokens rule start state linking to each lexer rule start state\n */\nclass TokensStartState extends DecisionState {\n constructor() {\n super();\n this.stateType = ATNState.TOKEN_START;\n return this;\n }\n}\n\nmodule.exports = {\n ATNState,\n BasicState,\n DecisionState,\n BlockStartState,\n BlockEndState,\n LoopEndState,\n RuleStartState,\n RuleStopState,\n TokensStartState,\n PlusLoopbackState,\n StarLoopbackState,\n StarLoopEntryState,\n PlusBlockStartState,\n StarBlockStartState,\n BasicBlockStartState\n}\n", "/* Copyright (c) 2012-2017 The ANTLR Project. 
All rights reserved.\n * Use of this file is governed by the BSD 3-clause license that\n * can be found in the LICENSE.txt file in the project root.\n */\n\nconst { Set, Hash, equalArrays } = require('./../Utils');\n\n/**\n * A tree structure used to record the semantic context in which\n * an ATN configuration is valid. It's either a single predicate,\n * a conjunction {@code p1&&p2}, or a sum of products {@code p1||p2}.\n *\n *

I have scoped the {@link AND}, {@link OR}, and {@link Predicate} subclasses of\n * {@link SemanticContext} within the scope of this outer class.

\n */\nclass SemanticContext {\n\n\thashCode() {\n\t\tconst hash = new Hash();\n\t\tthis.updateHashCode(hash);\n\t\treturn hash.finish();\n\t}\n\n\t/**\n\t * For context independent predicates, we evaluate them without a local\n\t * context (i.e., null context). That way, we can evaluate them without\n\t * having to create proper rule-specific context during prediction (as\n\t * opposed to the parser, which creates them naturally). In a practical\n\t * sense, this avoids a cast exception from RuleContext to myruleContext.\n\t *\n\t *

For context dependent predicates, we must pass in a local context so that\n\t * references such as $arg evaluate properly as _localctx.arg. We only\n\t * capture context dependent predicates in the context in which we begin\n\t * prediction, so we passed in the outer context here in case of context\n\t * dependent predicate evaluation.

\n\t */\n\tevaluate(parser, outerContext) {}\n\n\t/**\n\t * Evaluate the precedence predicates for the context and reduce the result.\n\t *\n\t * @param parser The parser instance.\n\t * @param outerContext The current parser context object.\n\t * @return The simplified semantic context after precedence predicates are\n\t * evaluated, which will be one of the following values.\n\t *
    \n\t *
  • {@link //NONE}: if the predicate simplifies to {@code true} after\n\t * precedence predicates are evaluated.
  • \n\t *
  • {@code null}: if the predicate simplifies to {@code false} after\n\t * precedence predicates are evaluated.
  • \n\t *
  • {@code this}: if the semantic context is not changed as a result of\n\t * precedence predicate evaluation.
  • \n\t *
  • A non-{@code null} {@link SemanticContext}: the new simplified\n\t * semantic context after precedence predicates are evaluated.
  • \n\t *
\n\t */\n\tevalPrecedence(parser, outerContext) {\n\t\treturn this;\n\t}\n\n\tstatic andContext(a, b) {\n\t\tif (a === null || a === SemanticContext.NONE) {\n\t\t\treturn b;\n\t\t}\n\t\tif (b === null || b === SemanticContext.NONE) {\n\t\t\treturn a;\n\t\t}\n\t\tconst result = new AND(a, b);\n\t\tif (result.opnds.length === 1) {\n\t\t\treturn result.opnds[0];\n\t\t} else {\n\t\t\treturn result;\n\t\t}\n\t}\n\n\tstatic orContext(a, b) {\n\t\tif (a === null) {\n\t\t\treturn b;\n\t\t}\n\t\tif (b === null) {\n\t\t\treturn a;\n\t\t}\n\t\tif (a === SemanticContext.NONE || b === SemanticContext.NONE) {\n\t\t\treturn SemanticContext.NONE;\n\t\t}\n\t\tconst result = new OR(a, b);\n\t\tif (result.opnds.length === 1) {\n\t\t\treturn result.opnds[0];\n\t\t} else {\n\t\t\treturn result;\n\t\t}\n\t}\n}\n\n\nclass Predicate extends SemanticContext {\n\n\tconstructor(ruleIndex, predIndex, isCtxDependent) {\n\t\tsuper();\n\t\tthis.ruleIndex = ruleIndex === undefined ? -1 : ruleIndex;\n\t\tthis.predIndex = predIndex === undefined ? -1 : predIndex;\n\t\tthis.isCtxDependent = isCtxDependent === undefined ? false : isCtxDependent; // e.g., $i ref in pred\n\t}\n\n\tevaluate(parser, outerContext) {\n\t\tconst localctx = this.isCtxDependent ? 
outerContext : null;\n\t\treturn parser.sempred(localctx, this.ruleIndex, this.predIndex);\n\t}\n\n\tupdateHashCode(hash) {\n\t\thash.update(this.ruleIndex, this.predIndex, this.isCtxDependent);\n\t}\n\n\tequals(other) {\n\t\tif (this === other) {\n\t\t\treturn true;\n\t\t} else if (!(other instanceof Predicate)) {\n\t\t\treturn false;\n\t\t} else {\n\t\t\treturn this.ruleIndex === other.ruleIndex &&\n\t\t\t\t\tthis.predIndex === other.predIndex &&\n\t\t\t\t\tthis.isCtxDependent === other.isCtxDependent;\n\t\t}\n\t}\n\n\ttoString() {\n\t\treturn \"{\" + this.ruleIndex + \":\" + this.predIndex + \"}?\";\n\t}\n}\n\n/**\n * The default {@link SemanticContext}, which is semantically equivalent to\n * a predicate of the form {@code {true}?}\n */\nSemanticContext.NONE = new Predicate();\n\n\nclass PrecedencePredicate extends SemanticContext {\n\n\tconstructor(precedence) {\n\t\tsuper();\n\t\tthis.precedence = precedence === undefined ? 0 : precedence;\n\t}\n\n\tevaluate(parser, outerContext) {\n\t\treturn parser.precpred(outerContext, this.precedence);\n\t}\n\n\tevalPrecedence(parser, outerContext) {\n\t\tif (parser.precpred(outerContext, this.precedence)) {\n\t\t\treturn SemanticContext.NONE;\n\t\t} else {\n\t\t\treturn null;\n\t\t}\n\t}\n\n\tcompareTo(other) {\n\t\treturn this.precedence - other.precedence;\n\t}\n\n\tupdateHashCode(hash) {\n\t\thash.update(this.precedence);\n\t}\n\n\tequals(other) {\n\t\tif (this === other) {\n\t\t\treturn true;\n\t\t} else if (!(other instanceof PrecedencePredicate)) {\n\t\t\treturn false;\n\t\t} else {\n\t\t\treturn this.precedence === other.precedence;\n\t\t}\n\t}\n\n\ttoString() {\n\t\treturn \"{\" + this.precedence + \">=prec}?\";\n\t}\n\n\tstatic filterPrecedencePredicates(set) {\n\t\tconst result = [];\n\t\tset.values().map( function(context) {\n\t\t\tif (context instanceof PrecedencePredicate) {\n\t\t\t\tresult.push(context);\n\t\t\t}\n\t\t});\n\t\treturn result;\n\t}\n}\n\nclass AND extends SemanticContext {\n\t/**\n\t * A 
semantic context which is true whenever none of the contained contexts\n\t * is false\n\t */\n\tconstructor(a, b) {\n\t\tsuper();\n\t\tconst operands = new Set();\n\t\tif (a instanceof AND) {\n\t\t\ta.opnds.map(function(o) {\n\t\t\t\toperands.add(o);\n\t\t\t});\n\t\t} else {\n\t\t\toperands.add(a);\n\t\t}\n\t\tif (b instanceof AND) {\n\t\t\tb.opnds.map(function(o) {\n\t\t\t\toperands.add(o);\n\t\t\t});\n\t\t} else {\n\t\t\toperands.add(b);\n\t\t}\n\t\tconst precedencePredicates = PrecedencePredicate.filterPrecedencePredicates(operands);\n\t\tif (precedencePredicates.length > 0) {\n\t\t\t// interested in the transition with the lowest precedence\n\t\t\tlet reduced = null;\n\t\t\tprecedencePredicates.map( function(p) {\n\t\t\t\tif(reduced===null || p.precedence\n\t * The evaluation of predicates by this context is short-circuiting, but\n\t * unordered.

\n\t */\n\tevaluate(parser, outerContext) {\n\t\tfor (let i = 0; i < this.opnds.length; i++) {\n\t\t\tif (!this.opnds[i].evaluate(parser, outerContext)) {\n\t\t\t\treturn false;\n\t\t\t}\n\t\t}\n\t\treturn true;\n\t}\n\n\tevalPrecedence(parser, outerContext) {\n\t\tlet differs = false;\n\t\tconst operands = [];\n\t\tfor (let i = 0; i < this.opnds.length; i++) {\n\t\t\tconst context = this.opnds[i];\n\t\t\tconst evaluated = context.evalPrecedence(parser, outerContext);\n\t\t\tdiffers |= (evaluated !== context);\n\t\t\tif (evaluated === null) {\n\t\t\t\t// The AND context is false if any element is false\n\t\t\t\treturn null;\n\t\t\t} else if (evaluated !== SemanticContext.NONE) {\n\t\t\t\t// Reduce the result by skipping true elements\n\t\t\t\toperands.push(evaluated);\n\t\t\t}\n\t\t}\n\t\tif (!differs) {\n\t\t\treturn this;\n\t\t}\n\t\tif (operands.length === 0) {\n\t\t\t// all elements were true, so the AND context is true\n\t\t\treturn SemanticContext.NONE;\n\t\t}\n\t\tlet result = null;\n\t\toperands.map(function(o) {\n\t\t\tresult = result === null ? o : SemanticContext.andContext(result, o);\n\t\t});\n\t\treturn result;\n\t}\n\n\ttoString() {\n\t\tconst s = this.opnds.map(o => o.toString());\n\t\treturn (s.length > 3 ? 
s.slice(3) : s).join(\"&&\");\n\t}\n}\n\n\nclass OR extends SemanticContext {\n\t/**\n\t * A semantic context which is true whenever at least one of the contained\n\t * contexts is true\n\t */\n\tconstructor(a, b) {\n\t\tsuper();\n\t\tconst operands = new Set();\n\t\tif (a instanceof OR) {\n\t\t\ta.opnds.map(function(o) {\n\t\t\t\toperands.add(o);\n\t\t\t});\n\t\t} else {\n\t\t\toperands.add(a);\n\t\t}\n\t\tif (b instanceof OR) {\n\t\t\tb.opnds.map(function(o) {\n\t\t\t\toperands.add(o);\n\t\t\t});\n\t\t} else {\n\t\t\toperands.add(b);\n\t\t}\n\n\t\tconst precedencePredicates = PrecedencePredicate.filterPrecedencePredicates(operands);\n\t\tif (precedencePredicates.length > 0) {\n\t\t\t// interested in the transition with the highest precedence\n\t\t\tconst s = precedencePredicates.sort(function(a, b) {\n\t\t\t\treturn a.compareTo(b);\n\t\t\t});\n\t\t\tconst reduced = s[s.length-1];\n\t\t\toperands.add(reduced);\n\t\t}\n\t\tthis.opnds = Array.from(operands.values());\n\t}\n\n\tequals(other) {\n\t\tif (this === other) {\n\t\t\treturn true;\n\t\t} else if (!(other instanceof OR)) {\n\t\t\treturn false;\n\t\t} else {\n\t\t\treturn equalArrays(this.opnds, other.opnds);\n\t\t}\n\t}\n\n\tupdateHashCode(hash) {\n\t\thash.update(this.opnds, \"OR\");\n\t}\n\n\t/**\n\t *

\n\t * The evaluation of predicates by this context is short-circuiting, but\n\t * unordered.

\n\t */\n\tevaluate(parser, outerContext) {\n\t\tfor (let i = 0; i < this.opnds.length; i++) {\n\t\t\tif (this.opnds[i].evaluate(parser, outerContext)) {\n\t\t\t\treturn true;\n\t\t\t}\n\t\t}\n\t\treturn false;\n\t}\n\n\tevalPrecedence(parser, outerContext) {\n\t\tlet differs = false;\n\t\tconst operands = [];\n\t\tfor (let i = 0; i < this.opnds.length; i++) {\n\t\t\tconst context = this.opnds[i];\n\t\t\tconst evaluated = context.evalPrecedence(parser, outerContext);\n\t\t\tdiffers |= (evaluated !== context);\n\t\t\tif (evaluated === SemanticContext.NONE) {\n\t\t\t\t// The OR context is true if any element is true\n\t\t\t\treturn SemanticContext.NONE;\n\t\t\t} else if (evaluated !== null) {\n\t\t\t\t// Reduce the result by skipping false elements\n\t\t\t\toperands.push(evaluated);\n\t\t\t}\n\t\t}\n\t\tif (!differs) {\n\t\t\treturn this;\n\t\t}\n\t\tif (operands.length === 0) {\n\t\t\t// all elements were false, so the OR context is false\n\t\t\treturn null;\n\t\t}\n\t\tconst result = null;\n\t\toperands.map(function(o) {\n\t\t\treturn result === null ? o : SemanticContext.orContext(result, o);\n\t\t});\n\t\treturn result;\n\t}\n\n\ttoString() {\n\t\tconst s = this.opnds.map(o => o.toString());\n\t\treturn (s.length > 3 ? s.slice(3) : s).join(\"||\");\n\t}\n}\n\nmodule.exports = {\n\tSemanticContext,\n\tPrecedencePredicate,\n\tPredicate\n}\n", "/* Copyright (c) 2012-2017 The ANTLR Project. 
All rights reserved.\n * Use of this file is governed by the BSD 3-clause license that\n * can be found in the LICENSE.txt file in the project root.\n */\n\nconst {DecisionState} = require('./ATNState');\nconst {SemanticContext} = require('./SemanticContext');\nconst {Hash} = require(\"../Utils\");\n\n\nfunction checkParams(params, isCfg) {\n\tif(params===null) {\n\t\tconst result = { state:null, alt:null, context:null, semanticContext:null };\n\t\tif(isCfg) {\n\t\t\tresult.reachesIntoOuterContext = 0;\n\t\t}\n\t\treturn result;\n\t} else {\n\t\tconst props = {};\n\t\tprops.state = params.state || null;\n\t\tprops.alt = (params.alt === undefined) ? null : params.alt;\n\t\tprops.context = params.context || null;\n\t\tprops.semanticContext = params.semanticContext || null;\n\t\tif(isCfg) {\n\t\t\tprops.reachesIntoOuterContext = params.reachesIntoOuterContext || 0;\n\t\t\tprops.precedenceFilterSuppressed = params.precedenceFilterSuppressed || false;\n\t\t}\n\t\treturn props;\n\t}\n}\n\nclass ATNConfig {\n /**\n * @param {Object} params A tuple: (ATN state, predicted alt, syntactic, semantic context).\n * The syntactic context is a graph-structured stack node whose\n * path(s) to the root is the rule invocation(s)\n * chain used to arrive at the state. The semantic context is\n * the tree of semantic predicates encountered before reaching\n * an ATN state\n */\n constructor(params, config) {\n this.checkContext(params, config);\n params = checkParams(params);\n config = checkParams(config, true);\n // The ATN state associated with this configuration///\n this.state = params.state!==null ? params.state : config.state;\n // What alt (or lexer rule) is predicted by this configuration///\n this.alt = params.alt!==null ? params.alt : config.alt;\n /**\n * The stack of invoking states leading to the rule/states associated\n * with this config. We track only those contexts pushed during\n * execution of the ATN simulator\n */\n this.context = params.context!==null ? 
params.context : config.context;\n this.semanticContext = params.semanticContext!==null ? params.semanticContext :\n (config.semanticContext!==null ? config.semanticContext : SemanticContext.NONE);\n // TODO: make it a boolean then\n /**\n * We cannot execute predicates dependent upon local context unless\n * we know for sure we are in the correct context. Because there is\n * no way to do this efficiently, we simply cannot evaluate\n * dependent predicates unless we are in the rule that initially\n * invokes the ATN simulator.\n * closure() tracks the depth of how far we dip into the\n * outer context: depth > 0. Note that it may not be totally\n * accurate depth since I don't ever decrement\n */\n this.reachesIntoOuterContext = config.reachesIntoOuterContext;\n this.precedenceFilterSuppressed = config.precedenceFilterSuppressed;\n }\n\n checkContext(params, config) {\n if((params.context===null || params.context===undefined) &&\n (config===null || config.context===null || config.context===undefined)) {\n this.context = null;\n }\n }\n\n hashCode() {\n const hash = new Hash();\n this.updateHashCode(hash);\n return hash.finish();\n }\n\n updateHashCode(hash) {\n hash.update(this.state.stateNumber, this.alt, this.context, this.semanticContext);\n }\n\n /**\n * An ATN configuration is equal to another if both have\n * the same state, they predict the same alternative, and\n * syntactic/semantic contexts are the same\n */\n equals(other) {\n if (this === other) {\n return true;\n } else if (! (other instanceof ATNConfig)) {\n return false;\n } else {\n return this.state.stateNumber===other.state.stateNumber &&\n this.alt===other.alt &&\n (this.context===null ? 
other.context===null : this.context.equals(other.context)) &&\n this.semanticContext.equals(other.semanticContext) &&\n this.precedenceFilterSuppressed===other.precedenceFilterSuppressed;\n }\n }\n\n hashCodeForConfigSet() {\n const hash = new Hash();\n hash.update(this.state.stateNumber, this.alt, this.semanticContext);\n return hash.finish();\n }\n\n equalsForConfigSet(other) {\n if (this === other) {\n return true;\n } else if (! (other instanceof ATNConfig)) {\n return false;\n } else {\n return this.state.stateNumber===other.state.stateNumber &&\n this.alt===other.alt &&\n this.semanticContext.equals(other.semanticContext);\n }\n }\n\n toString() {\n return \"(\" + this.state + \",\" + this.alt +\n (this.context!==null ? \",[\" + this.context.toString() + \"]\" : \"\") +\n (this.semanticContext !== SemanticContext.NONE ?\n (\",\" + this.semanticContext.toString())\n : \"\") +\n (this.reachesIntoOuterContext>0 ?\n (\",up=\" + this.reachesIntoOuterContext)\n : \"\") + \")\";\n }\n}\n\n\nclass LexerATNConfig extends ATNConfig {\n constructor(params, config) {\n super(params, config);\n\n // This is the backing field for {@link //getLexerActionExecutor}.\n const lexerActionExecutor = params.lexerActionExecutor || null;\n this.lexerActionExecutor = lexerActionExecutor || (config!==null ? config.lexerActionExecutor : null);\n this.passedThroughNonGreedyDecision = config!==null ? this.checkNonGreedyDecision(config, this.state) : false;\n this.hashCodeForConfigSet = LexerATNConfig.prototype.hashCode;\n this.equalsForConfigSet = LexerATNConfig.prototype.equals;\n return this;\n }\n\n updateHashCode(hash) {\n hash.update(this.state.stateNumber, this.alt, this.context, this.semanticContext, this.passedThroughNonGreedyDecision, this.lexerActionExecutor);\n }\n\n equals(other) {\n return this === other ||\n (other instanceof LexerATNConfig &&\n this.passedThroughNonGreedyDecision === other.passedThroughNonGreedyDecision &&\n (this.lexerActionExecutor ? 
this.lexerActionExecutor.equals(other.lexerActionExecutor) : !other.lexerActionExecutor) &&\n super.equals(other));\n }\n\n checkNonGreedyDecision(source, target) {\n return source.passedThroughNonGreedyDecision ||\n (target instanceof DecisionState) && target.nonGreedy;\n }\n}\n\n\nmodule.exports.ATNConfig = ATNConfig;\nmodule.exports.LexerATNConfig = LexerATNConfig;\n", "/* Copyright (c) 2012-2017 The ANTLR Project. All rights reserved.\n * Use of this file is governed by the BSD 3-clause license that\n * can be found in the LICENSE.txt file in the project root.\n */\n\nconst {Token} = require('./Token');\n\n/* stop is not included! */\nclass Interval {\n\tconstructor(start, stop) {\n\t\tthis.start = start;\n\t\tthis.stop = stop;\n\t}\n\n\tcontains(item) {\n\t\treturn item >= this.start && item < this.stop;\n\t}\n\n\ttoString() {\n\t\tif(this.start===this.stop-1) {\n\t\t\treturn this.start.toString();\n\t\t} else {\n\t\t\treturn this.start.toString() + \"..\" + (this.stop-1).toString();\n\t\t}\n\t}\n\n\tget length(){\n\t\treturn this.stop - this.start;\n\t}\n}\n\n\nclass IntervalSet {\n\tconstructor() {\n\t\tthis.intervals = null;\n\t\tthis.readOnly = false;\n\t}\n\n\tfirst(v) {\n\t\tif (this.intervals === null || this.intervals.length===0) {\n\t\t\treturn Token.INVALID_TYPE;\n\t\t} else {\n\t\t\treturn this.intervals[0].start;\n\t\t}\n\t}\n\n\taddOne(v) {\n\t\tthis.addInterval(new Interval(v, v + 1));\n\t}\n\n\taddRange(l, h) {\n\t\tthis.addInterval(new Interval(l, h + 1));\n\t}\n\n\taddInterval(toAdd) {\n\t\tif (this.intervals === null) {\n\t\t\tthis.intervals = [];\n\t\t\tthis.intervals.push(toAdd);\n\t\t} else {\n\t\t\t// find insert pos\n\t\t\tfor (let pos = 0; pos < this.intervals.length; pos++) {\n\t\t\t\tconst existing = this.intervals[pos];\n\t\t\t\t// distinct range -> insert\n\t\t\t\tif (toAdd.stop < existing.start) {\n\t\t\t\t\tthis.intervals.splice(pos, 0, toAdd);\n\t\t\t\t\treturn;\n\t\t\t\t}\n\t\t\t\t// contiguous range -> adjust\n\t\t\t\telse if 
(toAdd.stop === existing.start) {\n\t\t\t\t\tthis.intervals[pos].start = toAdd.start;\n\t\t\t\t\treturn;\n\t\t\t\t}\n\t\t\t\t// overlapping range -> adjust and reduce\n\t\t\t\telse if (toAdd.start <= existing.stop) {\n\t\t\t\t\tthis.intervals[pos] = new Interval(Math.min(existing.start, toAdd.start), Math.max(existing.stop, toAdd.stop));\n\t\t\t\t\tthis.reduce(pos);\n\t\t\t\t\treturn;\n\t\t\t\t}\n\t\t\t}\n\t\t\t// greater than any existing\n\t\t\tthis.intervals.push(toAdd);\n\t\t}\n\t}\n\n\taddSet(other) {\n\t\tif (other.intervals !== null) {\n\t\t\tother.intervals.forEach( toAdd => this.addInterval(toAdd), this);\n\t\t}\n\t\treturn this;\n\t}\n\n\treduce(pos) {\n\t\t// only need to reduce if pos is not the last\n\t\tif (pos < this.intervals.length - 1) {\n\t\t\tconst current = this.intervals[pos];\n\t\t\tconst next = this.intervals[pos + 1];\n\t\t\t// if next contained in current\n\t\t\tif (current.stop >= next.stop) {\n\t\t\t\tthis.intervals.splice(pos + 1, 1);\n\t\t\t\tthis.reduce(pos);\n\t\t\t} else if (current.stop >= next.start) {\n\t\t\t\tthis.intervals[pos] = new Interval(current.start, next.stop);\n\t\t\t\tthis.intervals.splice(pos + 1, 1);\n\t\t\t}\n\t\t}\n\t}\n\n\tcomplement(start, stop) {\n\t\tconst result = new IntervalSet();\n\t\tresult.addInterval(new Interval(start,stop+1));\n\t\tif(this.intervals !== null)\n\t\t\tthis.intervals.forEach(toRemove => result.removeRange(toRemove));\n\t\treturn result;\n\t}\n\n\tcontains(item) {\n\t\tif (this.intervals === null) {\n\t\t\treturn false;\n\t\t} else {\n\t\t\tfor (let k = 0; k < this.intervals.length; k++) {\n\t\t\t\tif(this.intervals[k].contains(item)) {\n\t\t\t\t\treturn true;\n\t\t\t\t}\n\t\t\t}\n\t\t\treturn false;\n\t\t}\n\t}\n\n\tremoveRange(toRemove) {\n\t\tif(toRemove.start===toRemove.stop-1) {\n\t\t\tthis.removeOne(toRemove.start);\n\t\t} else if (this.intervals !== null) {\n\t\t\tlet pos = 0;\n\t\t\tfor(let n=0; nexisting.start && toRemove.stop=existing.stop) 
{\n\t\t\t\t\tthis.intervals.splice(pos, 1);\n\t\t\t\t\tpos = pos - 1; // need another pass\n\t\t\t\t}\n\t\t\t\t// check for lower boundary\n\t\t\t\telse if(toRemove.start\");\n\t\t\t\t} else {\n\t\t\t\t\tnames.push(\"'\" + String.fromCharCode(existing.start) + \"'\");\n\t\t\t\t}\n\t\t\t} else {\n\t\t\t\tnames.push(\"'\" + String.fromCharCode(existing.start) + \"'..'\" + String.fromCharCode(existing.stop-1) + \"'\");\n\t\t\t}\n\t\t}\n\t\tif (names.length > 1) {\n\t\t\treturn \"{\" + names.join(\", \") + \"}\";\n\t\t} else {\n\t\t\treturn names[0];\n\t\t}\n\t}\n\n\ttoIndexString() {\n\t\tconst names = [];\n\t\tfor (let i = 0; i < this.intervals.length; i++) {\n\t\t\tconst existing = this.intervals[i];\n\t\t\tif(existing.stop===existing.start+1) {\n\t\t\t\tif ( existing.start===Token.EOF ) {\n\t\t\t\t\tnames.push(\"\");\n\t\t\t\t} else {\n\t\t\t\t\tnames.push(existing.start.toString());\n\t\t\t\t}\n\t\t\t} else {\n\t\t\t\tnames.push(existing.start.toString() + \"..\" + (existing.stop-1).toString());\n\t\t\t}\n\t\t}\n\t\tif (names.length > 1) {\n\t\t\treturn \"{\" + names.join(\", \") + \"}\";\n\t\t} else {\n\t\t\treturn names[0];\n\t\t}\n\t}\n\n\ttoTokenString(literalNames, symbolicNames) {\n\t\tconst names = [];\n\t\tfor (let i = 0; i < this.intervals.length; i++) {\n\t\t\tconst existing = this.intervals[i];\n\t\t\tfor (let j = existing.start; j < existing.stop; j++) {\n\t\t\t\tnames.push(this.elementName(literalNames, symbolicNames, j));\n\t\t\t}\n\t\t}\n\t\tif (names.length > 1) {\n\t\t\treturn \"{\" + names.join(\", \") + \"}\";\n\t\t} else {\n\t\t\treturn names[0];\n\t\t}\n\t}\n\n\telementName(literalNames, symbolicNames, token) {\n\t\tif (token === Token.EOF) {\n\t\t\treturn \"\";\n\t\t} else if (token === Token.EPSILON) {\n\t\t\treturn \"\";\n\t\t} else {\n\t\t\treturn literalNames[token] || symbolicNames[token];\n\t\t}\n\t}\n\n\tget length(){\n\t\treturn this.intervals.map( interval => interval.length ).reduce((acc, val) => acc + 
val);\n\t}\n}\n\nmodule.exports = {\n\tInterval,\n\tIntervalSet\n};\n", "/* Copyright (c) 2012-2017 The ANTLR Project. All rights reserved.\n * Use of this file is governed by the BSD 3-clause license that\n * can be found in the LICENSE.txt file in the project root.\n */\n\nconst {Token} = require('./../Token');\nconst {IntervalSet} = require('./../IntervalSet');\nconst {Predicate, PrecedencePredicate} = require('./SemanticContext');\n\n/**\n * An ATN transition between any two ATN states. Subclasses define\n * atom, set, epsilon, action, predicate, rule transitions.\n *\n *

This is a one way link. It emanates from a state (usually via a list of\n * transitions) and has a target state.

\n *\n *

Since we never have to change the ATN transitions once we construct it,\n * we can fix these transitions as specific classes. The DFA transitions\n * on the other hand need to update the labels as it adds transitions to\n * the states. We'll use the term Edge for the DFA to distinguish them from\n * ATN transitions.

\n */\nclass Transition {\n constructor(target) {\n // The target of this transition.\n if (target===undefined || target===null) {\n throw \"target cannot be null.\";\n }\n this.target = target;\n // Are we epsilon, action, sempred?\n this.isEpsilon = false;\n this.label = null;\n }\n}\n\n// constants for serialization\n\nTransition.EPSILON = 1;\nTransition.RANGE = 2;\nTransition.RULE = 3;\n// e.g., {isType(input.LT(1))}?\nTransition.PREDICATE = 4;\nTransition.ATOM = 5;\nTransition.ACTION = 6;\n// ~(A|B) or ~atom, wildcard, which convert to next 2\nTransition.SET = 7;\nTransition.NOT_SET = 8;\nTransition.WILDCARD = 9;\nTransition.PRECEDENCE = 10;\n\nTransition.serializationNames = [\n \"INVALID\",\n \"EPSILON\",\n \"RANGE\",\n \"RULE\",\n \"PREDICATE\",\n \"ATOM\",\n \"ACTION\",\n \"SET\",\n \"NOT_SET\",\n \"WILDCARD\",\n \"PRECEDENCE\"\n ];\n\nTransition.serializationTypes = {\n EpsilonTransition: Transition.EPSILON,\n RangeTransition: Transition.RANGE,\n RuleTransition: Transition.RULE,\n PredicateTransition: Transition.PREDICATE,\n AtomTransition: Transition.ATOM,\n ActionTransition: Transition.ACTION,\n SetTransition: Transition.SET,\n NotSetTransition: Transition.NOT_SET,\n WildcardTransition: Transition.WILDCARD,\n PrecedencePredicateTransition: Transition.PRECEDENCE\n };\n\n\n// TODO: make all transitions sets? 
no, should remove set edges\n\nclass AtomTransition extends Transition {\n constructor(target, label) {\n super(target);\n // The token type or character value; or, signifies special label.\n this.label_ = label;\n this.label = this.makeLabel();\n this.serializationType = Transition.ATOM;\n }\n\n makeLabel() {\n const s = new IntervalSet();\n s.addOne(this.label_);\n return s;\n }\n\n matches(symbol, minVocabSymbol, maxVocabSymbol) {\n return this.label_ === symbol;\n }\n\n toString() {\n return this.label_;\n }\n}\n\n\nclass RuleTransition extends Transition {\n constructor(ruleStart, ruleIndex, precedence, followState) {\n super(ruleStart);\n // ptr to the rule definition object for this rule ref\n this.ruleIndex = ruleIndex;\n this.precedence = precedence;\n // what node to begin computations following ref to rule\n this.followState = followState;\n this.serializationType = Transition.RULE;\n this.isEpsilon = true;\n }\n\n matches(symbol, minVocabSymbol, maxVocabSymbol) {\n return false;\n }\n}\n\nclass EpsilonTransition extends Transition {\n constructor(target, outermostPrecedenceReturn) {\n super(target);\n this.serializationType = Transition.EPSILON;\n this.isEpsilon = true;\n this.outermostPrecedenceReturn = outermostPrecedenceReturn;\n }\n\n matches(symbol, minVocabSymbol, maxVocabSymbol) {\n return false;\n }\n\n toString() {\n return \"epsilon\";\n }\n}\n\n\nclass RangeTransition extends Transition {\n constructor(target, start, stop) {\n super(target);\n this.serializationType = Transition.RANGE;\n this.start = start;\n this.stop = stop;\n this.label = this.makeLabel();\n }\n\n makeLabel() {\n const s = new IntervalSet();\n s.addRange(this.start, this.stop);\n return s;\n }\n\n matches(symbol, minVocabSymbol, maxVocabSymbol) {\n return symbol >= this.start && symbol <= this.stop;\n }\n\n toString() {\n return \"'\" + String.fromCharCode(this.start) + \"'..'\" + String.fromCharCode(this.stop) + \"'\";\n }\n}\n\n\nclass AbstractPredicateTransition extends 
Transition {\n constructor(target) {\n super(target);\n }\n}\n\nclass PredicateTransition extends AbstractPredicateTransition {\n constructor(target, ruleIndex, predIndex, isCtxDependent) {\n super(target);\n this.serializationType = Transition.PREDICATE;\n this.ruleIndex = ruleIndex;\n this.predIndex = predIndex;\n this.isCtxDependent = isCtxDependent; // e.g., $i ref in pred\n this.isEpsilon = true;\n }\n\n matches(symbol, minVocabSymbol, maxVocabSymbol) {\n return false;\n }\n\n getPredicate() {\n return new Predicate(this.ruleIndex, this.predIndex, this.isCtxDependent);\n }\n\n toString() {\n return \"pred_\" + this.ruleIndex + \":\" + this.predIndex;\n }\n}\n\n\nclass ActionTransition extends Transition {\n constructor(target, ruleIndex, actionIndex, isCtxDependent) {\n super(target);\n this.serializationType = Transition.ACTION;\n this.ruleIndex = ruleIndex;\n this.actionIndex = actionIndex===undefined ? -1 : actionIndex;\n this.isCtxDependent = isCtxDependent===undefined ? false : isCtxDependent; // e.g., $i ref in pred\n this.isEpsilon = true;\n }\n\n matches(symbol, minVocabSymbol, maxVocabSymbol) {\n return false;\n }\n\n toString() {\n return \"action_\" + this.ruleIndex + \":\" + this.actionIndex;\n }\n}\n\n\n// A transition containing a set of values.\nclass SetTransition extends Transition {\n constructor(target, set) {\n super(target);\n this.serializationType = Transition.SET;\n if (set !==undefined && set !==null) {\n this.label = set;\n } else {\n this.label = new IntervalSet();\n this.label.addOne(Token.INVALID_TYPE);\n }\n }\n\n matches(symbol, minVocabSymbol, maxVocabSymbol) {\n return this.label.contains(symbol);\n }\n\n toString() {\n return this.label.toString();\n }\n}\n\nclass NotSetTransition extends SetTransition {\n constructor(target, set) {\n super(target, set);\n this.serializationType = Transition.NOT_SET;\n }\n\n matches(symbol, minVocabSymbol, maxVocabSymbol) {\n return symbol >= minVocabSymbol && symbol <= maxVocabSymbol &&\n 
!super.matches(symbol, minVocabSymbol, maxVocabSymbol);\n }\n\n toString() {\n return '~' + super.toString();\n }\n}\n\nclass WildcardTransition extends Transition {\n constructor(target) {\n super(target);\n this.serializationType = Transition.WILDCARD;\n }\n\n matches(symbol, minVocabSymbol, maxVocabSymbol) {\n return symbol >= minVocabSymbol && symbol <= maxVocabSymbol;\n }\n\n toString() {\n return \".\";\n }\n}\n\nclass PrecedencePredicateTransition extends AbstractPredicateTransition {\n constructor(target, precedence) {\n super(target);\n this.serializationType = Transition.PRECEDENCE;\n this.precedence = precedence;\n this.isEpsilon = true;\n }\n\n matches(symbol, minVocabSymbol, maxVocabSymbol) {\n return false;\n }\n\n getPredicate() {\n return new PrecedencePredicate(this.precedence);\n }\n\n toString() {\n return this.precedence + \" >= _p\";\n }\n}\n\nmodule.exports = {\n Transition,\n AtomTransition,\n SetTransition,\n NotSetTransition,\n RuleTransition,\n ActionTransition,\n EpsilonTransition,\n RangeTransition,\n WildcardTransition,\n PredicateTransition,\n PrecedencePredicateTransition,\n AbstractPredicateTransition\n}\n", "/* Copyright (c) 2012-2017 The ANTLR Project. 
All rights reserved.\n * Use of this file is governed by the BSD 3-clause license that\n * can be found in the LICENSE.txt file in the project root.\n */\n\nconst {Token} = require('./../Token');\nconst {Interval} = require('./../IntervalSet');\nconst INVALID_INTERVAL = new Interval(-1, -2);\n\n/**\n * The basic notion of a tree has a parent, a payload, and a list of children.\n * It is the most abstract interface for all the trees used by ANTLR.\n */\nclass Tree {}\n\nclass SyntaxTree extends Tree {\n\tconstructor() {\n\t\tsuper();\n\t}\n}\n\nclass ParseTree extends SyntaxTree {\n\tconstructor() {\n\t\tsuper();\n\t}\n}\n\nclass RuleNode extends ParseTree {\n\tconstructor() {\n\t\tsuper();\n\t}\n\n\tgetRuleContext(){\n\t\tthrow new Error(\"missing interface implementation\")\n\t}\n}\n\nclass TerminalNode extends ParseTree {\n\tconstructor() {\n\t\tsuper();\n\t}\n}\n\nclass ErrorNode extends TerminalNode {\n\tconstructor() {\n\t\tsuper();\n\t}\n}\n\nclass ParseTreeVisitor {\n\tvisit(ctx) {\n\t\t if (Array.isArray(ctx)) {\n\t\t\treturn ctx.map(function(child) {\n\t\t\t\treturn child.accept(this);\n\t\t\t}, this);\n\t\t} else {\n\t\t\treturn ctx.accept(this);\n\t\t}\n\t}\n\n\tvisitChildren(ctx) {\n\t\tif (ctx.children) {\n\t\t\treturn this.visit(ctx.children);\n\t\t} else {\n\t\t\treturn null;\n\t\t}\n\t}\n\n\tvisitTerminal(node) {\n\t}\n\n\tvisitErrorNode(node) {\n\t}\n}\n\nclass ParseTreeListener {\n\tvisitTerminal(node) {\n\t}\n\n\tvisitErrorNode(node) {\n\t}\n\n\tenterEveryRule(node) {\n\t}\n\n\texitEveryRule(node) {\n\t}\n}\n\nclass TerminalNodeImpl extends TerminalNode {\n\tconstructor(symbol) {\n\t\tsuper();\n\t\tthis.parentCtx = null;\n\t\tthis.symbol = symbol;\n\t}\n\n\tgetChild(i) {\n\t\treturn null;\n\t}\n\n\tgetSymbol() {\n\t\treturn this.symbol;\n\t}\n\n\tgetParent() {\n\t\treturn this.parentCtx;\n\t}\n\n\tgetPayload() {\n\t\treturn this.symbol;\n\t}\n\n\tgetSourceInterval() {\n\t\tif (this.symbol === null) {\n\t\t\treturn 
INVALID_INTERVAL;\n\t\t}\n\t\tconst tokenIndex = this.symbol.tokenIndex;\n\t\treturn new Interval(tokenIndex, tokenIndex);\n\t}\n\n\tgetChildCount() {\n\t\treturn 0;\n\t}\n\n\taccept(visitor) {\n\t\treturn visitor.visitTerminal(this);\n\t}\n\n\tgetText() {\n\t\treturn this.symbol.text;\n\t}\n\n\ttoString() {\n\t\tif (this.symbol.type === Token.EOF) {\n\t\t\treturn \"\";\n\t\t} else {\n\t\t\treturn this.symbol.text;\n\t\t}\n\t}\n}\n\n\n/**\n * Represents a token that was consumed during resynchronization\n * rather than during a valid match operation. For example,\n * we will create this kind of a node during single token insertion\n * and deletion as well as during \"consume until error recovery set\"\n * upon no viable alternative exceptions.\n */\nclass ErrorNodeImpl extends TerminalNodeImpl {\n\tconstructor(token) {\n\t\tsuper(token);\n\t}\n\n\tisErrorNode() {\n\t\treturn true;\n\t}\n\n\taccept(visitor) {\n\t\treturn visitor.visitErrorNode(this);\n\t}\n}\n\nclass ParseTreeWalker {\n\n\t/**\n\t * Performs a walk on the given parse tree starting at the root and going down recursively\n\t * with depth-first search. 
On each node, {@link ParseTreeWalker//enterRule} is called before\n\t * recursively walking down into child nodes, then\n\t * {@link ParseTreeWalker//exitRule} is called after the recursive call to wind up.\n\t * @param listener The listener used by the walker to process grammar rules\n\t * @param t The parse tree to be walked on\n\t */\n\twalk(listener, t) {\n\t\tconst errorNode = t instanceof ErrorNode ||\n\t\t\t\t(t.isErrorNode !== undefined && t.isErrorNode());\n\t\tif (errorNode) {\n\t\t\tlistener.visitErrorNode(t);\n\t\t} else if (t instanceof TerminalNode) {\n\t\t\tlistener.visitTerminal(t);\n\t\t} else {\n\t\t\tthis.enterRule(listener, t);\n\t\t\tfor (let i = 0; i < t.getChildCount(); i++) {\n\t\t\t\tconst child = t.getChild(i);\n\t\t\t\tthis.walk(listener, child);\n\t\t\t}\n\t\t\tthis.exitRule(listener, t);\n\t\t}\n\t}\n\n\t/**\n\t * Enters a grammar rule by first triggering the generic event {@link ParseTreeListener//enterEveryRule}\n\t * then by triggering the event specific to the given parse tree node\n\t * @param listener The listener responding to the trigger events\n\t * @param r The grammar rule containing the rule context\n\t */\n\tenterRule(listener, r) {\n\t\tconst ctx = r.getRuleContext();\n\t\tlistener.enterEveryRule(ctx);\n\t\tctx.enterRule(listener);\n\t}\n\n\t/**\n\t * Exits a grammar rule by first triggering the event specific to the given parse tree node\n\t * then by triggering the generic event {@link ParseTreeListener//exitEveryRule}\n\t * @param listener The listener responding to the trigger events\n\t * @param r The grammar rule containing the rule context\n\t */\n\texitRule(listener, r) {\n\t\tconst ctx = r.getRuleContext();\n\t\tctx.exitRule(listener);\n\t\tlistener.exitEveryRule(ctx);\n\t}\n}\n\nParseTreeWalker.DEFAULT = new ParseTreeWalker();\n\nmodule.exports = 
{\n\tRuleNode,\n\tErrorNode,\n\tTerminalNode,\n\tErrorNodeImpl,\n\tTerminalNodeImpl,\n\tParseTreeListener,\n\tParseTreeVisitor,\n\tParseTreeWalker,\n\tINVALID_INTERVAL\n}\n", "/* Copyright (c) 2012-2017 The ANTLR Project. All rights reserved.\n * Use of this file is governed by the BSD 3-clause license that\n * can be found in the LICENSE.txt file in the project root.\n */\n\nconst Utils = require('./../Utils');\nconst {Token} = require('./../Token');\nconst {ErrorNode, TerminalNode, RuleNode} = require('./Tree');\n\n/** A set of utility routines useful for all kinds of ANTLR trees. */\nconst Trees = {\n /**\n * Print out a whole tree in LISP form. {@link //getNodeText} is used on the\n * node payloads to get the text for the nodes. Detect\n * parse trees and extract data appropriately.\n */\n toStringTree: function(tree, ruleNames, recog) {\n ruleNames = ruleNames || null;\n recog = recog || null;\n if(recog!==null) {\n ruleNames = recog.ruleNames;\n }\n let s = Trees.getNodeText(tree, ruleNames);\n s = Utils.escapeWhitespace(s, false);\n const c = tree.getChildCount();\n if(c===0) {\n return s;\n }\n let res = \"(\" + s + ' ';\n if(c>0) {\n s = Trees.toStringTree(tree.getChild(0), ruleNames);\n res = res.concat(s);\n }\n for(let i=1;i\n\t * Since tokens on hidden channels (e.g. whitespace or comments) are not\n\t * added to the parse trees, they will not appear in the output of this\n\t * method.\n\t */\n\tgetText() {\n\t\tif (this.getChildCount() === 0) {\n\t\t\treturn \"\";\n\t\t} else {\n\t\t\treturn this.children.map(function(child) {\n\t\t\t\treturn child.getText();\n\t\t\t}).join(\"\");\n\t\t}\n\t}\n\n\t/**\n\t * For rule associated with this parse tree internal node, return\n\t * the outer alternative number used to match the input. Default\n\t * implementation does not compute nor store this alt num. 
Create\n\t * a subclass of ParserRuleContext with backing field and set\n\t * option contextSuperClass.\n\t * to set it.\n\t */\n\tgetAltNumber() {\n\t // use constant value of ATN.INVALID_ALT_NUMBER to avoid circular dependency\n\t return 0;\n }\n\n\t/**\n\t * Set the outer alternative number for this context node. Default\n\t * implementation does nothing to avoid backing field overhead for\n\t * trees that don't need it. Create\n\t * a subclass of ParserRuleContext with backing field and set\n\t * option contextSuperClass.\n\t */\n\tsetAltNumber(altNumber) { }\n\n\tgetChild(i) {\n\t\treturn null;\n\t}\n\n\tgetChildCount() {\n\t\treturn 0;\n\t}\n\n\taccept(visitor) {\n\t\treturn visitor.visitChildren(this);\n\t}\n\n\t/**\n\t * Print out a whole tree, not just a node, in LISP format\n\t * (root child1 .. childN). Print just a node if this is a leaf.\n\t */\n\ttoStringTree(ruleNames, recog) {\n\t\treturn Trees.toStringTree(this, ruleNames, recog);\n\t}\n\n\ttoString(ruleNames, stop) {\n\t\truleNames = ruleNames || null;\n\t\tstop = stop || null;\n\t\tlet p = this;\n\t\tlet s = \"[\";\n\t\twhile (p !== null && p !== stop) {\n\t\t\tif (ruleNames === null) {\n\t\t\t\tif (!p.isEmpty()) {\n\t\t\t\t\ts += p.invokingState;\n\t\t\t\t}\n\t\t\t} else {\n\t\t\t\tconst ri = p.ruleIndex;\n\t\t\t\tconst ruleName = (ri >= 0 && ri < ruleNames.length) ? ruleNames[ri]\n\t\t\t\t\t\t: \"\" + ri;\n\t\t\t\ts += ruleName;\n\t\t\t}\n\t\t\tif (p.parentCtx !== null && (ruleNames !== null || !p.parentCtx.isEmpty())) {\n\t\t\t\ts += \" \";\n\t\t\t}\n\t\t\tp = p.parentCtx;\n\t\t}\n\t\ts += \"]\";\n\t\treturn s;\n\t}\n}\n\nmodule.exports = RuleContext;\n", "/* Copyright (c) 2012-2017 The ANTLR Project. 
All rights reserved.\n * Use of this file is governed by the BSD 3-clause license that\n * can be found in the LICENSE.txt file in the project root.\n */\n\nconst RuleContext = require('./RuleContext');\nconst {Hash, Map, equalArrays} = require('./Utils');\n\nclass PredictionContext {\n\n\tconstructor(cachedHashCode) {\n\t\tthis.cachedHashCode = cachedHashCode;\n\t}\n\n\t/**\n\t * Stores the computed hash code of this {@link PredictionContext}. The hash\n\t * code is computed in parts to match the following reference algorithm.\n\t *\n\t *
\n\t * private int referenceHashCode() {\n\t * int hash = {@link MurmurHash//initialize MurmurHash.initialize}({@link\n\t * //INITIAL_HASH});\n\t *\n\t * for (int i = 0; i < {@link //size()}; i++) {\n\t * hash = {@link MurmurHash//update MurmurHash.update}(hash, {@link //getParent\n\t * getParent}(i));\n\t * }\n\t *\n\t * for (int i = 0; i < {@link //size()}; i++) {\n\t * hash = {@link MurmurHash//update MurmurHash.update}(hash, {@link\n\t * //getReturnState getReturnState}(i));\n\t * }\n\t *\n\t * hash = {@link MurmurHash//finish MurmurHash.finish}(hash, 2// {@link\n\t * //size()});\n\t * return hash;\n\t * }\n\t * 
\n\t * This means only the {@link //EMPTY} context is in set.\n\t */\n\tisEmpty() {\n\t\treturn this === PredictionContext.EMPTY;\n\t}\n\n\thasEmptyPath() {\n\t\treturn this.getReturnState(this.length - 1) === PredictionContext.EMPTY_RETURN_STATE;\n\t}\n\n\thashCode() {\n\t\treturn this.cachedHashCode;\n\t}\n\n\tupdateHashCode(hash) {\n\t\thash.update(this.cachedHashCode);\n\t}\n}\n\n/**\n * Represents {@code $} in local context prediction, which means wildcard.\n * {@code//+x =//}.\n */\nPredictionContext.EMPTY = null;\n\n/**\n * Represents {@code $} in an array in full context mode, when {@code $}\n * doesn't mean wildcard: {@code $ + x = [$,x]}. Here,\n * {@code $} = {@link //EMPTY_RETURN_STATE}.\n */\nPredictionContext.EMPTY_RETURN_STATE = 0x7FFFFFFF;\n\nPredictionContext.globalNodeCount = 1;\nPredictionContext.id = PredictionContext.globalNodeCount;\n\n\n/*\nfunction calculateHashString(parent, returnState) {\n\treturn \"\" + parent + returnState;\n}\n*/\n\n/**\n * Used to cache {@link PredictionContext} objects. Its used for the shared\n * context cash associated with contexts in DFA states. This cache\n * can be used for both lexers and parsers.\n */\nclass PredictionContextCache {\n\n\tconstructor() {\n\t\tthis.cache = new Map();\n\t}\n\n\t/**\n\t * Add a context to the cache and return it. 
If the context already exists,\n\t * return that one instead and do not add a new context to the cache.\n\t * Protect shared cache from unsafe thread access.\n\t */\n\tadd(ctx) {\n\t\tif (ctx === PredictionContext.EMPTY) {\n\t\t\treturn PredictionContext.EMPTY;\n\t\t}\n\t\tconst existing = this.cache.get(ctx) || null;\n\t\tif (existing !== null) {\n\t\t\treturn existing;\n\t\t}\n\t\tthis.cache.put(ctx, ctx);\n\t\treturn ctx;\n\t}\n\n\tget(ctx) {\n\t\treturn this.cache.get(ctx) || null;\n\t}\n\n\tget length(){\n\t\treturn this.cache.length;\n\t}\n}\n\n\nclass SingletonPredictionContext extends PredictionContext {\n\n\tconstructor(parent, returnState) {\n\t\tlet hashCode = 0;\n\t\tconst hash = new Hash();\n\t\tif(parent !== null) {\n\t\t\thash.update(parent, returnState);\n\t\t} else {\n\t\t\thash.update(1);\n\t\t}\n\t\thashCode = hash.finish();\n\t\tsuper(hashCode);\n\t\tthis.parentCtx = parent;\n\t\tthis.returnState = returnState;\n\t}\n\n\tgetParent(index) {\n\t\treturn this.parentCtx;\n\t}\n\n\tgetReturnState(index) {\n\t\treturn this.returnState;\n\t}\n\n\tequals(other) {\n\t\tif (this === other) {\n\t\t\treturn true;\n\t\t} else if (!(other instanceof SingletonPredictionContext)) {\n\t\t\treturn false;\n\t\t} else if (this.hashCode() !== other.hashCode()) {\n\t\t\treturn false; // can't be same if hash is different\n\t\t} else {\n\t\t\tif(this.returnState !== other.returnState)\n\t\t\t\treturn false;\n\t\t\telse if(this.parentCtx==null)\n\t\t\t\treturn other.parentCtx==null\n\t\t\telse\n\t\t\t\treturn this.parentCtx.equals(other.parentCtx);\n\t\t}\n\t}\n\n\ttoString() {\n\t\tconst up = this.parentCtx === null ? 
\"\" : this.parentCtx.toString();\n\t\tif (up.length === 0) {\n\t\t\tif (this.returnState === PredictionContext.EMPTY_RETURN_STATE) {\n\t\t\t\treturn \"$\";\n\t\t\t} else {\n\t\t\t\treturn \"\" + this.returnState;\n\t\t\t}\n\t\t} else {\n\t\t\treturn \"\" + this.returnState + \" \" + up;\n\t\t}\n\t}\n\n\tget length(){\n\t\treturn 1;\n\t}\n\n\tstatic create(parent, returnState) {\n\t\tif (returnState === PredictionContext.EMPTY_RETURN_STATE && parent === null) {\n\t\t\t// someone can pass in the bits of an array ctx that mean $\n\t\t\treturn PredictionContext.EMPTY;\n\t\t} else {\n\t\t\treturn new SingletonPredictionContext(parent, returnState);\n\t\t}\n\t}\n}\n\nclass EmptyPredictionContext extends SingletonPredictionContext {\n\n\tconstructor() {\n\t\tsuper(null, PredictionContext.EMPTY_RETURN_STATE);\n\t}\n\n\tisEmpty() {\n\t\treturn true;\n\t}\n\n\tgetParent(index) {\n\t\treturn null;\n\t}\n\n\tgetReturnState(index) {\n\t\treturn this.returnState;\n\t}\n\n\tequals(other) {\n\t\treturn this === other;\n\t}\n\n\ttoString() {\n\t\treturn \"$\";\n\t}\n}\n\n\nPredictionContext.EMPTY = new EmptyPredictionContext();\n\nclass ArrayPredictionContext extends PredictionContext {\n\n\tconstructor(parents, returnStates) {\n\t\t/**\n\t\t * Parent can be null only if full ctx mode and we make an array\n\t\t * from {@link //EMPTY} and non-empty. 
We merge {@link //EMPTY} by using\n\t\t * null parent and\n\t\t * returnState == {@link //EMPTY_RETURN_STATE}.\n\t\t */\n\t\tconst h = new Hash();\n\t\th.update(parents, returnStates);\n\t\tconst hashCode = h.finish();\n\t\tsuper(hashCode);\n\t\tthis.parents = parents;\n\t\tthis.returnStates = returnStates;\n\t\treturn this;\n\t}\n\n\tisEmpty() {\n\t\t// since EMPTY_RETURN_STATE can only appear in the last position, we\n\t\t// don't need to verify that size==1\n\t\treturn this.returnStates[0] === PredictionContext.EMPTY_RETURN_STATE;\n\t}\n\n\tgetParent(index) {\n\t\treturn this.parents[index];\n\t}\n\n\tgetReturnState(index) {\n\t\treturn this.returnStates[index];\n\t}\n\n\tequals(other) {\n\t\tif (this === other) {\n\t\t\treturn true;\n\t\t} else if (!(other instanceof ArrayPredictionContext)) {\n\t\t\treturn false;\n\t\t} else if (this.hashCode() !== other.hashCode()) {\n\t\t\treturn false; // can't be same if hash is different\n\t\t} else {\n\t\t\treturn equalArrays(this.returnStates, other.returnStates) &&\n\t\t\t\tequalArrays(this.parents, other.parents);\n\t\t}\n\t}\n\n\ttoString() {\n\t\tif (this.isEmpty()) {\n\t\t\treturn \"[]\";\n\t\t} else {\n\t\t\tlet s = \"[\";\n\t\t\tfor (let i = 0; i < this.returnStates.length; i++) {\n\t\t\t\tif (i > 0) {\n\t\t\t\t\ts = s + \", \";\n\t\t\t\t}\n\t\t\t\tif (this.returnStates[i] === PredictionContext.EMPTY_RETURN_STATE) {\n\t\t\t\t\ts = s + \"$\";\n\t\t\t\t\tcontinue;\n\t\t\t\t}\n\t\t\t\ts = s + this.returnStates[i];\n\t\t\t\tif (this.parents[i] !== null) {\n\t\t\t\t\ts = s + \" \" + this.parents[i];\n\t\t\t\t} else {\n\t\t\t\t\ts = s + \"null\";\n\t\t\t\t}\n\t\t\t}\n\t\t\treturn s + \"]\";\n\t\t}\n\t}\n\n\tget length(){\n\t\treturn this.returnStates.length;\n\t}\n}\n\n\n/**\n * Convert a {@link RuleContext} tree to a {@link PredictionContext} graph.\n * Return {@link //EMPTY} if {@code outerContext} is empty or null.\n */\nfunction predictionContextFromRuleContext(atn, outerContext) {\n\tif (outerContext === undefined 
|| outerContext === null) {\n\t\touterContext = RuleContext.EMPTY;\n\t}\n\t// if we are in RuleContext of start rule, s, then PredictionContext\n\t// is EMPTY. Nobody called us. (if we are empty, return empty)\n\tif (outerContext.parentCtx === null || outerContext === RuleContext.EMPTY) {\n\t\treturn PredictionContext.EMPTY;\n\t}\n\t// If we have a parent, convert it to a PredictionContext graph\n\tconst parent = predictionContextFromRuleContext(atn, outerContext.parentCtx);\n\tconst state = atn.states[outerContext.invokingState];\n\tconst transition = state.transitions[0];\n\treturn SingletonPredictionContext.create(parent, transition.followState.stateNumber);\n}\n/*\nfunction calculateListsHashString(parents, returnStates) {\n\tconst s = \"\";\n\tparents.map(function(p) {\n\t\ts = s + p;\n\t});\n\treturnStates.map(function(r) {\n\t\ts = s + r;\n\t});\n\treturn s;\n}\n*/\nfunction merge(a, b, rootIsWildcard, mergeCache) {\n\t// share same graph if both same\n\tif (a === b) {\n\t\treturn a;\n\t}\n\tif (a instanceof SingletonPredictionContext && b instanceof SingletonPredictionContext) {\n\t\treturn mergeSingletons(a, b, rootIsWildcard, mergeCache);\n\t}\n\t// At least one of a or b is array\n\t// If one is $ and rootIsWildcard, return $ as// wildcard\n\tif (rootIsWildcard) {\n\t\tif (a instanceof EmptyPredictionContext) {\n\t\t\treturn a;\n\t\t}\n\t\tif (b instanceof EmptyPredictionContext) {\n\t\t\treturn b;\n\t\t}\n\t}\n\t// convert singleton so both are arrays to normalize\n\tif (a instanceof SingletonPredictionContext) {\n\t\ta = new ArrayPredictionContext([a.getParent()], [a.returnState]);\n\t}\n\tif (b instanceof SingletonPredictionContext) {\n\t\tb = new ArrayPredictionContext([b.getParent()], [b.returnState]);\n\t}\n\treturn mergeArrays(a, b, rootIsWildcard, mergeCache);\n}\n\n/**\n * Merge two {@link SingletonPredictionContext} instances.\n *\n *

Stack tops equal, parents merge is same; return left graph.
\n *

\n *\n *

Same stack top, parents differ; merge parents giving array node, then\n * remainders of those graphs. A new root node is created to point to the\n * merged parents.
\n *

\n *\n *

Different stack tops pointing to same parent. Make array node for the\n * root where both element in the root point to the same (original)\n * parent.
\n *

\n *\n *

Different stack tops pointing to different parents. Make array node for\n * the root where each element points to the corresponding original\n * parent.
\n *

\n *\n * @param a the first {@link SingletonPredictionContext}\n * @param b the second {@link SingletonPredictionContext}\n * @param rootIsWildcard {@code true} if this is a local-context merge,\n * otherwise false to indicate a full-context merge\n * @param mergeCache\n */\nfunction mergeSingletons(a, b, rootIsWildcard, mergeCache) {\n\tif (mergeCache !== null) {\n\t\tlet previous = mergeCache.get(a, b);\n\t\tif (previous !== null) {\n\t\t\treturn previous;\n\t\t}\n\t\tprevious = mergeCache.get(b, a);\n\t\tif (previous !== null) {\n\t\t\treturn previous;\n\t\t}\n\t}\n\n\tconst rootMerge = mergeRoot(a, b, rootIsWildcard);\n\tif (rootMerge !== null) {\n\t\tif (mergeCache !== null) {\n\t\t\tmergeCache.set(a, b, rootMerge);\n\t\t}\n\t\treturn rootMerge;\n\t}\n\tif (a.returnState === b.returnState) {\n\t\tconst parent = merge(a.parentCtx, b.parentCtx, rootIsWildcard, mergeCache);\n\t\t// if parent is same as existing a or b parent or reduced to a parent,\n\t\t// return it\n\t\tif (parent === a.parentCtx) {\n\t\t\treturn a; // ax + bx = ax, if a=b\n\t\t}\n\t\tif (parent === b.parentCtx) {\n\t\t\treturn b; // ax + bx = bx, if a=b\n\t\t}\n\t\t// else: ax + ay = a'[x,y]\n\t\t// merge parents x and y, giving array node with x,y then remainders\n\t\t// of those graphs. 
dup a, a' points at merged array\n\t\t// new joined parent so create new singleton pointing to it, a'\n\t\tconst spc = SingletonPredictionContext.create(parent, a.returnState);\n\t\tif (mergeCache !== null) {\n\t\t\tmergeCache.set(a, b, spc);\n\t\t}\n\t\treturn spc;\n\t} else { // a != b payloads differ\n\t\t// see if we can collapse parents due to $+x parents if local ctx\n\t\tlet singleParent = null;\n\t\tif (a === b || (a.parentCtx !== null && a.parentCtx === b.parentCtx)) { // ax +\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t// bx =\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t// [a,b]x\n\t\t\tsingleParent = a.parentCtx;\n\t\t}\n\t\tif (singleParent !== null) { // parents are same\n\t\t\t// sort payloads and use same parent\n\t\t\tconst payloads = [ a.returnState, b.returnState ];\n\t\t\tif (a.returnState > b.returnState) {\n\t\t\t\tpayloads[0] = b.returnState;\n\t\t\t\tpayloads[1] = a.returnState;\n\t\t\t}\n\t\t\tconst parents = [ singleParent, singleParent ];\n\t\t\tconst apc = new ArrayPredictionContext(parents, payloads);\n\t\t\tif (mergeCache !== null) {\n\t\t\t\tmergeCache.set(a, b, apc);\n\t\t\t}\n\t\t\treturn apc;\n\t\t}\n\t\t// parents differ and can't merge them. Just pack together\n\t\t// into array; can't merge.\n\t\t// ax + by = [ax,by]\n\t\tconst payloads = [ a.returnState, b.returnState ];\n\t\tlet parents = [ a.parentCtx, b.parentCtx ];\n\t\tif (a.returnState > b.returnState) { // sort by payload\n\t\t\tpayloads[0] = b.returnState;\n\t\t\tpayloads[1] = a.returnState;\n\t\t\tparents = [ b.parentCtx, a.parentCtx ];\n\t\t}\n\t\tconst a_ = new ArrayPredictionContext(parents, payloads);\n\t\tif (mergeCache !== null) {\n\t\t\tmergeCache.set(a, b, a_);\n\t\t}\n\t\treturn a_;\n\t}\n}\n\n/**\n * Handle case where at least one of {@code a} or {@code b} is\n * {@link //EMPTY}. In the following diagrams, the symbol {@code $} is used\n * to represent {@link //EMPTY}.\n *\n *

Local-Context Merges

\n *\n *

These local-context merge operations are used when {@code rootIsWildcard}\n * is true.

\n *\n *

{@link //EMPTY} is superset of any graph; return {@link //EMPTY}.
\n *

\n *\n *

{@link //EMPTY} and anything is {@code //EMPTY}, so merged parent is\n * {@code //EMPTY}; return left graph.
\n *

\n *\n *

Special case of last merge if local context.
\n *

\n *\n *

Full-Context Merges

\n *\n *

These full-context merge operations are used when {@code rootIsWildcard}\n * is false.

\n *\n *

\n *\n *

Must keep all contexts; {@link //EMPTY} in array is a special value (and\n * null parent).
\n *

\n *\n *

\n *\n * @param a the first {@link SingletonPredictionContext}\n * @param b the second {@link SingletonPredictionContext}\n * @param rootIsWildcard {@code true} if this is a local-context merge,\n * otherwise false to indicate a full-context merge\n */\nfunction mergeRoot(a, b, rootIsWildcard) {\n\tif (rootIsWildcard) {\n\t\tif (a === PredictionContext.EMPTY) {\n\t\t\treturn PredictionContext.EMPTY; // // + b =//\n\t\t}\n\t\tif (b === PredictionContext.EMPTY) {\n\t\t\treturn PredictionContext.EMPTY; // a +// =//\n\t\t}\n\t} else {\n\t\tif (a === PredictionContext.EMPTY && b === PredictionContext.EMPTY) {\n\t\t\treturn PredictionContext.EMPTY; // $ + $ = $\n\t\t} else if (a === PredictionContext.EMPTY) { // $ + x = [$,x]\n\t\t\tconst payloads = [ b.returnState,\n\t\t\t\t\tPredictionContext.EMPTY_RETURN_STATE ];\n\t\t\tconst parents = [ b.parentCtx, null ];\n\t\t\treturn new ArrayPredictionContext(parents, payloads);\n\t\t} else if (b === PredictionContext.EMPTY) { // x + $ = [$,x] ($ is always first if present)\n\t\t\tconst payloads = [ a.returnState, PredictionContext.EMPTY_RETURN_STATE ];\n\t\t\tconst parents = [ a.parentCtx, null ];\n\t\t\treturn new ArrayPredictionContext(parents, payloads);\n\t\t}\n\t}\n\treturn null;\n}\n\n/**\n * Merge two {@link ArrayPredictionContext} instances.\n *\n *

Different tops, different parents.
\n *

\n *\n *

Shared top, same parents.
\n *

\n *\n *

Shared top, different parents.
\n *

\n *\n *

Shared top, all shared parents.
\n *

\n *\n *

Equal tops, merge parents and reduce top to\n * {@link SingletonPredictionContext}.
\n *

\n */\nfunction mergeArrays(a, b, rootIsWildcard, mergeCache) {\n\tif (mergeCache !== null) {\n\t\tlet previous = mergeCache.get(a, b);\n\t\tif (previous !== null) {\n\t\t\treturn previous;\n\t\t}\n\t\tprevious = mergeCache.get(b, a);\n\t\tif (previous !== null) {\n\t\t\treturn previous;\n\t\t}\n\t}\n\t// merge sorted payloads a + b => M\n\tlet i = 0; // walks a\n\tlet j = 0; // walks b\n\tlet k = 0; // walks target M array\n\n\tlet mergedReturnStates = [];\n\tlet mergedParents = [];\n\t// walk and merge to yield mergedParents, mergedReturnStates\n\twhile (i < a.returnStates.length && j < b.returnStates.length) {\n\t\tconst a_parent = a.parents[i];\n\t\tconst b_parent = b.parents[j];\n\t\tif (a.returnStates[i] === b.returnStates[j]) {\n\t\t\t// same payload (stack tops are equal), must yield merged singleton\n\t\t\tconst payload = a.returnStates[i];\n\t\t\t// $+$ = $\n\t\t\tconst bothDollars = payload === PredictionContext.EMPTY_RETURN_STATE &&\n\t\t\t\t\ta_parent === null && b_parent === null;\n\t\t\tconst ax_ax = (a_parent !== null && b_parent !== null && a_parent === b_parent); // ax+ax\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t// ->\n\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t// ax\n\t\t\tif (bothDollars || ax_ax) {\n\t\t\t\tmergedParents[k] = a_parent; // choose left\n\t\t\t\tmergedReturnStates[k] = payload;\n\t\t\t} else { // ax+ay -> a'[x,y]\n\t\t\t\tmergedParents[k] = merge(a_parent, b_parent, rootIsWildcard, mergeCache);\n\t\t\t\tmergedReturnStates[k] = payload;\n\t\t\t}\n\t\t\ti += 1; // hop over left one as usual\n\t\t\tj += 1; // but also skip one in right side since we merge\n\t\t} else if (a.returnStates[i] < b.returnStates[j]) { // copy a[i] to M\n\t\t\tmergedParents[k] = a_parent;\n\t\t\tmergedReturnStates[k] = a.returnStates[i];\n\t\t\ti += 1;\n\t\t} else { // b > a, copy b[j] to M\n\t\t\tmergedParents[k] = b_parent;\n\t\t\tmergedReturnStates[k] = b.returnStates[j];\n\t\t\tj += 1;\n\t\t}\n\t\tk += 1;\n\t}\n\t// copy over any payloads 
remaining in either array\n\tif (i < a.returnStates.length) {\n\t\tfor (let p = i; p < a.returnStates.length; p++) {\n\t\t\tmergedParents[k] = a.parents[p];\n\t\t\tmergedReturnStates[k] = a.returnStates[p];\n\t\t\tk += 1;\n\t\t}\n\t} else {\n\t\tfor (let p = j; p < b.returnStates.length; p++) {\n\t\t\tmergedParents[k] = b.parents[p];\n\t\t\tmergedReturnStates[k] = b.returnStates[p];\n\t\t\tk += 1;\n\t\t}\n\t}\n\t// trim merged if we combined a few that had same stack tops\n\tif (k < mergedParents.length) { // write index < last position; trim\n\t\tif (k === 1) { // for just one merged element, return singleton top\n\t\t\tconst a_ = SingletonPredictionContext.create(mergedParents[0],\n\t\t\t\t\tmergedReturnStates[0]);\n\t\t\tif (mergeCache !== null) {\n\t\t\t\tmergeCache.set(a, b, a_);\n\t\t\t}\n\t\t\treturn a_;\n\t\t}\n\t\tmergedParents = mergedParents.slice(0, k);\n\t\tmergedReturnStates = mergedReturnStates.slice(0, k);\n\t}\n\n\tconst M = new ArrayPredictionContext(mergedParents, mergedReturnStates);\n\n\t// if we created same array as a or b, return that instead\n\t// TODO: track whether this is possible above during merge sort for speed\n\tif (M === a) {\n\t\tif (mergeCache !== null) {\n\t\t\tmergeCache.set(a, b, a);\n\t\t}\n\t\treturn a;\n\t}\n\tif (M === b) {\n\t\tif (mergeCache !== null) {\n\t\t\tmergeCache.set(a, b, b);\n\t\t}\n\t\treturn b;\n\t}\n\tcombineCommonParents(mergedParents);\n\n\tif (mergeCache !== null) {\n\t\tmergeCache.set(a, b, M);\n\t}\n\treturn M;\n}\n\n/**\n * Make pass over all M {@code parents}; merge any {@code equals()}\n * ones.\n */\nfunction combineCommonParents(parents) {\n\tconst uniqueParents = new Map();\n\n\tfor (let p = 0; p < parents.length; p++) {\n\t\tconst parent = parents[p];\n\t\tif (!(uniqueParents.containsKey(parent))) {\n\t\t\tuniqueParents.put(parent, parent);\n\t\t}\n\t}\n\tfor (let q = 0; q < parents.length; q++) {\n\t\tparents[q] = uniqueParents.get(parents[q]);\n\t}\n}\n\nfunction 
getCachedPredictionContext(context, contextCache, visited) {\n\tif (context.isEmpty()) {\n\t\treturn context;\n\t}\n\tlet existing = visited.get(context) || null;\n\tif (existing !== null) {\n\t\treturn existing;\n\t}\n\texisting = contextCache.get(context);\n\tif (existing !== null) {\n\t\tvisited.put(context, existing);\n\t\treturn existing;\n\t}\n\tlet changed = false;\n\tlet parents = [];\n\tfor (let i = 0; i < parents.length; i++) {\n\t\tconst parent = getCachedPredictionContext(context.getParent(i), contextCache, visited);\n\t\tif (changed || parent !== context.getParent(i)) {\n\t\t\tif (!changed) {\n\t\t\t\tparents = [];\n\t\t\t\tfor (let j = 0; j < context.length; j++) {\n\t\t\t\t\tparents[j] = context.getParent(j);\n\t\t\t\t}\n\t\t\t\tchanged = true;\n\t\t\t}\n\t\t\tparents[i] = parent;\n\t\t}\n\t}\n\tif (!changed) {\n\t\tcontextCache.add(context);\n\t\tvisited.put(context, context);\n\t\treturn context;\n\t}\n\tlet updated = null;\n\tif (parents.length === 0) {\n\t\tupdated = PredictionContext.EMPTY;\n\t} else if (parents.length === 1) {\n\t\tupdated = SingletonPredictionContext.create(parents[0], context\n\t\t\t\t.getReturnState(0));\n\t} else {\n\t\tupdated = new ArrayPredictionContext(parents, context.returnStates);\n\t}\n\tcontextCache.add(updated);\n\tvisited.put(updated, updated);\n\tvisited.put(context, updated);\n\n\treturn updated;\n}\n\n// ter's recursive version of Sam's getAllNodes()\nfunction getAllContextNodes(context, nodes, visited) {\n\tif (nodes === null) {\n\t\tnodes = [];\n\t\treturn getAllContextNodes(context, nodes, visited);\n\t} else if (visited === null) {\n\t\tvisited = new Map();\n\t\treturn getAllContextNodes(context, nodes, visited);\n\t} else {\n\t\tif (context === null || visited.containsKey(context)) {\n\t\t\treturn nodes;\n\t\t}\n\t\tvisited.put(context, context);\n\t\tnodes.push(context);\n\t\tfor (let i = 0; i < context.length; i++) {\n\t\t\tgetAllContextNodes(context.getParent(i), nodes, visited);\n\t\t}\n\t\treturn 
nodes;\n\t}\n}\n\nmodule.exports = {\n\tmerge,\n\tPredictionContext,\n\tPredictionContextCache,\n\tSingletonPredictionContext,\n\tpredictionContextFromRuleContext,\n\tgetCachedPredictionContext\n}\n", "/* Copyright (c) 2012-2017 The ANTLR Project. All rights reserved.\n * Use of this file is governed by the BSD 3-clause license that\n * can be found in the LICENSE.txt file in the project root.\n */\n\nconst {Set, BitSet} = require('./Utils');\nconst {Token} = require('./Token');\nconst {ATNConfig} = require('./atn/ATNConfig');\nconst {IntervalSet} = require('./IntervalSet');\nconst {RuleStopState} = require('./atn/ATNState');\nconst {RuleTransition, NotSetTransition, WildcardTransition, AbstractPredicateTransition} = require('./atn/Transition');\nconst {predictionContextFromRuleContext, PredictionContext, SingletonPredictionContext} = require('./PredictionContext');\n\nclass LL1Analyzer {\n constructor(atn) {\n this.atn = atn;\n }\n\n /**\n * Calculates the SLL(1) expected lookahead set for each outgoing transition\n * of an {@link ATNState}. The returned array has one element for each\n * outgoing transition in {@code s}. 
If the closure from transition\n * i leads to a semantic predicate before matching a symbol, the\n * element at index i of the result will be {@code null}.\n *\n * @param s the ATN state\n * @return the expected symbols for each outgoing transition of {@code s}.\n */\n getDecisionLookahead(s) {\n if (s === null) {\n return null;\n }\n const count = s.transitions.length;\n const look = [];\n for(let alt=0; alt< count; alt++) {\n look[alt] = new IntervalSet();\n const lookBusy = new Set();\n const seeThruPreds = false; // fail to get lookahead upon pred\n this._LOOK(s.transition(alt).target, null, PredictionContext.EMPTY,\n look[alt], lookBusy, new BitSet(), seeThruPreds, false);\n // Wipe out lookahead for this alternative if we found nothing\n // or we had a predicate when we !seeThruPreds\n if (look[alt].length===0 || look[alt].contains(LL1Analyzer.HIT_PRED)) {\n look[alt] = null;\n }\n }\n return look;\n }\n\n /**\n * Compute set of tokens that can follow {@code s} in the ATN in the\n * specified {@code ctx}.\n *\n *

If {@code ctx} is {@code null} and the end of the rule containing\n * {@code s} is reached, {@link Token//EPSILON} is added to the result set.\n * If {@code ctx} is not {@code null} and the end of the outermost rule is\n * reached, {@link Token//EOF} is added to the result set.

\n *\n * @param s the ATN state\n * @param stopState the ATN state to stop at. This can be a\n * {@link BlockEndState} to detect epsilon paths through a closure.\n * @param ctx the complete parser context, or {@code null} if the context\n * should be ignored\n *\n * @return The set of tokens that can follow {@code s} in the ATN in the\n * specified {@code ctx}.\n */\n LOOK(s, stopState, ctx) {\n const r = new IntervalSet();\n const seeThruPreds = true; // ignore preds; get all lookahead\n ctx = ctx || null;\n const lookContext = ctx!==null ? predictionContextFromRuleContext(s.atn, ctx) : null;\n this._LOOK(s, stopState, lookContext, r, new Set(), new BitSet(), seeThruPreds, true);\n return r;\n }\n\n /**\n * Compute set of tokens that can follow {@code s} in the ATN in the\n * specified {@code ctx}.\n *\n *

If {@code ctx} is {@code null} and {@code stopState} or the end of the\n * rule containing {@code s} is reached, {@link Token//EPSILON} is added to\n * the result set. If {@code ctx} is not {@code null} and {@code addEOF} is\n * {@code true} and {@code stopState} or the end of the outermost rule is\n * reached, {@link Token//EOF} is added to the result set.

\n *\n * @param s the ATN state.\n * @param stopState the ATN state to stop at. This can be a\n * {@link BlockEndState} to detect epsilon paths through a closure.\n * @param ctx The outer context, or {@code null} if the outer context should\n * not be used.\n * @param look The result lookahead set.\n * @param lookBusy A set used for preventing epsilon closures in the ATN\n * from causing a stack overflow. Outside code should pass\n * {@code new Set} for this argument.\n * @param calledRuleStack A set used for preventing left recursion in the\n * ATN from causing a stack overflow. Outside code should pass\n * {@code new BitSet()} for this argument.\n * @param seeThruPreds {@code true} to true semantic predicates as\n * implicitly {@code true} and \"see through them\", otherwise {@code false}\n * to treat semantic predicates as opaque and add {@link //HIT_PRED} to the\n * result if one is encountered.\n * @param addEOF Add {@link Token//EOF} to the result if the end of the\n * outermost context is reached. 
This parameter has no effect if {@code ctx}\n * is {@code null}.\n */\n _LOOK(s, stopState , ctx, look, lookBusy, calledRuleStack, seeThruPreds, addEOF) {\n const c = new ATNConfig({state:s, alt:0, context: ctx}, null);\n if (lookBusy.contains(c)) {\n return;\n }\n lookBusy.add(c);\n if (s === stopState) {\n if (ctx ===null) {\n look.addOne(Token.EPSILON);\n return;\n } else if (ctx.isEmpty() && addEOF) {\n look.addOne(Token.EOF);\n return;\n }\n }\n if (s instanceof RuleStopState ) {\n if (ctx ===null) {\n look.addOne(Token.EPSILON);\n return;\n } else if (ctx.isEmpty() && addEOF) {\n look.addOne(Token.EOF);\n return;\n }\n if (ctx !== PredictionContext.EMPTY) {\n const removed = calledRuleStack.contains(s.ruleIndex);\n try {\n calledRuleStack.remove(s.ruleIndex);\n // run thru all possible stack tops in ctx\n for (let i = 0; i < ctx.length; i++) {\n const returnState = this.atn.states[ctx.getReturnState(i)];\n this._LOOK(returnState, stopState, ctx.getParent(i), look, lookBusy, calledRuleStack, seeThruPreds, addEOF);\n }\n }finally {\n if (removed) {\n calledRuleStack.add(s.ruleIndex);\n }\n }\n return;\n }\n }\n for(let j=0; jIf {@code context} is {@code null}, it is treated as\n * {@link ParserRuleContext//EMPTY}.

\n *\n * @param stateNumber the ATN state number\n * @param ctx the full parse context\n *\n * @return {IntervalSet} The set of potentially valid input symbols which could follow the\n * specified state in the specified context.\n *\n * @throws IllegalArgumentException if the ATN does not contain a state with\n * number {@code stateNumber}\n */\n getExpectedTokens(stateNumber, ctx ) {\n if ( stateNumber < 0 || stateNumber >= this.states.length ) {\n throw(\"Invalid state number.\");\n }\n const s = this.states[stateNumber];\n let following = this.nextTokens(s);\n if (!following.contains(Token.EPSILON)) {\n return following;\n }\n const expected = new IntervalSet();\n expected.addSet(following);\n expected.removeOne(Token.EPSILON);\n while (ctx !== null && ctx.invokingState >= 0 && following.contains(Token.EPSILON)) {\n const invokingState = this.states[ctx.invokingState];\n const rt = invokingState.transitions[0];\n following = this.nextTokens(rt.followState);\n expected.addSet(following);\n expected.removeOne(Token.EPSILON);\n ctx = ctx.parentCtx;\n }\n if (following.contains(Token.EPSILON)) {\n expected.addOne(Token.EOF);\n }\n return expected;\n }\n}\n\nATN.INVALID_ALT_NUMBER = 0;\n\nmodule.exports = ATN;\n", "/* Copyright (c) 2012-2017 The ANTLR Project. All rights reserved.\n * Use of this file is governed by the BSD 3-clause license that\n * can be found in the LICENSE.txt file in the project root.\n */\n\n/**\n * Represents the type of recognizer an ATN applies to\n */\nmodule.exports = {\n LEXER: 0,\n PARSER: 1\n};\n\n", "/* Copyright (c) 2012-2017 The ANTLR Project. All rights reserved.\n * Use of this file is governed by the BSD 3-clause license that\n * can be found in the LICENSE.txt file in the project root.\n */\n\nclass ATNDeserializationOptions {\n\tconstructor(copyFrom) {\n\t\tif(copyFrom===undefined) {\n\t\t\tcopyFrom = null;\n\t\t}\n\t\tthis.readOnly = false;\n\t\tthis.verifyATN = copyFrom===null ? 
true : copyFrom.verifyATN;\n\t\tthis.generateRuleBypassTransitions = copyFrom===null ? false : copyFrom.generateRuleBypassTransitions;\n\t}\n}\n\nATNDeserializationOptions.defaultOptions = new ATNDeserializationOptions();\nATNDeserializationOptions.defaultOptions.readOnly = true;\n\n// def __setattr__(self, key, value):\n// if key!=\"readOnly\" and self.readOnly:\n// raise Exception(\"The object is read only.\")\n// super(type(self), self).__setattr__(key,value)\n\nmodule.exports = ATNDeserializationOptions\n", "/* Copyright (c) 2012-2017 The ANTLR Project. All rights reserved.\n * Use of this file is governed by the BSD 3-clause license that\n * can be found in the LICENSE.txt file in the project root.\n */\n\nconst LexerActionType = {\n // The type of a {@link LexerChannelAction} action.\n CHANNEL: 0,\n // The type of a {@link LexerCustomAction} action\n CUSTOM: 1,\n // The type of a {@link LexerModeAction} action.\n MODE: 2,\n //The type of a {@link LexerMoreAction} action.\n MORE: 3,\n //The type of a {@link LexerPopModeAction} action.\n POP_MODE: 4,\n //The type of a {@link LexerPushModeAction} action.\n PUSH_MODE: 5,\n //The type of a {@link LexerSkipAction} action.\n SKIP: 6,\n //The type of a {@link LexerTypeAction} action.\n TYPE: 7\n}\n\nclass LexerAction {\n constructor(action) {\n this.actionType = action;\n this.isPositionDependent = false;\n }\n\n hashCode() {\n const hash = new Hash();\n this.updateHashCode(hash);\n return hash.finish()\n }\n\n updateHashCode(hash) {\n hash.update(this.actionType);\n }\n\n equals(other) {\n return this === other;\n }\n}\n\n\n/**\n * Implements the {@code skip} lexer action by calling {@link Lexer//skip}.\n *\n *

The {@code skip} command does not have any parameters, so this action is\n * implemented as a singleton instance exposed by {@link //INSTANCE}.

\n */\nclass LexerSkipAction extends LexerAction {\n constructor() {\n super(LexerActionType.SKIP);\n }\n\n execute(lexer) {\n lexer.skip();\n }\n\n toString() {\n return \"skip\";\n }\n}\n\n// Provides a singleton instance of this parameterless lexer action.\nLexerSkipAction.INSTANCE = new LexerSkipAction();\n\n/**\n * Implements the {@code type} lexer action by calling {@link Lexer//setType}\n * with the assigned type\n */\nclass LexerTypeAction extends LexerAction {\n constructor(type) {\n super(LexerActionType.TYPE);\n this.type = type;\n }\n\n execute(lexer) {\n lexer.type = this.type;\n }\n\n updateHashCode(hash) {\n hash.update(this.actionType, this.type);\n }\n\n equals(other) {\n if(this === other) {\n return true;\n } else if (! (other instanceof LexerTypeAction)) {\n return false;\n } else {\n return this.type === other.type;\n }\n }\n\n toString() {\n return \"type(\" + this.type + \")\";\n }\n}\n\n\n/**\n * Implements the {@code pushMode} lexer action by calling\n * {@link Lexer//pushMode} with the assigned mode\n */\nclass LexerPushModeAction extends LexerAction {\n constructor(mode) {\n super(LexerActionType.PUSH_MODE);\n this.mode = mode;\n }\n\n /**\n *

This action is implemented by calling {@link Lexer//pushMode} with the\n * value provided by {@link //getMode}.

\n */\n execute(lexer) {\n lexer.pushMode(this.mode);\n }\n\n updateHashCode(hash) {\n hash.update(this.actionType, this.mode);\n }\n\n equals(other) {\n if (this === other) {\n return true;\n } else if (! (other instanceof LexerPushModeAction)) {\n return false;\n } else {\n return this.mode === other.mode;\n }\n }\n\n toString() {\n return \"pushMode(\" + this.mode + \")\";\n }\n}\n\n/**\n * Implements the {@code popMode} lexer action by calling {@link Lexer//popMode}.\n *\n *

The {@code popMode} command does not have any parameters, so this action is\n * implemented as a singleton instance exposed by {@link //INSTANCE}.

\n */\nclass LexerPopModeAction extends LexerAction {\n constructor() {\n super(LexerActionType.POP_MODE);\n }\n\n /**\n *

This action is implemented by calling {@link Lexer//popMode}.

\n */\n execute(lexer) {\n lexer.popMode();\n }\n\n toString() {\n return \"popMode\";\n }\n}\n\nLexerPopModeAction.INSTANCE = new LexerPopModeAction();\n\n/**\n * Implements the {@code more} lexer action by calling {@link Lexer//more}.\n *\n *

The {@code more} command does not have any parameters, so this action is\n * implemented as a singleton instance exposed by {@link //INSTANCE}.

\n */\nclass LexerMoreAction extends LexerAction {\n constructor() {\n super(LexerActionType.MORE);\n }\n\n /**\n *

This action is implemented by calling {@link Lexer//popMode}.

\n */\n execute(lexer) {\n lexer.more();\n }\n\n toString() {\n return \"more\";\n }\n}\n\nLexerMoreAction.INSTANCE = new LexerMoreAction();\n\n\n/**\n * Implements the {@code mode} lexer action by calling {@link Lexer//mode} with\n * the assigned mode\n */\nclass LexerModeAction extends LexerAction {\n constructor(mode) {\n super(LexerActionType.MODE);\n this.mode = mode;\n }\n\n /**\n *

This action is implemented by calling {@link Lexer//mode} with the\n * value provided by {@link //getMode}.

\n */\n execute(lexer) {\n lexer.mode(this.mode);\n }\n\n updateHashCode(hash) {\n hash.update(this.actionType, this.mode);\n }\n\n equals(other) {\n if (this === other) {\n return true;\n } else if (! (other instanceof LexerModeAction)) {\n return false;\n } else {\n return this.mode === other.mode;\n }\n }\n\n toString() {\n return \"mode(\" + this.mode + \")\";\n }\n}\n\n/**\n * Executes a custom lexer action by calling {@link Recognizer//action} with the\n * rule and action indexes assigned to the custom action. The implementation of\n * a custom action is added to the generated code for the lexer in an override\n * of {@link Recognizer//action} when the grammar is compiled.\n *\n *

This class may represent embedded actions created with the {...}\n * syntax in ANTLR 4, as well as actions created for lexer commands where the\n * command argument could not be evaluated when the grammar was compiled.

\n */\nclass LexerCustomAction extends LexerAction {\n /**\n * Constructs a custom lexer action with the specified rule and action\n * indexes.\n *\n * @param ruleIndex The rule index to use for calls to\n * {@link Recognizer//action}.\n * @param actionIndex The action index to use for calls to\n * {@link Recognizer//action}.\n */\n constructor(ruleIndex, actionIndex) {\n super(LexerActionType.CUSTOM);\n this.ruleIndex = ruleIndex;\n this.actionIndex = actionIndex;\n this.isPositionDependent = true;\n }\n\n /**\n *

Custom actions are implemented by calling {@link Lexer//action} with the\n * appropriate rule and action indexes.

\n */\n execute(lexer) {\n lexer.action(null, this.ruleIndex, this.actionIndex);\n }\n\n updateHashCode(hash) {\n hash.update(this.actionType, this.ruleIndex, this.actionIndex);\n }\n\n equals(other) {\n if (this === other) {\n return true;\n } else if (! (other instanceof LexerCustomAction)) {\n return false;\n } else {\n return this.ruleIndex === other.ruleIndex && this.actionIndex === other.actionIndex;\n }\n }\n}\n\n/**\n * Implements the {@code channel} lexer action by calling\n * {@link Lexer//setChannel} with the assigned channel.\n * Constructs a new {@code channel} action with the specified channel value.\n * @param channel The channel value to pass to {@link Lexer//setChannel}\n */\nclass LexerChannelAction extends LexerAction {\n constructor(channel) {\n super(LexerActionType.CHANNEL);\n this.channel = channel;\n }\n\n /**\n *

This action is implemented by calling {@link Lexer//setChannel} with the\n * value provided by {@link //getChannel}.

\n */\n execute(lexer) {\n lexer._channel = this.channel;\n }\n\n updateHashCode(hash) {\n hash.update(this.actionType, this.channel);\n }\n\n equals(other) {\n if (this === other) {\n return true;\n } else if (! (other instanceof LexerChannelAction)) {\n return false;\n } else {\n return this.channel === other.channel;\n }\n }\n\n toString() {\n return \"channel(\" + this.channel + \")\";\n }\n}\n\n\n/**\n * This implementation of {@link LexerAction} is used for tracking input offsets\n * for position-dependent actions within a {@link LexerActionExecutor}.\n *\n *

This action is not serialized as part of the ATN, and is only required for\n * position-dependent lexer actions which appear at a location other than the\n * end of a rule. For more information about DFA optimizations employed for\n * lexer actions, see {@link LexerActionExecutor//append} and\n * {@link LexerActionExecutor//fixOffsetBeforeMatch}.

\n *\n * Constructs a new indexed custom action by associating a character offset\n * with a {@link LexerAction}.\n *\n *

Note: This class is only required for lexer actions for which\n * {@link LexerAction//isPositionDependent} returns {@code true}.

\n *\n * @param offset The offset into the input {@link CharStream}, relative to\n * the token start index, at which the specified lexer action should be\n * executed.\n * @param action The lexer action to execute at a particular offset in the\n * input {@link CharStream}.\n */\nclass LexerIndexedCustomAction extends LexerAction {\n constructor(offset, action) {\n super(action.actionType);\n this.offset = offset;\n this.action = action;\n this.isPositionDependent = true;\n }\n\n /**\n *

This method calls {@link //execute} on the result of {@link //getAction}\n * using the provided {@code lexer}.

\n */\n execute(lexer) {\n // assume the input stream position was properly set by the calling code\n this.action.execute(lexer);\n }\n\n updateHashCode(hash) {\n hash.update(this.actionType, this.offset, this.action);\n }\n\n equals(other) {\n if (this === other) {\n return true;\n } else if (! (other instanceof LexerIndexedCustomAction)) {\n return false;\n } else {\n return this.offset === other.offset && this.action === other.action;\n }\n }\n}\n\nmodule.exports = {\n LexerActionType,\n LexerSkipAction,\n LexerChannelAction,\n LexerCustomAction,\n LexerIndexedCustomAction,\n LexerMoreAction,\n LexerTypeAction,\n LexerPushModeAction,\n LexerPopModeAction,\n LexerModeAction\n}\n", "/* Copyright (c) 2012-2017 The ANTLR Project. All rights reserved.\n * Use of this file is governed by the BSD 3-clause license that\n * can be found in the LICENSE.txt file in the project root.\n */\n\nconst {Token} = require('./../Token');\nconst ATN = require('./ATN');\nconst ATNType = require('./ATNType');\n\nconst {\n ATNState,\n BasicState,\n DecisionState,\n BlockStartState,\n BlockEndState,\n LoopEndState,\n RuleStartState,\n RuleStopState,\n TokensStartState,\n PlusLoopbackState,\n StarLoopbackState,\n StarLoopEntryState,\n PlusBlockStartState,\n StarBlockStartState,\n BasicBlockStartState\n} = require('./ATNState');\n\nconst {\n Transition,\n AtomTransition,\n SetTransition,\n NotSetTransition,\n RuleTransition,\n RangeTransition,\n ActionTransition,\n EpsilonTransition,\n WildcardTransition,\n PredicateTransition,\n PrecedencePredicateTransition\n} = require('./Transition')\n\nconst {IntervalSet} = require('./../IntervalSet');\nconst ATNDeserializationOptions = require('./ATNDeserializationOptions');\n\nconst {\n LexerActionType,\n LexerSkipAction,\n LexerChannelAction,\n LexerCustomAction,\n LexerMoreAction,\n LexerTypeAction,\n LexerPushModeAction,\n LexerPopModeAction,\n LexerModeAction,\n} = require('./LexerAction');\n\n// This is the earliest supported serialized 
UUID.\n// stick to serialized version for now, we don't need a UUID instance\nconst BASE_SERIALIZED_UUID = \"AADB8D7E-AEEF-4415-AD2B-8204D6CF042E\";\n\n//\n// This UUID indicates the serialized ATN contains two sets of\n// IntervalSets, where the second set's values are encoded as\n// 32-bit integers to support the full Unicode SMP range up to U+10FFFF.\n//\nconst ADDED_UNICODE_SMP = \"59627784-3BE5-417A-B9EB-8131A7286089\";\n\n// This list contains all of the currently supported UUIDs, ordered by when\n// the feature first appeared in this branch.\nconst SUPPORTED_UUIDS = [ BASE_SERIALIZED_UUID, ADDED_UNICODE_SMP ];\n\nconst SERIALIZED_VERSION = 3;\n\n// This is the current serialized UUID.\nconst SERIALIZED_UUID = ADDED_UNICODE_SMP;\n\nfunction initArray( length, value) {\n\tconst tmp = [];\n\ttmp[length-1] = value;\n\treturn tmp.map(function(i) {return value;});\n}\n\nclass ATNDeserializer {\n constructor(options) {\n\n if ( options=== undefined || options === null ) {\n options = ATNDeserializationOptions.defaultOptions;\n }\n this.deserializationOptions = options;\n this.stateFactories = null;\n this.actionFactories = null;\n }\n\n /**\n * Determines if a particular serialized representation of an ATN supports\n * a particular feature, identified by the {@link UUID} used for serializing\n * the ATN at the time the feature was first introduced.\n *\n * @param feature The {@link UUID} marking the first time the feature was\n * supported in the serialized ATN.\n * @param actualUuid The {@link UUID} of the actual serialized ATN which is\n * currently being deserialized.\n * @return {@code true} if the {@code actualUuid} value represents a\n * serialized ATN at or after the feature identified by {@code feature} was\n * introduced; otherwise, {@code false}.\n */\n isFeatureSupported(feature, actualUuid) {\n const idx1 = SUPPORTED_UUIDS.indexOf(feature);\n if (idx1<0) {\n return false;\n }\n const idx2 = SUPPORTED_UUIDS.indexOf(actualUuid);\n return idx2 >= idx1;\n 
}\n\n deserialize(data) {\n this.reset(data);\n this.checkVersion();\n this.checkUUID();\n const atn = this.readATN();\n this.readStates(atn);\n this.readRules(atn);\n this.readModes(atn);\n const sets = [];\n // First, deserialize sets with 16-bit arguments <= U+FFFF.\n this.readSets(atn, sets, this.readInt.bind(this));\n // Next, if the ATN was serialized with the Unicode SMP feature,\n // deserialize sets with 32-bit arguments <= U+10FFFF.\n if (this.isFeatureSupported(ADDED_UNICODE_SMP, this.uuid)) {\n this.readSets(atn, sets, this.readInt32.bind(this));\n }\n this.readEdges(atn, sets);\n this.readDecisions(atn);\n this.readLexerActions(atn);\n this.markPrecedenceDecisions(atn);\n this.verifyATN(atn);\n if (this.deserializationOptions.generateRuleBypassTransitions && atn.grammarType === ATNType.PARSER ) {\n this.generateRuleBypassTransitions(atn);\n // re-verify after modification\n this.verifyATN(atn);\n }\n return atn;\n }\n\n reset(data) {\n const adjust = function(c) {\n const v = c.charCodeAt(0);\n return v>1 ? 
v-2 : v + 65534;\n };\n const temp = data.split(\"\").map(adjust);\n // don't adjust the first value since that's the version number\n temp[0] = data.charCodeAt(0);\n this.data = temp;\n this.pos = 0;\n }\n\n checkVersion() {\n const version = this.readInt();\n if ( version !== SERIALIZED_VERSION ) {\n throw (\"Could not deserialize ATN with version \" + version + \" (expected \" + SERIALIZED_VERSION + \").\");\n }\n }\n\n checkUUID() {\n const uuid = this.readUUID();\n if (SUPPORTED_UUIDS.indexOf(uuid)<0) {\n throw (\"Could not deserialize ATN with UUID: \" + uuid +\n \" (expected \" + SERIALIZED_UUID + \" or a legacy UUID).\", uuid, SERIALIZED_UUID);\n }\n this.uuid = uuid;\n }\n\n readATN() {\n const grammarType = this.readInt();\n const maxTokenType = this.readInt();\n return new ATN(grammarType, maxTokenType);\n }\n\n readStates(atn) {\n let j, pair, stateNumber;\n const loopBackStateNumbers = [];\n const endStateNumbers = [];\n const nstates = this.readInt();\n for(let i=0; i 0) {\n bypassStart.addTransition(ruleToStartState.transitions[count-1]);\n ruleToStartState.transitions = ruleToStartState.transitions.slice(-1);\n }\n // link the new states\n atn.ruleToStartState[idx].addTransition(new EpsilonTransition(bypassStart));\n bypassStop.addTransition(new EpsilonTransition(endState));\n\n const matchState = new BasicState();\n atn.addState(matchState);\n matchState.addTransition(new AtomTransition(bypassStop, atn.ruleToTokenType[idx]));\n bypassStart.addTransition(new EpsilonTransition(matchState));\n }\n\n stateIsEndStateFor(state, idx) {\n if ( state.ruleIndex !== idx) {\n return null;\n }\n if (!( state instanceof StarLoopEntryState)) {\n return null;\n }\n const maybeLoopEndState = state.transitions[state.transitions.length - 1].target;\n if (!( maybeLoopEndState instanceof LoopEndState)) {\n return null;\n }\n if (maybeLoopEndState.epsilonOnlyTransitions &&\n (maybeLoopEndState.transitions[0].target instanceof RuleStopState)) {\n return state;\n } else 
{\n return null;\n }\n }\n\n /**\n * Analyze the {@link StarLoopEntryState} states in the specified ATN to set\n * the {@link StarLoopEntryState//isPrecedenceDecision} field to the\n * correct value.\n * @param atn The ATN.\n */\n markPrecedenceDecisions(atn) {\n for(let i=0; i= 0);\n } else {\n this.checkCondition(state.transitions.length <= 1 || (state instanceof RuleStopState));\n }\n }\n }\n\n checkCondition(condition, message) {\n if (!condition) {\n if (message === undefined || message===null) {\n message = \"IllegalState\";\n }\n throw (message);\n }\n }\n\n readInt() {\n return this.data[this.pos++];\n }\n\n readInt32() {\n const low = this.readInt();\n const high = this.readInt();\n return low | (high << 16);\n }\n\n readLong() {\n const low = this.readInt32();\n const high = this.readInt32();\n return (low & 0x00000000FFFFFFFF) | (high << 32);\n }\n\n readUUID() {\n const bb = [];\n for(let i=7;i>=0;i--) {\n const int = this.readInt();\n /* jshint bitwise: false */\n bb[(2*i)+1] = int & 0xFF;\n bb[2*i] = (int >> 8) & 0xFF;\n }\n return byteToHex[bb[0]] + byteToHex[bb[1]] +\n byteToHex[bb[2]] + byteToHex[bb[3]] + '-' +\n byteToHex[bb[4]] + byteToHex[bb[5]] + '-' +\n byteToHex[bb[6]] + byteToHex[bb[7]] + '-' +\n byteToHex[bb[8]] + byteToHex[bb[9]] + '-' +\n byteToHex[bb[10]] + byteToHex[bb[11]] +\n byteToHex[bb[12]] + byteToHex[bb[13]] +\n byteToHex[bb[14]] + byteToHex[bb[15]];\n }\n\n edgeFactory(atn, type, src, trg, arg1, arg2, arg3, sets) {\n const target = atn.states[trg];\n switch(type) {\n case Transition.EPSILON:\n return new EpsilonTransition(target);\n case Transition.RANGE:\n return arg3 !== 0 ? 
new RangeTransition(target, Token.EOF, arg2) : new RangeTransition(target, arg1, arg2);\n case Transition.RULE:\n return new RuleTransition(atn.states[arg1], arg2, arg3, target);\n case Transition.PREDICATE:\n return new PredicateTransition(target, arg1, arg2, arg3 !== 0);\n case Transition.PRECEDENCE:\n return new PrecedencePredicateTransition(target, arg1);\n case Transition.ATOM:\n return arg3 !== 0 ? new AtomTransition(target, Token.EOF) : new AtomTransition(target, arg1);\n case Transition.ACTION:\n return new ActionTransition(target, arg1, arg2, arg3 !== 0);\n case Transition.SET:\n return new SetTransition(target, sets[arg1]);\n case Transition.NOT_SET:\n return new NotSetTransition(target, sets[arg1]);\n case Transition.WILDCARD:\n return new WildcardTransition(target);\n default:\n throw \"The specified transition type: \" + type + \" is not valid.\";\n }\n }\n\n stateFactory(type, ruleIndex) {\n if (this.stateFactories === null) {\n const sf = [];\n sf[ATNState.INVALID_TYPE] = null;\n sf[ATNState.BASIC] = () => new BasicState();\n sf[ATNState.RULE_START] = () => new RuleStartState();\n sf[ATNState.BLOCK_START] = () => new BasicBlockStartState();\n sf[ATNState.PLUS_BLOCK_START] = () => new PlusBlockStartState();\n sf[ATNState.STAR_BLOCK_START] = () => new StarBlockStartState();\n sf[ATNState.TOKEN_START] = () => new TokensStartState();\n sf[ATNState.RULE_STOP] = () => new RuleStopState();\n sf[ATNState.BLOCK_END] = () => new BlockEndState();\n sf[ATNState.STAR_LOOP_BACK] = () => new StarLoopbackState();\n sf[ATNState.STAR_LOOP_ENTRY] = () => new StarLoopEntryState();\n sf[ATNState.PLUS_LOOP_BACK] = () => new PlusLoopbackState();\n sf[ATNState.LOOP_END] = () => new LoopEndState();\n this.stateFactories = sf;\n }\n if (type>this.stateFactories.length || this.stateFactories[type] === null) {\n throw(\"The specified state type \" + type + \" is not valid.\");\n } else {\n const s = this.stateFactories[type]();\n if (s!==null) {\n s.ruleIndex = ruleIndex;\n 
return s;\n }\n }\n }\n\n lexerActionFactory(type, data1, data2) {\n if (this.actionFactories === null) {\n const af = [];\n af[LexerActionType.CHANNEL] = (data1, data2) => new LexerChannelAction(data1);\n af[LexerActionType.CUSTOM] = (data1, data2) => new LexerCustomAction(data1, data2);\n af[LexerActionType.MODE] = (data1, data2) => new LexerModeAction(data1);\n af[LexerActionType.MORE] = (data1, data2) => LexerMoreAction.INSTANCE;\n af[LexerActionType.POP_MODE] = (data1, data2) => LexerPopModeAction.INSTANCE;\n af[LexerActionType.PUSH_MODE] = (data1, data2) => new LexerPushModeAction(data1);\n af[LexerActionType.SKIP] = (data1, data2) => LexerSkipAction.INSTANCE;\n af[LexerActionType.TYPE] = (data1, data2) => new LexerTypeAction(data1);\n this.actionFactories = af;\n }\n if (type>this.actionFactories.length || this.actionFactories[type] === null) {\n throw(\"The specified lexer action type \" + type + \" is not valid.\");\n } else {\n return this.actionFactories[type](data1, data2);\n }\n }\n}\n\nfunction createByteToHex() {\n\tconst bth = [];\n\tfor (let i = 0; i < 256; i++) {\n\t\tbth[i] = (i + 0x100).toString(16).substr(1).toUpperCase();\n\t}\n\treturn bth;\n}\n\nconst byteToHex = createByteToHex();\n\n\nmodule.exports = ATNDeserializer;\n", "/* Copyright (c) 2012-2017 The ANTLR Project. All rights reserved.\n * Use of this file is governed by the BSD 3-clause license that\n * can be found in the LICENSE.txt file in the project root.\n */\n\n/**\n * Provides an empty default implementation of {@link ANTLRErrorListener}. 
The\n * default implementation of each method does nothing, but can be overridden as\n * necessary.\n */\nclass ErrorListener {\n syntaxError(recognizer, offendingSymbol, line, column, msg, e) {\n }\n\n reportAmbiguity(recognizer, dfa, startIndex, stopIndex, exact, ambigAlts, configs) {\n }\n\n reportAttemptingFullContext(recognizer, dfa, startIndex, stopIndex, conflictingAlts, configs) {\n }\n\n reportContextSensitivity(recognizer, dfa, startIndex, stopIndex, prediction, configs) {\n }\n}\n\n/**\n * {@inheritDoc}\n *\n *

\n * This implementation prints messages to {@link System//err} containing the\n * values of {@code line}, {@code charPositionInLine}, and {@code msg} using\n * the following format.

\n *\n *
\n * line line:charPositionInLine msg\n * 
\n *\n */\nclass ConsoleErrorListener extends ErrorListener {\n constructor() {\n super();\n }\n\n syntaxError(recognizer, offendingSymbol, line, column, msg, e) {\n console.error(\"line \" + line + \":\" + column + \" \" + msg);\n }\n}\n\n\n/**\n * Provides a default instance of {@link ConsoleErrorListener}.\n */\nConsoleErrorListener.INSTANCE = new ConsoleErrorListener();\n\nclass ProxyErrorListener extends ErrorListener {\n constructor(delegates) {\n super();\n if (delegates===null) {\n throw \"delegates\";\n }\n this.delegates = delegates;\n return this;\n }\n\n syntaxError(recognizer, offendingSymbol, line, column, msg, e) {\n this.delegates.map(d => d.syntaxError(recognizer, offendingSymbol, line, column, msg, e));\n }\n\n reportAmbiguity(recognizer, dfa, startIndex, stopIndex, exact, ambigAlts, configs) {\n this.delegates.map(d => d.reportAmbiguity(recognizer, dfa, startIndex, stopIndex, exact, ambigAlts, configs));\n }\n\n reportAttemptingFullContext(recognizer, dfa, startIndex, stopIndex, conflictingAlts, configs) {\n this.delegates.map(d => d.reportAttemptingFullContext(recognizer, dfa, startIndex, stopIndex, conflictingAlts, configs));\n }\n\n reportContextSensitivity(recognizer, dfa, startIndex, stopIndex, prediction, configs) {\n this.delegates.map(d => d.reportContextSensitivity(recognizer, dfa, startIndex, stopIndex, prediction, configs));\n }\n}\n\nmodule.exports = {ErrorListener, ConsoleErrorListener, ProxyErrorListener}\n\n", "/* Copyright (c) 2012-2017 The ANTLR Project. 
All rights reserved.\n * Use of this file is governed by the BSD 3-clause license that\n * can be found in the LICENSE.txt file in the project root.\n */\n\nconst {Token} = require('./Token');\nconst {ConsoleErrorListener} = require('./error/ErrorListener');\nconst {ProxyErrorListener} = require('./error/ErrorListener');\n\nclass Recognizer {\n constructor() {\n this._listeners = [ ConsoleErrorListener.INSTANCE ];\n this._interp = null;\n this._stateNumber = -1;\n }\n\n checkVersion(toolVersion) {\n const runtimeVersion = \"4.9.2\";\n if (runtimeVersion!==toolVersion) {\n console.log(\"ANTLR runtime and generated code versions disagree: \"+runtimeVersion+\"!=\"+toolVersion);\n }\n }\n\n addErrorListener(listener) {\n this._listeners.push(listener);\n }\n\n removeErrorListeners() {\n this._listeners = [];\n }\n\n getTokenTypeMap() {\n const tokenNames = this.getTokenNames();\n if (tokenNames===null) {\n throw(\"The current recognizer does not provide a list of token names.\");\n }\n let result = this.tokenTypeMapCache[tokenNames];\n if(result===undefined) {\n result = tokenNames.reduce(function(o, k, i) { o[k] = i; });\n result.EOF = Token.EOF;\n this.tokenTypeMapCache[tokenNames] = result;\n }\n return result;\n }\n\n /**\n * Get a map from rule names to rule indexes.\n *

Used for XPath and tree pattern compilation.

\n */\n getRuleIndexMap() {\n const ruleNames = this.ruleNames;\n if (ruleNames===null) {\n throw(\"The current recognizer does not provide a list of rule names.\");\n }\n let result = this.ruleIndexMapCache[ruleNames]; // todo: should it be Recognizer.ruleIndexMapCache ?\n if(result===undefined) {\n result = ruleNames.reduce(function(o, k, i) { o[k] = i; });\n this.ruleIndexMapCache[ruleNames] = result;\n }\n return result;\n }\n\n getTokenType(tokenName) {\n const ttype = this.getTokenTypeMap()[tokenName];\n if (ttype !==undefined) {\n return ttype;\n } else {\n return Token.INVALID_TYPE;\n }\n }\n\n // What is the error header, normally line/character position information?\n getErrorHeader(e) {\n const line = e.getOffendingToken().line;\n const column = e.getOffendingToken().column;\n return \"line \" + line + \":\" + column;\n }\n\n /**\n * How should a token be displayed in an error message? The default\n * is to display just the text, but during development you might\n * want to have a lot of information spit out. Override in that case\n * to use t.toString() (which, for CommonToken, dumps everything about\n * the token). This is better than forcing you to override a method in\n * your token objects because you don't have to go modify your lexer\n * so that it creates a new Java type.\n *\n * @deprecated This method is not called by the ANTLR 4 Runtime. Specific\n * implementations of {@link ANTLRErrorStrategy} may provide a similar\n * feature when necessary. 
For example, see\n * {@link DefaultErrorStrategy//getTokenErrorDisplay}.*/\n getTokenErrorDisplay(t) {\n if (t===null) {\n return \"\";\n }\n let s = t.text;\n if (s===null) {\n if (t.type===Token.EOF) {\n s = \"\";\n } else {\n s = \"<\" + t.type + \">\";\n }\n }\n s = s.replace(\"\\n\",\"\\\\n\").replace(\"\\r\",\"\\\\r\").replace(\"\\t\",\"\\\\t\");\n return \"'\" + s + \"'\";\n }\n\n getErrorListenerDispatch() {\n return new ProxyErrorListener(this._listeners);\n }\n\n /**\n * subclass needs to override these if there are sempreds or actions\n * that the ATN interp needs to execute\n */\n sempred(localctx, ruleIndex, actionIndex) {\n return true;\n }\n\n precpred(localctx , precedence) {\n return true;\n }\n\n get state(){\n return this._stateNumber;\n }\n\n set state(state) {\n this._stateNumber = state;\n }\n}\n\nRecognizer.tokenTypeMapCache = {};\nRecognizer.ruleIndexMapCache = {};\n\nmodule.exports = Recognizer;\n", "/* Copyright (c) 2012-2017 The ANTLR Project. All rights reserved.\n * Use of this file is governed by the BSD 3-clause license that\n * can be found in the LICENSE.txt file in the project root.\n */\n\nconst CommonToken = require('./Token').CommonToken;\n\nclass TokenFactory {}\n\n/**\n * This default implementation of {@link TokenFactory} creates\n * {@link CommonToken} objects.\n */\nclass CommonTokenFactory extends TokenFactory {\n constructor(copyText) {\n super();\n /**\n * Indicates whether {@link CommonToken//setText} should be called after\n * constructing tokens to explicitly set the text. This is useful for cases\n * where the input stream might not be able to provide arbitrary substrings\n * of text from the input after the lexer creates a token (e.g. the\n * implementation of {@link CharStream//getText} in\n * {@link UnbufferedCharStream} throws an\n * {@link UnsupportedOperationException}). 
Explicitly setting the token text\n * allows {@link Token//getText} to be called at any time regardless of the\n * input stream implementation.\n *\n *

\n * The default value is {@code false} to avoid the performance and memory\n * overhead of copying text for every token unless explicitly requested.

\n */\n this.copyText = copyText===undefined ? false : copyText;\n }\n\n create(source, type, text, channel, start, stop, line, column) {\n const t = new CommonToken(source, type, channel, start, stop);\n t.line = line;\n t.column = column;\n if (text !==null) {\n t.text = text;\n } else if (this.copyText && source[1] !==null) {\n t.text = source[1].getText(start,stop);\n }\n return t;\n }\n\n createThin(type, text) {\n const t = new CommonToken(null, type);\n t.text = text;\n return t;\n }\n}\n\n/**\n * The default {@link CommonTokenFactory} instance.\n *\n *

\n * This token factory does not explicitly copy token text when constructing\n * tokens.

\n */\nCommonTokenFactory.DEFAULT = new CommonTokenFactory();\n\nmodule.exports = CommonTokenFactory;\n", "/* Copyright (c) 2012-2017 The ANTLR Project. All rights reserved.\n * Use of this file is governed by the BSD 3-clause license that\n * can be found in the LICENSE.txt file in the project root.\n */\n\n/**\n * The root of the ANTLR exception hierarchy. In general, ANTLR tracks just\n * 3 kinds of errors: prediction errors, failed predicate errors, and\n * mismatched input errors. In each case, the parser knows where it is\n * in the input, where it is in the ATN, the rule invocation stack,\n * and what kind of problem occurred.\n */\n\nconst {PredicateTransition} = require('./../atn/Transition');\nconst {Interval} = require('../IntervalSet').Interval;\n\nclass RecognitionException extends Error {\n constructor(params) {\n super(params.message);\n if (!!Error.captureStackTrace) {\n Error.captureStackTrace(this, RecognitionException);\n } else {\n var stack = new Error().stack;\n }\n this.message = params.message;\n this.recognizer = params.recognizer;\n this.input = params.input;\n this.ctx = params.ctx;\n /**\n * The current {@link Token} when an error occurred. Since not all streams\n * support accessing symbols by index, we have to track the {@link Token}\n * instance itself\n */\n this.offendingToken = null;\n /**\n * Get the ATN state number the parser was in at the time the error\n * occurred. For {@link NoViableAltException} and\n * {@link LexerNoViableAltException} exceptions, this is the\n * {@link DecisionState} number. For others, it is the state whose outgoing\n * edge we couldn't match.\n */\n this.offendingState = -1;\n if (this.recognizer!==null) {\n this.offendingState = this.recognizer.state;\n }\n }\n\n /**\n * Gets the set of input symbols which could potentially follow the\n * previously matched symbol at the time this exception was thrown.\n *\n *

If the set of expected tokens is not known and could not be computed,\n * this method returns {@code null}.

\n *\n * @return The set of token types that could potentially follow the current\n * state in the ATN, or {@code null} if the information is not available.\n */\n getExpectedTokens() {\n if (this.recognizer!==null) {\n return this.recognizer.atn.getExpectedTokens(this.offendingState, this.ctx);\n } else {\n return null;\n }\n }\n\n //

If the state number is not known, this method returns -1.

\n toString() {\n return this.message;\n }\n}\n\nclass LexerNoViableAltException extends RecognitionException {\n constructor(lexer, input, startIndex, deadEndConfigs) {\n super({message: \"\", recognizer: lexer, input: input, ctx: null});\n this.startIndex = startIndex;\n this.deadEndConfigs = deadEndConfigs;\n }\n\n toString() {\n let symbol = \"\";\n if (this.startIndex >= 0 && this.startIndex < this.input.size) {\n symbol = this.input.getText(new Interval(this.startIndex,this.startIndex));\n }\n return \"LexerNoViableAltException\" + symbol;\n }\n}\n\n\n/**\n * Indicates that the parser could not decide which of two or more paths\n * to take based upon the remaining input. It tracks the starting token\n * of the offending input and also knows where the parser was\n * in the various paths when the error. Reported by reportNoViableAlternative()\n */\nclass NoViableAltException extends RecognitionException {\n constructor(recognizer, input, startToken, offendingToken, deadEndConfigs, ctx) {\n ctx = ctx || recognizer._ctx;\n offendingToken = offendingToken || recognizer.getCurrentToken();\n startToken = startToken || recognizer.getCurrentToken();\n input = input || recognizer.getInputStream();\n super({message: \"\", recognizer: recognizer, input: input, ctx: ctx});\n // Which configurations did we try at input.index() that couldn't match\n // input.LT(1)?//\n this.deadEndConfigs = deadEndConfigs;\n // The token object at the start index; the input stream might\n // not be buffering tokens so get a reference to it. 
(At the\n // time the error occurred, of course the stream needs to keep a\n // buffer all of the tokens but later we might not have access to those.)\n this.startToken = startToken;\n this.offendingToken = offendingToken;\n }\n}\n\n/**\n * This signifies any kind of mismatched input exceptions such as\n * when the current input does not match the expected token.\n*/\nclass InputMismatchException extends RecognitionException {\n constructor(recognizer) {\n super({message: \"\", recognizer: recognizer, input: recognizer.getInputStream(), ctx: recognizer._ctx});\n this.offendingToken = recognizer.getCurrentToken();\n }\n}\n\nfunction formatMessage(predicate, message) {\n if (message !==null) {\n return message;\n } else {\n return \"failed predicate: {\" + predicate + \"}?\";\n }\n}\n\n/**\n * A semantic predicate failed during validation. Validation of predicates\n * occurs when normally parsing the alternative just like matching a token.\n * Disambiguating predicate evaluation occurs when we test a predicate during\n * prediction.\n*/\nclass FailedPredicateException extends RecognitionException {\n constructor(recognizer, predicate, message) {\n super({\n message: formatMessage(predicate, message || null), recognizer: recognizer,\n input: recognizer.getInputStream(), ctx: recognizer._ctx\n });\n const s = recognizer._interp.atn.states[recognizer.state]\n const trans = s.transitions[0]\n if (trans instanceof PredicateTransition) {\n this.ruleIndex = trans.ruleIndex;\n this.predicateIndex = trans.predIndex;\n } else {\n this.ruleIndex = 0;\n this.predicateIndex = 0;\n }\n this.predicate = predicate;\n this.offendingToken = recognizer.getCurrentToken();\n }\n}\n\n\nclass ParseCancellationException extends Error{\n constructor() {\n super()\n Error.captureStackTrace(this, ParseCancellationException);\n }\n}\n\nmodule.exports = {\n RecognitionException,\n NoViableAltException,\n LexerNoViableAltException,\n InputMismatchException,\n FailedPredicateException,\n 
ParseCancellationException\n};\n", "/* Copyright (c) 2012-2017 The ANTLR Project. All rights reserved.\n * Use of this file is governed by the BSD 3-clause license that\n * can be found in the LICENSE.txt file in the project root.\n */\n\nconst {Token} = require('./Token');\nconst Recognizer = require('./Recognizer');\nconst CommonTokenFactory = require('./CommonTokenFactory');\nconst {RecognitionException} = require('./error/Errors');\nconst {LexerNoViableAltException} = require('./error/Errors');\n\nclass TokenSource {}\n\n/**\n * A lexer is recognizer that draws input symbols from a character stream.\n * lexer grammars result in a subclass of this object. A Lexer object\n * uses simplified match() and error recovery mechanisms in the interest of speed.\n */\nclass Lexer extends Recognizer {\n\tconstructor(input) {\n\t\tsuper();\n\t\tthis._input = input;\n\t\tthis._factory = CommonTokenFactory.DEFAULT;\n\t\tthis._tokenFactorySourcePair = [ this, input ];\n\n\t\tthis._interp = null; // child classes must populate this\n\n\t\t/**\n\t\t * The goal of all lexer rules/methods is to create a token object.\n\t\t * this is an instance variable as multiple rules may collaborate to\n\t\t * create a single token. nextToken will return this object after\n\t\t * matching lexer rule(s). If you subclass to allow multiple token\n\t\t * emissions, then set this to the last token to be matched or\n\t\t * something nonnull so that the auto token emit mechanism will not\n\t\t * emit another token.\n\t\t */\n\t\tthis._token = null;\n\n\t\t/**\n\t\t * What character index in the stream did the current token start at?\n\t\t * Needed, for example, to get the text for current token. 
Set at\n\t\t * the start of nextToken.\n\t\t */\n\t\tthis._tokenStartCharIndex = -1;\n\n\t\t// The line on which the first character of the token resides///\n\t\tthis._tokenStartLine = -1;\n\n\t\t// The character position of first character within the line///\n\t\tthis._tokenStartColumn = -1;\n\n\t\t// Once we see EOF on char stream, next token will be EOF.\n\t\t// If you have DONE : EOF ; then you see DONE EOF.\n\t\tthis._hitEOF = false;\n\n\t\t// The channel number for the current token///\n\t\tthis._channel = Token.DEFAULT_CHANNEL;\n\n\t\t// The token type for the current token///\n\t\tthis._type = Token.INVALID_TYPE;\n\n\t\tthis._modeStack = [];\n\t\tthis._mode = Lexer.DEFAULT_MODE;\n\n\t\t/**\n\t\t * You can set the text for the current token to override what is in\n\t\t * the input char buffer. Use setText() or can set this instance var.\n\t\t */\n\t\tthis._text = null;\n\t}\n\n\treset() {\n\t\t// wack Lexer state variables\n\t\tif (this._input !== null) {\n\t\t\tthis._input.seek(0); // rewind the input\n\t\t}\n\t\tthis._token = null;\n\t\tthis._type = Token.INVALID_TYPE;\n\t\tthis._channel = Token.DEFAULT_CHANNEL;\n\t\tthis._tokenStartCharIndex = -1;\n\t\tthis._tokenStartColumn = -1;\n\t\tthis._tokenStartLine = -1;\n\t\tthis._text = null;\n\n\t\tthis._hitEOF = false;\n\t\tthis._mode = Lexer.DEFAULT_MODE;\n\t\tthis._modeStack = [];\n\n\t\tthis._interp.reset();\n\t}\n\n// Return a token from this source; i.e., match a token on the char stream.\n\tnextToken() {\n\t\tif (this._input === null) {\n\t\t\tthrow \"nextToken requires a non-null input stream.\";\n\t\t}\n\n\t\t/**\n\t\t * Mark start location in char stream so unbuffered streams are\n\t\t * guaranteed at least have text of current token\n\t\t */\n\t\tconst tokenStartMarker = this._input.mark();\n\t\ttry {\n\t\t\twhile (true) {\n\t\t\t\tif (this._hitEOF) {\n\t\t\t\t\tthis.emitEOF();\n\t\t\t\t\treturn this._token;\n\t\t\t\t}\n\t\t\t\tthis._token = null;\n\t\t\t\tthis._channel = 
Token.DEFAULT_CHANNEL;\n\t\t\t\tthis._tokenStartCharIndex = this._input.index;\n\t\t\t\tthis._tokenStartColumn = this._interp.column;\n\t\t\t\tthis._tokenStartLine = this._interp.line;\n\t\t\t\tthis._text = null;\n\t\t\t\tlet continueOuter = false;\n\t\t\t\twhile (true) {\n\t\t\t\t\tthis._type = Token.INVALID_TYPE;\n\t\t\t\t\tlet ttype = Lexer.SKIP;\n\t\t\t\t\ttry {\n\t\t\t\t\t\tttype = this._interp.match(this._input, this._mode);\n\t\t\t\t\t} catch (e) {\n\t\t\t\t\t\tif(e instanceof RecognitionException) {\n\t\t\t\t\t\t\tthis.notifyListeners(e); // report error\n\t\t\t\t\t\t\tthis.recover(e);\n\t\t\t\t\t\t} else {\n\t\t\t\t\t\t\tconsole.log(e.stack);\n\t\t\t\t\t\t\tthrow e;\n\t\t\t\t\t\t}\n\t\t\t\t\t}\n\t\t\t\t\tif (this._input.LA(1) === Token.EOF) {\n\t\t\t\t\t\tthis._hitEOF = true;\n\t\t\t\t\t}\n\t\t\t\t\tif (this._type === Token.INVALID_TYPE) {\n\t\t\t\t\t\tthis._type = ttype;\n\t\t\t\t\t}\n\t\t\t\t\tif (this._type === Lexer.SKIP) {\n\t\t\t\t\t\tcontinueOuter = true;\n\t\t\t\t\t\tbreak;\n\t\t\t\t\t}\n\t\t\t\t\tif (this._type !== Lexer.MORE) {\n\t\t\t\t\t\tbreak;\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t\tif (continueOuter) {\n\t\t\t\t\tcontinue;\n\t\t\t\t}\n\t\t\t\tif (this._token === null) {\n\t\t\t\t\tthis.emit();\n\t\t\t\t}\n\t\t\t\treturn this._token;\n\t\t\t}\n\t\t} finally {\n\t\t\t// make sure we release marker after match or\n\t\t\t// unbuffered char stream will keep buffering\n\t\t\tthis._input.release(tokenStartMarker);\n\t\t}\n\t}\n\n\t/**\n\t * Instruct the lexer to skip creating a token for current lexer rule\n\t * and look for another token. nextToken() knows to keep looking when\n\t * a lexer rule finishes with token set to SKIP_TOKEN. 
Recall that\n\t * if token==null at end of any token rule, it creates one for you\n\t * and emits it.\n\t */\n\tskip() {\n\t\tthis._type = Lexer.SKIP;\n\t}\n\n\tmore() {\n\t\tthis._type = Lexer.MORE;\n\t}\n\n\tmode(m) {\n\t\tthis._mode = m;\n\t}\n\n\tpushMode(m) {\n\t\tif (this._interp.debug) {\n\t\t\tconsole.log(\"pushMode \" + m);\n\t\t}\n\t\tthis._modeStack.push(this._mode);\n\t\tthis.mode(m);\n\t}\n\n\tpopMode() {\n\t\tif (this._modeStack.length === 0) {\n\t\t\tthrow \"Empty Stack\";\n\t\t}\n\t\tif (this._interp.debug) {\n\t\t\tconsole.log(\"popMode back to \" + this._modeStack.slice(0, -1));\n\t\t}\n\t\tthis.mode(this._modeStack.pop());\n\t\treturn this._mode;\n\t}\n\n\t/**\n\t * By default does not support multiple emits per nextToken invocation\n\t * for efficiency reasons. Subclass and override this method, nextToken,\n\t * and getToken (to push tokens into a list and pull from that list\n\t * rather than a single variable as this implementation does).\n\t */\n\temitToken(token) {\n\t\tthis._token = token;\n\t}\n\n\t/**\n\t * The standard method called to automatically emit a token at the\n\t * outermost lexical rule. The token object should point into the\n\t * char buffer start..stop. If there is a text override in 'text',\n\t * use that to set the token's text. 
Override this method to emit\n\t * custom Token objects or provide a new factory.\n\t */\n\temit() {\n\t\tconst t = this._factory.create(this._tokenFactorySourcePair, this._type,\n\t\t\t\tthis._text, this._channel, this._tokenStartCharIndex, this\n\t\t\t\t\t\t.getCharIndex() - 1, this._tokenStartLine,\n\t\t\t\tthis._tokenStartColumn);\n\t\tthis.emitToken(t);\n\t\treturn t;\n\t}\n\n\temitEOF() {\n\t\tconst cpos = this.column;\n\t\tconst lpos = this.line;\n\t\tconst eof = this._factory.create(this._tokenFactorySourcePair, Token.EOF,\n\t\t\t\tnull, Token.DEFAULT_CHANNEL, this._input.index,\n\t\t\t\tthis._input.index - 1, lpos, cpos);\n\t\tthis.emitToken(eof);\n\t\treturn eof;\n\t}\n\n// What is the index of the current character of lookahead?///\n\tgetCharIndex() {\n\t\treturn this._input.index;\n\t}\n\n\t/**\n\t * Return a list of all Token objects in input char stream.\n\t * Forces load of all tokens. Does not include EOF token.\n\t */\n\tgetAllTokens() {\n\t\tconst tokens = [];\n\t\tlet t = this.nextToken();\n\t\twhile (t.type !== Token.EOF) {\n\t\t\ttokens.push(t);\n\t\t\tt = this.nextToken();\n\t\t}\n\t\treturn tokens;\n\t}\n\n\tnotifyListeners(e) {\n\t\tconst start = this._tokenStartCharIndex;\n\t\tconst stop = this._input.index;\n\t\tconst text = this._input.getText(start, stop);\n\t\tconst msg = \"token recognition error at: '\" + this.getErrorDisplay(text) + \"'\";\n\t\tconst listener = this.getErrorListenerDispatch();\n\t\tlistener.syntaxError(this, null, this._tokenStartLine,\n\t\t\t\tthis._tokenStartColumn, msg, e);\n\t}\n\n\tgetErrorDisplay(s) {\n\t\tconst d = [];\n\t\tfor (let i = 0; i < s.length; i++) {\n\t\t\td.push(s[i]);\n\t\t}\n\t\treturn d.join('');\n\t}\n\n\tgetErrorDisplayForChar(c) {\n\t\tif (c.charCodeAt(0) === Token.EOF) {\n\t\t\treturn \"\";\n\t\t} else if (c === '\\n') {\n\t\t\treturn \"\\\\n\";\n\t\t} else if (c === '\\t') {\n\t\t\treturn \"\\\\t\";\n\t\t} else if (c === '\\r') {\n\t\t\treturn \"\\\\r\";\n\t\t} else {\n\t\t\treturn 
c;\n\t\t}\n\t}\n\n\tgetCharErrorDisplay(c) {\n\t\treturn \"'\" + this.getErrorDisplayForChar(c) + \"'\";\n\t}\n\n\t/**\n\t * Lexers can normally match any char in it's vocabulary after matching\n\t * a token, so do the easy thing and just kill a character and hope\n\t * it all works out. You can instead use the rule invocation stack\n\t * to do sophisticated error recovery if you are in a fragment rule.\n\t */\n\trecover(re) {\n\t\tif (this._input.LA(1) !== Token.EOF) {\n\t\t\tif (re instanceof LexerNoViableAltException) {\n\t\t\t\t// skip a char and try again\n\t\t\t\tthis._interp.consume(this._input);\n\t\t\t} else {\n\t\t\t\t// TODO: Do we lose character or line position information?\n\t\t\t\tthis._input.consume();\n\t\t\t}\n\t\t}\n\t}\n\n\tget inputStream(){\n\t\treturn this._input;\n\t}\n\n\tset inputStream(input) {\n\t\tthis._input = null;\n\t\tthis._tokenFactorySourcePair = [ this, this._input ];\n\t\tthis.reset();\n\t\tthis._input = input;\n\t\tthis._tokenFactorySourcePair = [ this, this._input ];\n\t}\n\n\tget sourceName(){\n\t\treturn this._input.sourceName;\n\t}\n\n\tget type(){\n\t\treturn this.type;\n\t}\n\n\tset type(type) {\n\t\tthis._type = type;\n\t}\n\n\tget line(){\n\t\treturn this._interp.line;\n\t}\n\n\tset line(line) {\n\t\tthis._interp.line = line;\n\t}\n\n\tget column(){\n\t\treturn this._interp.column;\n\t}\n\n\tset column(column) {\n\t\tthis._interp.column = column;\n\t}\n\n\tget text(){\n\t\tif (this._text !== null) {\n\t\t\treturn this._text;\n\t\t} else {\n\t\t\treturn this._interp.getText(this._input);\n\t\t}\n\t}\n\n\tset text(text) {\n\t\tthis._text = text;\n\t}\n}\n\n\n\n\nLexer.DEFAULT_MODE = 0;\nLexer.MORE = -2;\nLexer.SKIP = -3;\n\nLexer.DEFAULT_TOKEN_CHANNEL = Token.DEFAULT_CHANNEL;\nLexer.HIDDEN = Token.HIDDEN_CHANNEL;\nLexer.MIN_CHAR_VALUE = 0x0000;\nLexer.MAX_CHAR_VALUE = 0x10FFFF;\n\n// Set the char stream and reset the lexer\n\n\nmodule.exports = Lexer;\n", "/* Copyright (c) 2012-2017 The ANTLR Project. 
All rights reserved.\n * Use of this file is governed by the BSD 3-clause license that\n * can be found in the LICENSE.txt file in the project root.\n */\n\nconst ATN = require('./ATN');\nconst Utils = require('./../Utils');\nconst {SemanticContext} = require('./SemanticContext');\nconst {merge} = require('./../PredictionContext');\n\nfunction hashATNConfig(c) {\n\treturn c.hashCodeForConfigSet();\n}\n\nfunction equalATNConfigs(a, b) {\n\tif ( a===b ) {\n\t\treturn true;\n\t} else if ( a===null || b===null ) {\n\t\treturn false;\n\t} else\n return a.equalsForConfigSet(b);\n }\n\n/**\n * Specialized {@link Set}{@code <}{@link ATNConfig}{@code >} that can track\n * info about the set, with support for combining similar configurations using a\n * graph-structured stack\n */\nclass ATNConfigSet {\n\tconstructor(fullCtx) {\n\t\t/**\n\t\t * The reason that we need this is because we don't want the hash map to use\n\t\t * the standard hash code and equals. We need all configurations with the\n\t\t * same\n\t\t * {@code (s,i,_,semctx)} to be equal. Unfortunately, this key effectively\n\t\t * doubles\n\t\t * the number of objects associated with ATNConfigs. The other solution is\n\t\t * to\n\t\t * use a hash table that lets us specify the equals/hashcode operation.\n\t\t * All configs but hashed by (s, i, _, pi) not including context. Wiped out\n\t\t * when we go readonly as this set becomes a DFA state\n\t\t */\n\t\tthis.configLookup = new Utils.Set(hashATNConfig, equalATNConfigs);\n\t\t/**\n\t\t * Indicates that this configuration set is part of a full context\n\t\t * LL prediction. It will be used to determine how to merge $. With SLL\n\t\t * it's a wildcard whereas it is not for LL context merge\n\t\t */\n\t\tthis.fullCtx = fullCtx === undefined ? true : fullCtx;\n\t\t/**\n\t\t * Indicates that the set of configurations is read-only. Do not\n\t\t * allow any code to manipulate the set; DFA states will point at\n\t\t * the sets and they must not change. 
This does not protect the other\n\t\t * fields; in particular, conflictingAlts is set after\n\t\t * we've made this readonly\n\t\t */\n\t\tthis.readOnly = false;\n\t\t// Track the elements as they are added to the set; supports get(i)///\n\t\tthis.configs = [];\n\n\t\t// TODO: these fields make me pretty uncomfortable but nice to pack up info\n\t\t// together, saves recomputation\n\t\t// TODO: can we track conflicts as they are added to save scanning configs\n\t\t// later?\n\t\tthis.uniqueAlt = 0;\n\t\tthis.conflictingAlts = null;\n\n\t\t/**\n\t\t * Used in parser and lexer. In lexer, it indicates we hit a pred\n\t\t * while computing a closure operation. Don't make a DFA state from this\n\t\t */\n\t\tthis.hasSemanticContext = false;\n\t\tthis.dipsIntoOuterContext = false;\n\n\t\tthis.cachedHashCode = -1;\n\t}\n\n\t/**\n\t * Adding a new config means merging contexts with existing configs for\n\t * {@code (s, i, pi, _)}, where {@code s} is the\n\t * {@link ATNConfig//state}, {@code i} is the {@link ATNConfig//alt}, and\n\t * {@code pi} is the {@link ATNConfig//semanticContext}. We use\n\t * {@code (s,i,pi)} as key.\n\t *\n\t *

This method updates {@link //dipsIntoOuterContext} and\n\t * {@link //hasSemanticContext} when necessary.

\n\t */\n\tadd(config, mergeCache) {\n\t\tif (mergeCache === undefined) {\n\t\t\tmergeCache = null;\n\t\t}\n\t\tif (this.readOnly) {\n\t\t\tthrow \"This set is readonly\";\n\t\t}\n\t\tif (config.semanticContext !== SemanticContext.NONE) {\n\t\t\tthis.hasSemanticContext = true;\n\t\t}\n\t\tif (config.reachesIntoOuterContext > 0) {\n\t\t\tthis.dipsIntoOuterContext = true;\n\t\t}\n\t\tconst existing = this.configLookup.add(config);\n\t\tif (existing === config) {\n\t\t\tthis.cachedHashCode = -1;\n\t\t\tthis.configs.push(config); // track order here\n\t\t\treturn true;\n\t\t}\n\t\t// a previous (s,i,pi,_), merge with it and save result\n\t\tconst rootIsWildcard = !this.fullCtx;\n\t\tconst merged = merge(existing.context, config.context, rootIsWildcard, mergeCache);\n\t\t/**\n\t\t * no need to check for existing.context, config.context in cache\n\t\t * since only way to create new graphs is \"call rule\" and here. We\n\t\t * cache at both places\n\t\t */\n\t\texisting.reachesIntoOuterContext = Math.max( existing.reachesIntoOuterContext, config.reachesIntoOuterContext);\n\t\t// make sure to preserve the precedence filter suppression during the merge\n\t\tif (config.precedenceFilterSuppressed) {\n\t\t\texisting.precedenceFilterSuppressed = true;\n\t\t}\n\t\texisting.context = merged; // replace context; no need to alt mapping\n\t\treturn true;\n\t}\n\n\tgetStates() {\n\t\tconst states = new Utils.Set();\n\t\tfor (let i = 0; i < this.configs.length; i++) {\n\t\t\tstates.add(this.configs[i].state);\n\t\t}\n\t\treturn states;\n\t}\n\n\tgetPredicates() {\n\t\tconst preds = [];\n\t\tfor (let i = 0; i < this.configs.length; i++) {\n\t\t\tconst c = this.configs[i].semanticContext;\n\t\t\tif (c !== SemanticContext.NONE) {\n\t\t\t\tpreds.push(c.semanticContext);\n\t\t\t}\n\t\t}\n\t\treturn preds;\n\t}\n\n\toptimizeConfigs(interpreter) {\n\t\tif (this.readOnly) {\n\t\t\tthrow \"This set is readonly\";\n\t\t}\n\t\tif (this.configLookup.length === 0) {\n\t\t\treturn;\n\t\t}\n\t\tfor 
(let i = 0; i < this.configs.length; i++) {\n\t\t\tconst config = this.configs[i];\n\t\t\tconfig.context = interpreter.getCachedContext(config.context);\n\t\t}\n\t}\n\n\taddAll(coll) {\n\t\tfor (let i = 0; i < coll.length; i++) {\n\t\t\tthis.add(coll[i]);\n\t\t}\n\t\treturn false;\n\t}\n\n\tequals(other) {\n\t\treturn this === other ||\n\t\t\t(other instanceof ATNConfigSet &&\n\t\t\tUtils.equalArrays(this.configs, other.configs) &&\n\t\t\tthis.fullCtx === other.fullCtx &&\n\t\t\tthis.uniqueAlt === other.uniqueAlt &&\n\t\t\tthis.conflictingAlts === other.conflictingAlts &&\n\t\t\tthis.hasSemanticContext === other.hasSemanticContext &&\n\t\t\tthis.dipsIntoOuterContext === other.dipsIntoOuterContext);\n\t}\n\n\thashCode() {\n\t\tconst hash = new Utils.Hash();\n\t\thash.update(this.configs);\n\t\treturn hash.finish();\n\t}\n\n\tupdateHashCode(hash) {\n\t\tif (this.readOnly) {\n\t\t\tif (this.cachedHashCode === -1) {\n\t\t\t\tthis.cachedHashCode = this.hashCode();\n\t\t\t}\n\t\t\thash.update(this.cachedHashCode);\n\t\t} else {\n\t\t\thash.update(this.hashCode());\n\t\t}\n\t}\n\n\tisEmpty() {\n\t\treturn this.configs.length === 0;\n\t}\n\n\tcontains(item) {\n\t\tif (this.configLookup === null) {\n\t\t\tthrow \"This method is not implemented for readonly sets.\";\n\t\t}\n\t\treturn this.configLookup.contains(item);\n\t}\n\n\tcontainsFast(item) {\n\t\tif (this.configLookup === null) {\n\t\t\tthrow \"This method is not implemented for readonly sets.\";\n\t\t}\n\t\treturn this.configLookup.containsFast(item);\n\t}\n\n\tclear() {\n\t\tif (this.readOnly) {\n\t\t\tthrow \"This set is readonly\";\n\t\t}\n\t\tthis.configs = [];\n\t\tthis.cachedHashCode = -1;\n\t\tthis.configLookup = new Utils.Set();\n\t}\n\n\tsetReadonly(readOnly) {\n\t\tthis.readOnly = readOnly;\n\t\tif (readOnly) {\n\t\t\tthis.configLookup = null; // can't mod, no need for lookup cache\n\t\t}\n\t}\n\n\ttoString() {\n\t\treturn Utils.arrayToString(this.configs) +\n\t\t\t(this.hasSemanticContext ? 
\",hasSemanticContext=\" + this.hasSemanticContext : \"\") +\n\t\t\t(this.uniqueAlt !== ATN.INVALID_ALT_NUMBER ? \",uniqueAlt=\" + this.uniqueAlt : \"\") +\n\t\t\t(this.conflictingAlts !== null ? \",conflictingAlts=\" + this.conflictingAlts : \"\") +\n\t\t\t(this.dipsIntoOuterContext ? \",dipsIntoOuterContext\" : \"\");\n\t}\n\n\tget items(){\n\t\treturn this.configs;\n\t}\n\n\tget length(){\n\t\treturn this.configs.length;\n\t}\n}\n\n\nclass OrderedATNConfigSet extends ATNConfigSet {\n\tconstructor() {\n\t\tsuper();\n\t\tthis.configLookup = new Utils.Set();\n\t}\n}\n\nmodule.exports = {\n\tATNConfigSet,\n\tOrderedATNConfigSet\n}\n", "/* Copyright (c) 2012-2017 The ANTLR Project. All rights reserved.\n * Use of this file is governed by the BSD 3-clause license that\n * can be found in the LICENSE.txt file in the project root.\n */\n\nconst {ATNConfigSet} = require('./../atn/ATNConfigSet');\nconst {Hash, Set} = require('./../Utils');\n\n/**\n * Map a predicate to a predicted alternative.\n */\nclass PredPrediction {\n\tconstructor(pred, alt) {\n\t\tthis.alt = alt;\n\t\tthis.pred = pred;\n\t}\n\n\ttoString() {\n\t\treturn \"(\" + this.pred + \", \" + this.alt + \")\";\n\t}\n}\n\n/**\n * A DFA state represents a set of possible ATN configurations.\n * As Aho, Sethi, Ullman p. 117 says \"The DFA uses its state\n * to keep track of all possible states the ATN can be in after\n * reading each input symbol. That is to say, after reading\n * input a1a2..an, the DFA is in a state that represents the\n * subset T of the states of the ATN that are reachable from the\n * ATN's start state along some path labeled a1a2..an.\"\n * In conventional NFA→DFA conversion, therefore, the subset T\n * would be a bitset representing the set of states the\n * ATN could be in. We need to track the alt predicted by each\n * state as well, however. 
More importantly, we need to maintain\n * a stack of states, tracking the closure operations as they\n * jump from rule to rule, emulating rule invocations (method calls).\n * I have to add a stack to simulate the proper lookahead sequences for\n * the underlying LL grammar from which the ATN was derived.\n *\n *

I use a set of ATNConfig objects not simple states. An ATNConfig\n * is both a state (ala normal conversion) and a RuleContext describing\n * the chain of rules (if any) followed to arrive at that state.

\n *\n *

A DFA state may have multiple references to a particular state,\n * but with different ATN contexts (with same or different alts)\n * meaning that state was reached via a different set of rule invocations.

\n */\nclass DFAState {\n\tconstructor(stateNumber, configs) {\n\t\tif (stateNumber === null) {\n\t\t\tstateNumber = -1;\n\t\t}\n\t\tif (configs === null) {\n\t\t\tconfigs = new ATNConfigSet();\n\t\t}\n\t\tthis.stateNumber = stateNumber;\n\t\tthis.configs = configs;\n\t\t/**\n\t\t * {@code edges[symbol]} points to target of symbol. Shift up by 1 so (-1)\n\t\t * {@link Token//EOF} maps to {@code edges[0]}.\n\t\t */\n\t\tthis.edges = null;\n\t\tthis.isAcceptState = false;\n\t\t/**\n\t\t * if accept state, what ttype do we match or alt do we predict?\n\t\t * This is set to {@link ATN//INVALID_ALT_NUMBER} when {@link//predicates}\n\t\t * {@code !=null} or {@link //requiresFullContext}.\n\t\t */\n\t\tthis.prediction = 0;\n\t\tthis.lexerActionExecutor = null;\n\t\t/**\n\t\t * Indicates that this state was created during SLL prediction that\n\t\t * discovered a conflict between the configurations in the state. Future\n\t\t * {@link ParserATNSimulator//execATN} invocations immediately jumped doing\n\t\t * full context prediction if this field is true.\n\t\t */\n\t\tthis.requiresFullContext = false;\n\t\t/**\n\t\t * During SLL parsing, this is a list of predicates associated with the\n\t\t * ATN configurations of the DFA state. When we have predicates,\n\t\t * {@link //requiresFullContext} is {@code false} since full context\n\t\t * prediction evaluates predicates\n\t\t * on-the-fly. If this is not null, then {@link //prediction} is\n\t\t * {@link ATN//INVALID_ALT_NUMBER}.\n\t\t *\n\t\t *

We only use these for non-{@link //requiresFullContext} but\n\t\t * conflicting states. That\n\t\t * means we know from the context (it's $ or we don't dip into outer\n\t\t * context) that it's an ambiguity not a conflict.

\n\t\t *\n\t\t *

This list is computed by {@link\n\t\t * ParserATNSimulator//predicateDFAState}.

\n\t\t */\n\t\tthis.predicates = null;\n\t\treturn this;\n\t}\n\n\t/**\n\t * Get the set of all alts mentioned by all ATN configurations in this\n\t * DFA state.\n\t */\n\tgetAltSet() {\n\t\tconst alts = new Set();\n\t\tif (this.configs !== null) {\n\t\t\tfor (let i = 0; i < this.configs.length; i++) {\n\t\t\t\tconst c = this.configs[i];\n\t\t\t\talts.add(c.alt);\n\t\t\t}\n\t\t}\n\t\tif (alts.length === 0) {\n\t\t\treturn null;\n\t\t} else {\n\t\t\treturn alts;\n\t\t}\n\t}\n\n\t/**\n\t * Two {@link DFAState} instances are equal if their ATN configuration sets\n\t * are the same. This method is used to see if a state already exists.\n\t *\n\t *

Because the number of alternatives and number of ATN configurations are\n\t * finite, there is a finite number of DFA states that can be processed.\n\t * This is necessary to show that the algorithm terminates.

\n\t *\n\t *

Cannot test the DFA state numbers here because in\n\t * {@link ParserATNSimulator//addDFAState} we need to know if any other state\n\t * exists that has this exact set of ATN configurations. The\n\t * {@link //stateNumber} is irrelevant.

\n\t */\n\tequals(other) {\n\t\t// compare set of ATN configurations in this set with other\n\t\treturn this === other ||\n\t\t\t\t(other instanceof DFAState &&\n\t\t\t\t\tthis.configs.equals(other.configs));\n\t}\n\n\ttoString() {\n\t\tlet s = \"\" + this.stateNumber + \":\" + this.configs;\n\t\tif(this.isAcceptState) {\n\t\t\ts = s + \"=>\";\n\t\t\tif (this.predicates !== null)\n\t\t\t\ts = s + this.predicates;\n\t\t\telse\n\t\t\t\ts = s + this.prediction;\n\t\t}\n\t\treturn s;\n\t}\n\n\thashCode() {\n\t\tconst hash = new Hash();\n\t\thash.update(this.configs);\n\t\treturn hash.finish();\n\t}\n}\n\nmodule.exports = { DFAState, PredPrediction };\n", "/* Copyright (c) 2012-2017 The ANTLR Project. All rights reserved.\n * Use of this file is governed by the BSD 3-clause license that\n * can be found in the LICENSE.txt file in the project root.\n */\n\nconst {DFAState} = require('./../dfa/DFAState');\nconst {ATNConfigSet} = require('./ATNConfigSet');\nconst {getCachedPredictionContext} = require('./../PredictionContext');\nconst {Map} = require('./../Utils');\n\nclass ATNSimulator {\n constructor(atn, sharedContextCache) {\n /**\n * The context cache maps all PredictionContext objects that are ==\n * to a single cached copy. This cache is shared across all contexts\n * in all ATNConfigs in all DFA states. We rebuild each ATNConfigSet\n * to use only cached nodes/graphs in addDFAState(). We don't want to\n * fill this during closure() since there are lots of contexts that\n * pop up but are not used ever again. It also greatly slows down closure().\n *\n *

This cache makes a huge difference in memory and a little bit in speed.\n * For the Java grammar on java.*, it dropped the memory requirements\n * at the end from 25M to 16M. We don't store any of the full context\n * graphs in the DFA because they are limited to local context only,\n * but apparently there's a lot of repetition there as well. We optimize\n * the config contexts before storing the config set in the DFA states\n * by literally rebuilding them with cached subgraphs only.

\n *\n *

I tried a cache for use during closure operations, that was\n * whacked after each adaptivePredict(). It cost a little bit\n * more time I think and doesn't save on the overall footprint\n * so it's not worth the complexity.

\n */\n this.atn = atn;\n this.sharedContextCache = sharedContextCache;\n return this;\n }\n\n getCachedContext(context) {\n if (this.sharedContextCache ===null) {\n return context;\n }\n const visited = new Map();\n return getCachedPredictionContext(context, this.sharedContextCache, visited);\n }\n}\n\n// Must distinguish between missing edge and edge we know leads nowhere///\nATNSimulator.ERROR = new DFAState(0x7FFFFFFF, new ATNConfigSet());\n\n\nmodule.exports = ATNSimulator;\n", "/* Copyright (c) 2012-2017 The ANTLR Project. All rights reserved.\n * Use of this file is governed by the BSD 3-clause license that\n * can be found in the LICENSE.txt file in the project root.\n */\n\nconst {hashStuff} = require(\"../Utils\");\nconst {LexerIndexedCustomAction} = require('./LexerAction');\n\nclass LexerActionExecutor {\n\t/**\n\t * Represents an executor for a sequence of lexer actions which traversed during\n\t * the matching operation of a lexer rule (token).\n\t *\n\t *

The executor tracks position information for position-dependent lexer actions\n\t * efficiently, ensuring that actions appearing only at the end of the rule do\n\t * not cause bloating of the {@link DFA} created for the lexer.

\n\t */\n\tconstructor(lexerActions) {\n\t\tthis.lexerActions = lexerActions === null ? [] : lexerActions;\n\t\t/**\n\t\t * Caches the result of {@link //hashCode} since the hash code is an element\n\t\t * of the performance-critical {@link LexerATNConfig//hashCode} operation\n\t\t */\n\t\tthis.cachedHashCode = hashStuff(lexerActions); // \"\".join([str(la) for la in\n\t\t// lexerActions]))\n\t\treturn this;\n\t}\n\n\t/**\n\t * Creates a {@link LexerActionExecutor} which encodes the current offset\n\t * for position-dependent lexer actions.\n\t *\n\t *

Normally, when the executor encounters lexer actions where\n\t * {@link LexerAction//isPositionDependent} returns {@code true}, it calls\n\t * {@link IntStream//seek} on the input {@link CharStream} to set the input\n\t * position to the end of the current token. This behavior provides\n\t * for efficient DFA representation of lexer actions which appear at the end\n\t * of a lexer rule, even when the lexer rule matches a variable number of\n\t * characters.

\n\t *\n\t *

Prior to traversing a match transition in the ATN, the current offset\n\t * from the token start index is assigned to all position-dependent lexer\n\t * actions which have not already been assigned a fixed offset. By storing\n\t * the offsets relative to the token start index, the DFA representation of\n\t * lexer actions which appear in the middle of tokens remains efficient due\n\t * to sharing among tokens of the same length, regardless of their absolute\n\t * position in the input stream.

\n\t *\n\t *

If the current executor already has offsets assigned to all\n\t * position-dependent lexer actions, the method returns {@code this}.

\n\t *\n\t * @param offset The current offset to assign to all position-dependent\n\t * lexer actions which do not already have offsets assigned.\n\t *\n\t * @return {LexerActionExecutor} A {@link LexerActionExecutor} which stores input stream offsets\n\t * for all position-dependent lexer actions.\n\t */\n\tfixOffsetBeforeMatch(offset) {\n\t\tlet updatedLexerActions = null;\n\t\tfor (let i = 0; i < this.lexerActions.length; i++) {\n\t\t\tif (this.lexerActions[i].isPositionDependent &&\n\t\t\t\t\t!(this.lexerActions[i] instanceof LexerIndexedCustomAction)) {\n\t\t\t\tif (updatedLexerActions === null) {\n\t\t\t\t\tupdatedLexerActions = this.lexerActions.concat([]);\n\t\t\t\t}\n\t\t\t\tupdatedLexerActions[i] = new LexerIndexedCustomAction(offset,\n\t\t\t\t\t\tthis.lexerActions[i]);\n\t\t\t}\n\t\t}\n\t\tif (updatedLexerActions === null) {\n\t\t\treturn this;\n\t\t} else {\n\t\t\treturn new LexerActionExecutor(updatedLexerActions);\n\t\t}\n\t}\n\n\t/**\n\t * Execute the actions encapsulated by this executor within the context of a\n\t * particular {@link Lexer}.\n\t *\n\t *

This method calls {@link IntStream//seek} to set the position of the\n\t * {@code input} {@link CharStream} prior to calling\n\t * {@link LexerAction//execute} on a position-dependent action. Before the\n\t * method returns, the input position will be restored to the same position\n\t * it was in when the method was invoked.

\n\t *\n\t * @param lexer The lexer instance.\n\t * @param input The input stream which is the source for the current token.\n\t * When this method is called, the current {@link IntStream//index} for\n\t * {@code input} should be the start of the following token, i.e. 1\n\t * character past the end of the current token.\n\t * @param startIndex The token start index. This value may be passed to\n\t * {@link IntStream//seek} to set the {@code input} position to the beginning\n\t * of the token.\n\t */\n\texecute(lexer, input, startIndex) {\n\t\tlet requiresSeek = false;\n\t\tconst stopIndex = input.index;\n\t\ttry {\n\t\t\tfor (let i = 0; i < this.lexerActions.length; i++) {\n\t\t\t\tlet lexerAction = this.lexerActions[i];\n\t\t\t\tif (lexerAction instanceof LexerIndexedCustomAction) {\n\t\t\t\t\tconst offset = lexerAction.offset;\n\t\t\t\t\tinput.seek(startIndex + offset);\n\t\t\t\t\tlexerAction = lexerAction.action;\n\t\t\t\t\trequiresSeek = (startIndex + offset) !== stopIndex;\n\t\t\t\t} else if (lexerAction.isPositionDependent) {\n\t\t\t\t\tinput.seek(stopIndex);\n\t\t\t\t\trequiresSeek = false;\n\t\t\t\t}\n\t\t\t\tlexerAction.execute(lexer);\n\t\t\t}\n\t\t} finally {\n\t\t\tif (requiresSeek) {\n\t\t\t\tinput.seek(stopIndex);\n\t\t\t}\n\t\t}\n\t}\n\n\thashCode() {\n\t\treturn this.cachedHashCode;\n\t}\n\n\tupdateHashCode(hash) {\n\t\thash.update(this.cachedHashCode);\n\t}\n\n\tequals(other) {\n\t\tif (this === other) {\n\t\t\treturn true;\n\t\t} else if (!(other instanceof LexerActionExecutor)) {\n\t\t\treturn false;\n\t\t} else if (this.cachedHashCode != other.cachedHashCode) {\n\t\t\treturn false;\n\t\t} else if (this.lexerActions.length != other.lexerActions.length) {\n\t\t\treturn false;\n\t\t} else {\n\t\t\tconst numActions = this.lexerActions.length\n\t\t\tfor (let idx = 0; idx < numActions; ++idx) {\n\t\t\t\tif (!this.lexerActions[idx].equals(other.lexerActions[idx])) {\n\t\t\t\t\treturn false;\n\t\t\t\t}\n\t\t\t}\n\t\t\treturn 
true;\n\t\t}\n\t}\n\n\t/**\n\t * Creates a {@link LexerActionExecutor} which executes the actions for\n\t * the input {@code lexerActionExecutor} followed by a specified\n\t * {@code lexerAction}.\n\t *\n\t * @param lexerActionExecutor The executor for actions already traversed by\n\t * the lexer while matching a token within a particular\n\t * {@link LexerATNConfig}. If this is {@code null}, the method behaves as\n\t * though it were an empty executor.\n\t * @param lexerAction The lexer action to execute after the actions\n\t * specified in {@code lexerActionExecutor}.\n\t *\n\t * @return {LexerActionExecutor} A {@link LexerActionExecutor} for executing the combine actions\n\t * of {@code lexerActionExecutor} and {@code lexerAction}.\n\t */\n\tstatic append(lexerActionExecutor, lexerAction) {\n\t\tif (lexerActionExecutor === null) {\n\t\t\treturn new LexerActionExecutor([ lexerAction ]);\n\t\t}\n\t\tconst lexerActions = lexerActionExecutor.lexerActions.concat([ lexerAction ]);\n\t\treturn new LexerActionExecutor(lexerActions);\n\t}\n}\n\n\nmodule.exports = LexerActionExecutor;\n", "/* Copyright (c) 2012-2017 The ANTLR Project. 
All rights reserved.\n * Use of this file is governed by the BSD 3-clause license that\n * can be found in the LICENSE.txt file in the project root.\n */\n\nconst {Token} = require('./../Token');\nconst Lexer = require('./../Lexer');\nconst ATN = require('./ATN');\nconst ATNSimulator = require('./ATNSimulator');\nconst {DFAState} = require('./../dfa/DFAState');\nconst {OrderedATNConfigSet} = require('./ATNConfigSet');\nconst {PredictionContext} = require('./../PredictionContext');\nconst {SingletonPredictionContext} = require('./../PredictionContext');\nconst {RuleStopState} = require('./ATNState');\nconst {LexerATNConfig} = require('./ATNConfig');\nconst {Transition} = require('./Transition');\nconst LexerActionExecutor = require('./LexerActionExecutor');\nconst {LexerNoViableAltException} = require('./../error/Errors');\n\nfunction resetSimState(sim) {\n\tsim.index = -1;\n\tsim.line = 0;\n\tsim.column = -1;\n\tsim.dfaState = null;\n}\n\nclass SimState {\n\tconstructor() {\n\t\tresetSimState(this);\n\t}\n\n\treset() {\n\t\tresetSimState(this);\n\t}\n}\n\nclass LexerATNSimulator extends ATNSimulator {\n\t/**\n\t * When we hit an accept state in either the DFA or the ATN, we\n\t * have to notify the character stream to start buffering characters\n\t * via {@link IntStream//mark} and record the current state. The current sim state\n\t * includes the current index into the input, the current line,\n\t * and current character position in that line. Note that the Lexer is\n\t * tracking the starting line and characterization of the token. These\n\t * variables track the \"state\" of the simulator when it hits an accept state.\n\t *\n\t *

We track these variables separately for the DFA and ATN simulation\n\t * because the DFA simulation often has to fail over to the ATN\n\t * simulation. If the ATN simulation fails, we need the DFA to fall\n\t * back to its previously accepted state, if any. If the ATN succeeds,\n\t * then the ATN does the accept and the DFA simulator that invoked it\n\t * can simply return the predicted token type.

\n\t */\n\tconstructor(recog, atn, decisionToDFA, sharedContextCache) {\n\t\tsuper(atn, sharedContextCache);\n\t\tthis.decisionToDFA = decisionToDFA;\n\t\tthis.recog = recog;\n\t\t/**\n\t\t * The current token's starting index into the character stream.\n\t\t * Shared across DFA to ATN simulation in case the ATN fails and the\n\t\t * DFA did not have a previous accept state. In this case, we use the\n\t\t * ATN-generated exception object\n\t\t */\n\t\tthis.startIndex = -1;\n\t\t// line number 1..n within the input///\n\t\tthis.line = 1;\n\t\t/**\n\t\t * The index of the character relative to the beginning of the line\n\t\t * 0..n-1\n\t\t */\n\t\tthis.column = 0;\n\t\tthis.mode = Lexer.DEFAULT_MODE;\n\t\t/**\n\t\t * Used during DFA/ATN exec to record the most recent accept configuration\n\t\t * info\n\t\t */\n\t\tthis.prevAccept = new SimState();\n\t}\n\n\tcopyState(simulator) {\n\t\tthis.column = simulator.column;\n\t\tthis.line = simulator.line;\n\t\tthis.mode = simulator.mode;\n\t\tthis.startIndex = simulator.startIndex;\n\t}\n\n\tmatch(input, mode) {\n\t\tthis.match_calls += 1;\n\t\tthis.mode = mode;\n\t\tconst mark = input.mark();\n\t\ttry {\n\t\t\tthis.startIndex = input.index;\n\t\t\tthis.prevAccept.reset();\n\t\t\tconst dfa = this.decisionToDFA[mode];\n\t\t\tif (dfa.s0 === null) {\n\t\t\t\treturn this.matchATN(input);\n\t\t\t} else {\n\t\t\t\treturn this.execATN(input, dfa.s0);\n\t\t\t}\n\t\t} finally {\n\t\t\tinput.release(mark);\n\t\t}\n\t}\n\n\treset() {\n\t\tthis.prevAccept.reset();\n\t\tthis.startIndex = -1;\n\t\tthis.line = 1;\n\t\tthis.column = 0;\n\t\tthis.mode = Lexer.DEFAULT_MODE;\n\t}\n\n\tmatchATN(input) {\n\t\tconst startState = this.atn.modeToStartState[this.mode];\n\n\t\tif (LexerATNSimulator.debug) {\n\t\t\tconsole.log(\"matchATN mode \" + this.mode + \" start: \" + startState);\n\t\t}\n\t\tconst old_mode = this.mode;\n\t\tconst s0_closure = this.computeStartState(input, startState);\n\t\tconst suppressEdge = 
s0_closure.hasSemanticContext;\n\t\ts0_closure.hasSemanticContext = false;\n\n\t\tconst next = this.addDFAState(s0_closure);\n\t\tif (!suppressEdge) {\n\t\t\tthis.decisionToDFA[this.mode].s0 = next;\n\t\t}\n\n\t\tconst predict = this.execATN(input, next);\n\n\t\tif (LexerATNSimulator.debug) {\n\t\t\tconsole.log(\"DFA after matchATN: \" + this.decisionToDFA[old_mode].toLexerString());\n\t\t}\n\t\treturn predict;\n\t}\n\n\texecATN(input, ds0) {\n\t\tif (LexerATNSimulator.debug) {\n\t\t\tconsole.log(\"start state closure=\" + ds0.configs);\n\t\t}\n\t\tif (ds0.isAcceptState) {\n\t\t\t// allow zero-length tokens\n\t\t\tthis.captureSimState(this.prevAccept, input, ds0);\n\t\t}\n\t\tlet t = input.LA(1);\n\t\tlet s = ds0; // s is current/from DFA state\n\n\t\twhile (true) { // while more work\n\t\t\tif (LexerATNSimulator.debug) {\n\t\t\t\tconsole.log(\"execATN loop starting closure: \" + s.configs);\n\t\t\t}\n\n\t\t\t/**\n\t\t\t * As we move src->trg, src->trg, we keep track of the previous trg to\n\t\t\t * avoid looking up the DFA state again, which is expensive.\n\t\t\t * If the previous target was already part of the DFA, we might\n\t\t\t * be able to avoid doing a reach operation upon t. If s!=null,\n\t\t\t * it means that semantic predicates didn't prevent us from\n\t\t\t * creating a DFA state. Once we know s!=null, we check to see if\n\t\t\t * the DFA state has an edge already for t. If so, we can just reuse\n\t\t\t * it's configuration set; there's no point in re-computing it.\n\t\t\t * This is kind of like doing DFA simulation within the ATN\n\t\t\t * simulation because DFA simulation is really just a way to avoid\n\t\t\t * computing reach/closure sets. Technically, once we know that\n\t\t\t * we have a previously added DFA state, we could jump over to\n\t\t\t * the DFA simulator. 
But, that would mean popping back and forth\n\t\t\t * a lot and making things more complicated algorithmically.\n\t\t\t * This optimization makes a lot of sense for loops within DFA.\n\t\t\t * A character will take us back to an existing DFA state\n\t\t\t * that already has lots of edges out of it. e.g., .* in comments.\n\t\t\t * print(\"Target for:\" + str(s) + \" and:\" + str(t))\n\t\t\t */\n\t\t\tlet target = this.getExistingTargetState(s, t);\n\t\t\t// print(\"Existing:\" + str(target))\n\t\t\tif (target === null) {\n\t\t\t\ttarget = this.computeTargetState(input, s, t);\n\t\t\t\t// print(\"Computed:\" + str(target))\n\t\t\t}\n\t\t\tif (target === ATNSimulator.ERROR) {\n\t\t\t\tbreak;\n\t\t\t}\n\t\t\t// If this is a consumable input element, make sure to consume before\n\t\t\t// capturing the accept state so the input index, line, and char\n\t\t\t// position accurately reflect the state of the interpreter at the\n\t\t\t// end of the token.\n\t\t\tif (t !== Token.EOF) {\n\t\t\t\tthis.consume(input);\n\t\t\t}\n\t\t\tif (target.isAcceptState) {\n\t\t\t\tthis.captureSimState(this.prevAccept, input, target);\n\t\t\t\tif (t === Token.EOF) {\n\t\t\t\t\tbreak;\n\t\t\t\t}\n\t\t\t}\n\t\t\tt = input.LA(1);\n\t\t\ts = target; // flip; current DFA target becomes new src/from state\n\t\t}\n\t\treturn this.failOrAccept(this.prevAccept, input, s.configs, t);\n\t}\n\n\t/**\n\t * Get an existing target state for an edge in the DFA. 
If the target state\n\t * for the edge has not yet been computed or is otherwise not available,\n\t * this method returns {@code null}.\n\t *\n\t * @param s The current DFA state\n\t * @param t The next input symbol\n\t * @return The existing target DFA state for the given input symbol\n\t * {@code t}, or {@code null} if the target state for this edge is not\n\t * already cached\n\t */\n\tgetExistingTargetState(s, t) {\n\t\tif (s.edges === null || t < LexerATNSimulator.MIN_DFA_EDGE || t > LexerATNSimulator.MAX_DFA_EDGE) {\n\t\t\treturn null;\n\t\t}\n\n\t\tlet target = s.edges[t - LexerATNSimulator.MIN_DFA_EDGE];\n\t\tif(target===undefined) {\n\t\t\ttarget = null;\n\t\t}\n\t\tif (LexerATNSimulator.debug && target !== null) {\n\t\t\tconsole.log(\"reuse state \" + s.stateNumber + \" edge to \" + target.stateNumber);\n\t\t}\n\t\treturn target;\n\t}\n\n\t/**\n\t * Compute a target state for an edge in the DFA, and attempt to add the\n\t * computed state and corresponding edge to the DFA.\n\t *\n\t * @param input The input stream\n\t * @param s The current DFA state\n\t * @param t The next input symbol\n\t *\n\t * @return The computed target DFA state for the given input symbol\n\t * {@code t}. 
If {@code t} does not lead to a valid DFA state, this method\n\t * returns {@link //ERROR}.\n\t */\n\tcomputeTargetState(input, s, t) {\n\t\tconst reach = new OrderedATNConfigSet();\n\t\t// if we don't find an existing DFA state\n\t\t// Fill reach starting from closure, following t transitions\n\t\tthis.getReachableConfigSet(input, s.configs, reach, t);\n\n\t\tif (reach.items.length === 0) { // we got nowhere on t from s\n\t\t\tif (!reach.hasSemanticContext) {\n\t\t\t\t// we got nowhere on t, don't throw out this knowledge; it'd\n\t\t\t\t// cause a failover from DFA later.\n\t\t\t\tthis.addDFAEdge(s, t, ATNSimulator.ERROR);\n\t\t\t}\n\t\t\t// stop when we can't match any more char\n\t\t\treturn ATNSimulator.ERROR;\n\t\t}\n\t\t// Add an edge from s to target DFA found/created for reach\n\t\treturn this.addDFAEdge(s, t, null, reach);\n\t}\n\n\tfailOrAccept(prevAccept, input, reach, t) {\n\t\tif (this.prevAccept.dfaState !== null) {\n\t\t\tconst lexerActionExecutor = prevAccept.dfaState.lexerActionExecutor;\n\t\t\tthis.accept(input, lexerActionExecutor, this.startIndex,\n\t\t\t\t\tprevAccept.index, prevAccept.line, prevAccept.column);\n\t\t\treturn prevAccept.dfaState.prediction;\n\t\t} else {\n\t\t\t// if no accept and EOF is first char, return EOF\n\t\t\tif (t === Token.EOF && input.index === this.startIndex) {\n\t\t\t\treturn Token.EOF;\n\t\t\t}\n\t\t\tthrow new LexerNoViableAltException(this.recog, input, this.startIndex, reach);\n\t\t}\n\t}\n\n\t/**\n\t * Given a starting configuration set, figure out all ATN configurations\n\t * we can reach upon input {@code t}. 
Parameter {@code reach} is a return\n\t * parameter.\n\t */\n\tgetReachableConfigSet(input, closure,\n\t\t\treach, t) {\n\t\t// this is used to skip processing for configs which have a lower priority\n\t\t// than a config that already reached an accept state for the same rule\n\t\tlet skipAlt = ATN.INVALID_ALT_NUMBER;\n\t\tfor (let i = 0; i < closure.items.length; i++) {\n\t\t\tconst cfg = closure.items[i];\n\t\t\tconst currentAltReachedAcceptState = (cfg.alt === skipAlt);\n\t\t\tif (currentAltReachedAcceptState && cfg.passedThroughNonGreedyDecision) {\n\t\t\t\tcontinue;\n\t\t\t}\n\t\t\tif (LexerATNSimulator.debug) {\n\t\t\t\tconsole.log(\"testing %s at %s\\n\", this.getTokenName(t), cfg\n\t\t\t\t\t\t.toString(this.recog, true));\n\t\t\t}\n\t\t\tfor (let j = 0; j < cfg.state.transitions.length; j++) {\n\t\t\t\tconst trans = cfg.state.transitions[j]; // for each transition\n\t\t\t\tconst target = this.getReachableTarget(trans, t);\n\t\t\t\tif (target !== null) {\n\t\t\t\t\tlet lexerActionExecutor = cfg.lexerActionExecutor;\n\t\t\t\t\tif (lexerActionExecutor !== null) {\n\t\t\t\t\t\tlexerActionExecutor = lexerActionExecutor.fixOffsetBeforeMatch(input.index - this.startIndex);\n\t\t\t\t\t}\n\t\t\t\t\tconst treatEofAsEpsilon = (t === Token.EOF);\n\t\t\t\t\tconst config = new LexerATNConfig({state:target, lexerActionExecutor:lexerActionExecutor}, cfg);\n\t\t\t\t\tif (this.closure(input, config, reach,\n\t\t\t\t\t\t\tcurrentAltReachedAcceptState, true, treatEofAsEpsilon)) {\n\t\t\t\t\t\t// any remaining configs for this alt have a lower priority\n\t\t\t\t\t\t// than the one that just reached an accept state.\n\t\t\t\t\t\tskipAlt = cfg.alt;\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t}\n\n\taccept(input, lexerActionExecutor,\n\t\t\t startIndex, index, line, charPos) {\n\t\t if (LexerATNSimulator.debug) {\n\t\t\t console.log(\"ACTION %s\\n\", lexerActionExecutor);\n\t\t }\n\t\t // seek to after last char in token\n\t\t input.seek(index);\n\t\t this.line = line;\n\t\t 
this.column = charPos;\n\t\t if (lexerActionExecutor !== null && this.recog !== null) {\n\t\t\t lexerActionExecutor.execute(this.recog, input, startIndex);\n\t\t }\n\t }\n\n\tgetReachableTarget(trans, t) {\n\t\tif (trans.matches(t, 0, Lexer.MAX_CHAR_VALUE)) {\n\t\t\treturn trans.target;\n\t\t} else {\n\t\t\treturn null;\n\t\t}\n\t}\n\n\tcomputeStartState(input, p) {\n\t\tconst initialContext = PredictionContext.EMPTY;\n\t\tconst configs = new OrderedATNConfigSet();\n\t\tfor (let i = 0; i < p.transitions.length; i++) {\n\t\t\tconst target = p.transitions[i].target;\n\t\t\tconst cfg = new LexerATNConfig({state:target, alt:i+1, context:initialContext}, null);\n\t\t\tthis.closure(input, cfg, configs, false, false, false);\n\t\t}\n\t\treturn configs;\n\t}\n\n\t/**\n\t * Since the alternatives within any lexer decision are ordered by\n\t * preference, this method stops pursuing the closure as soon as an accept\n\t * state is reached. After the first accept state is reached by depth-first\n\t * search from {@code config}, all other (potentially reachable) states for\n\t * this rule would have a lower priority.\n\t *\n\t * @return {Boolean} {@code true} if an accept state is reached, otherwise\n\t * {@code false}.\n\t */\n\tclosure(input, config, configs,\n\t\t\tcurrentAltReachedAcceptState, speculative, treatEofAsEpsilon) {\n\t\tlet cfg = null;\n\t\tif (LexerATNSimulator.debug) {\n\t\t\tconsole.log(\"closure(\" + config.toString(this.recog, true) + \")\");\n\t\t}\n\t\tif (config.state instanceof RuleStopState) {\n\t\t\tif (LexerATNSimulator.debug) {\n\t\t\t\tif (this.recog !== null) {\n\t\t\t\t\tconsole.log(\"closure at %s rule stop %s\\n\", this.recog.ruleNames[config.state.ruleIndex], config);\n\t\t\t\t} else {\n\t\t\t\t\tconsole.log(\"closure at rule stop %s\\n\", config);\n\t\t\t\t}\n\t\t\t}\n\t\t\tif (config.context === null || config.context.hasEmptyPath()) {\n\t\t\t\tif (config.context === null || config.context.isEmpty()) 
{\n\t\t\t\t\tconfigs.add(config);\n\t\t\t\t\treturn true;\n\t\t\t\t} else {\n\t\t\t\t\tconfigs.add(new LexerATNConfig({ state:config.state, context:PredictionContext.EMPTY}, config));\n\t\t\t\t\tcurrentAltReachedAcceptState = true;\n\t\t\t\t}\n\t\t\t}\n\t\t\tif (config.context !== null && !config.context.isEmpty()) {\n\t\t\t\tfor (let i = 0; i < config.context.length; i++) {\n\t\t\t\t\tif (config.context.getReturnState(i) !== PredictionContext.EMPTY_RETURN_STATE) {\n\t\t\t\t\t\tconst newContext = config.context.getParent(i); // \"pop\" return state\n\t\t\t\t\t\tconst returnState = this.atn.states[config.context.getReturnState(i)];\n\t\t\t\t\t\tcfg = new LexerATNConfig({ state:returnState, context:newContext }, config);\n\t\t\t\t\t\tcurrentAltReachedAcceptState = this.closure(input, cfg,\n\t\t\t\t\t\t\t\tconfigs, currentAltReachedAcceptState, speculative,\n\t\t\t\t\t\t\t\ttreatEofAsEpsilon);\n\t\t\t\t\t}\n\t\t\t\t}\n\t\t\t}\n\t\t\treturn currentAltReachedAcceptState;\n\t\t}\n\t\t// optimization\n\t\tif (!config.state.epsilonOnlyTransitions) {\n\t\t\tif (!currentAltReachedAcceptState || !config.passedThroughNonGreedyDecision) {\n\t\t\t\tconfigs.add(config);\n\t\t\t}\n\t\t}\n\t\tfor (let j = 0; j < config.state.transitions.length; j++) {\n\t\t\tconst trans = config.state.transitions[j];\n\t\t\tcfg = this.getEpsilonTarget(input, config, trans, configs, speculative, treatEofAsEpsilon);\n\t\t\tif (cfg !== null) {\n\t\t\t\tcurrentAltReachedAcceptState = this.closure(input, cfg, configs,\n\t\t\t\t\t\tcurrentAltReachedAcceptState, speculative, treatEofAsEpsilon);\n\t\t\t}\n\t\t}\n\t\treturn currentAltReachedAcceptState;\n\t}\n\n\t// side-effect: can alter configs.hasSemanticContext\n\tgetEpsilonTarget(input, config, trans,\n\t\t\tconfigs, speculative, treatEofAsEpsilon) {\n\t\tlet cfg = null;\n\t\tif (trans.serializationType === Transition.RULE) {\n\t\t\tconst newContext = SingletonPredictionContext.create(config.context, trans.followState.stateNumber);\n\t\t\tcfg = new 
LexerATNConfig( { state:trans.target, context:newContext}, config);\n\t\t} else if (trans.serializationType === Transition.PRECEDENCE) {\n\t\t\tthrow \"Precedence predicates are not supported in lexers.\";\n\t\t} else if (trans.serializationType === Transition.PREDICATE) {\n\t\t\t// Track traversing semantic predicates. If we traverse,\n\t\t\t// we cannot add a DFA state for this \"reach\" computation\n\t\t\t// because the DFA would not test the predicate again in the\n\t\t\t// future. Rather than creating collections of semantic predicates\n\t\t\t// like v3 and testing them on prediction, v4 will test them on the\n\t\t\t// fly all the time using the ATN not the DFA. This is slower but\n\t\t\t// semantically it's not used that often. One of the key elements to\n\t\t\t// this predicate mechanism is not adding DFA states that see\n\t\t\t// predicates immediately afterwards in the ATN. For example,\n\n\t\t\t// a : ID {p1}? | ID {p2}? ;\n\n\t\t\t// should create the start state for rule 'a' (to save start state\n\t\t\t// competition), but should not create target of ID state. The\n\t\t\t// collection of ATN states the following ID references includes\n\t\t\t// states reached by traversing predicates. Since this is when we\n\t\t\t// test them, we cannot cash the DFA state target of ID.\n\n\t\t\tif (LexerATNSimulator.debug) {\n\t\t\t\tconsole.log(\"EVAL rule \" + trans.ruleIndex + \":\" + trans.predIndex);\n\t\t\t}\n\t\t\tconfigs.hasSemanticContext = true;\n\t\t\tif (this.evaluatePredicate(input, trans.ruleIndex, trans.predIndex, speculative)) {\n\t\t\t\tcfg = new LexerATNConfig({ state:trans.target}, config);\n\t\t\t}\n\t\t} else if (trans.serializationType === Transition.ACTION) {\n\t\t\tif (config.context === null || config.context.hasEmptyPath()) {\n\t\t\t\t// execute actions anywhere in the start rule for a token.\n\t\t\t\t//\n\t\t\t\t// TODO: if the entry rule is invoked recursively, some\n\t\t\t\t// actions may be executed during the recursive call. 
The\n\t\t\t\t// problem can appear when hasEmptyPath() is true but\n\t\t\t\t// isEmpty() is false. In this case, the config needs to be\n\t\t\t\t// split into two contexts - one with just the empty path\n\t\t\t\t// and another with everything but the empty path.\n\t\t\t\t// Unfortunately, the current algorithm does not allow\n\t\t\t\t// getEpsilonTarget to return two configurations, so\n\t\t\t\t// additional modifications are needed before we can support\n\t\t\t\t// the split operation.\n\t\t\t\tconst lexerActionExecutor = LexerActionExecutor.append(config.lexerActionExecutor,\n\t\t\t\t\t\tthis.atn.lexerActions[trans.actionIndex]);\n\t\t\t\tcfg = new LexerATNConfig({ state:trans.target, lexerActionExecutor:lexerActionExecutor }, config);\n\t\t\t} else {\n\t\t\t\t// ignore actions in referenced rules\n\t\t\t\tcfg = new LexerATNConfig( { state:trans.target}, config);\n\t\t\t}\n\t\t} else if (trans.serializationType === Transition.EPSILON) {\n\t\t\tcfg = new LexerATNConfig({ state:trans.target}, config);\n\t\t} else if (trans.serializationType === Transition.ATOM ||\n\t\t\t\t\ttrans.serializationType === Transition.RANGE ||\n\t\t\t\t\ttrans.serializationType === Transition.SET) {\n\t\t\tif (treatEofAsEpsilon) {\n\t\t\t\tif (trans.matches(Token.EOF, 0, Lexer.MAX_CHAR_VALUE)) {\n\t\t\t\t\tcfg = new LexerATNConfig( { state:trans.target }, config);\n\t\t\t\t}\n\t\t\t}\n\t\t}\n\t\treturn cfg;\n\t}\n\n\t/**\n\t * Evaluate a predicate specified in the lexer.\n\t *\n\t *

If {@code speculative} is {@code true}, this method was called before\n\t * {@link //consume} for the matched character. This method should call\n\t * {@link //consume} before evaluating the predicate to ensure position\n\t * sensitive values, including {@link Lexer//getText}, {@link Lexer//getLine},\n\t * and {@link Lexer//getcolumn}, properly reflect the current\n\t * lexer state. This method should restore {@code input} and the simulator\n\t * to the original state before returning (i.e. undo the actions made by the\n\t * call to {@link //consume}.

\n\t *\n\t * @param input The input stream.\n\t * @param ruleIndex The rule containing the predicate.\n\t * @param predIndex The index of the predicate within the rule.\n\t * @param speculative {@code true} if the current index in {@code input} is\n\t * one character before the predicate's location.\n\t *\n\t * @return {@code true} if the specified predicate evaluates to\n\t * {@code true}.\n\t */\n\tevaluatePredicate(input, ruleIndex,\n\t\t\tpredIndex, speculative) {\n\t\t// assume true if no recognizer was provided\n\t\tif (this.recog === null) {\n\t\t\treturn true;\n\t\t}\n\t\tif (!speculative) {\n\t\t\treturn this.recog.sempred(null, ruleIndex, predIndex);\n\t\t}\n\t\tconst savedcolumn = this.column;\n\t\tconst savedLine = this.line;\n\t\tconst index = input.index;\n\t\tconst marker = input.mark();\n\t\ttry {\n\t\t\tthis.consume(input);\n\t\t\treturn this.recog.sempred(null, ruleIndex, predIndex);\n\t\t} finally {\n\t\t\tthis.column = savedcolumn;\n\t\t\tthis.line = savedLine;\n\t\t\tinput.seek(index);\n\t\t\tinput.release(marker);\n\t\t}\n\t}\n\n\tcaptureSimState(settings, input, dfaState) {\n\t\tsettings.index = input.index;\n\t\tsettings.line = this.line;\n\t\tsettings.column = this.column;\n\t\tsettings.dfaState = dfaState;\n\t}\n\n\taddDFAEdge(from_, tk, to, cfgs) {\n\t\tif (to === undefined) {\n\t\t\tto = null;\n\t\t}\n\t\tif (cfgs === undefined) {\n\t\t\tcfgs = null;\n\t\t}\n\t\tif (to === null && cfgs !== null) {\n\t\t\t// leading to this call, ATNConfigSet.hasSemanticContext is used as a\n\t\t\t// marker indicating dynamic predicate evaluation makes this edge\n\t\t\t// dependent on the specific input sequence, so the static edge in the\n\t\t\t// DFA should be omitted. 
The target DFAState is still created since\n\t\t\t// execATN has the ability to resynchronize with the DFA state cache\n\t\t\t// following the predicate evaluation step.\n\t\t\t//\n\t\t\t// TJP notes: next time through the DFA, we see a pred again and eval.\n\t\t\t// If that gets us to a previously created (but dangling) DFA\n\t\t\t// state, we can continue in pure DFA mode from there.\n\t\t\t// /\n\t\t\tconst suppressEdge = cfgs.hasSemanticContext;\n\t\t\tcfgs.hasSemanticContext = false;\n\n\t\t\tto = this.addDFAState(cfgs);\n\n\t\t\tif (suppressEdge) {\n\t\t\t\treturn to;\n\t\t\t}\n\t\t}\n\t\t// add the edge\n\t\tif (tk < LexerATNSimulator.MIN_DFA_EDGE || tk > LexerATNSimulator.MAX_DFA_EDGE) {\n\t\t\t// Only track edges within the DFA bounds\n\t\t\treturn to;\n\t\t}\n\t\tif (LexerATNSimulator.debug) {\n\t\t\tconsole.log(\"EDGE \" + from_ + \" -> \" + to + \" upon \" + tk);\n\t\t}\n\t\tif (from_.edges === null) {\n\t\t\t// make room for tokens 1..n and -1 masquerading as index 0\n\t\t\tfrom_.edges = [];\n\t\t}\n\t\tfrom_.edges[tk - LexerATNSimulator.MIN_DFA_EDGE] = to; // connect\n\n\t\treturn to;\n\t}\n\n\t/**\n\t * Add a new DFA state if there isn't one with this set of\n\t * configurations already. This method also detects the first\n\t * configuration containing an ATN rule stop state. 
Later, when\n\t * traversing the DFA, we will know which rule to accept.\n\t */\n\taddDFAState(configs) {\n\t\tconst proposed = new DFAState(null, configs);\n\t\tlet firstConfigWithRuleStopState = null;\n\t\tfor (let i = 0; i < configs.items.length; i++) {\n\t\t\tconst cfg = configs.items[i];\n\t\t\tif (cfg.state instanceof RuleStopState) {\n\t\t\t\tfirstConfigWithRuleStopState = cfg;\n\t\t\t\tbreak;\n\t\t\t}\n\t\t}\n\t\tif (firstConfigWithRuleStopState !== null) {\n\t\t\tproposed.isAcceptState = true;\n\t\t\tproposed.lexerActionExecutor = firstConfigWithRuleStopState.lexerActionExecutor;\n\t\t\tproposed.prediction = this.atn.ruleToTokenType[firstConfigWithRuleStopState.state.ruleIndex];\n\t\t}\n\t\tconst dfa = this.decisionToDFA[this.mode];\n\t\tconst existing = dfa.states.get(proposed);\n\t\tif (existing!==null) {\n\t\t\treturn existing;\n\t\t}\n\t\tconst newState = proposed;\n\t\tnewState.stateNumber = dfa.states.length;\n\t\tconfigs.setReadonly(true);\n\t\tnewState.configs = configs;\n\t\tdfa.states.add(newState);\n\t\treturn newState;\n\t}\n\n\tgetDFA(mode) {\n\t\treturn this.decisionToDFA[mode];\n\t}\n\n// Get the text matched so far for the current token.\n\tgetText(input) {\n\t\t// index is first lookahead char, don't include.\n\t\treturn input.getText(this.startIndex, input.index - 1);\n\t}\n\n\tconsume(input) {\n\t\tconst curChar = input.LA(1);\n\t\tif (curChar === \"\\n\".charCodeAt(0)) {\n\t\t\tthis.line += 1;\n\t\t\tthis.column = 0;\n\t\t} else {\n\t\t\tthis.column += 1;\n\t\t}\n\t\tinput.consume();\n\t}\n\n\tgetTokenName(tt) {\n\t\tif (tt === -1) {\n\t\t\treturn \"EOF\";\n\t\t} else {\n\t\t\treturn \"'\" + String.fromCharCode(tt) + \"'\";\n\t\t}\n\t}\n}\n\nLexerATNSimulator.debug = false;\nLexerATNSimulator.dfa_debug = false;\n\nLexerATNSimulator.MIN_DFA_EDGE = 0;\nLexerATNSimulator.MAX_DFA_EDGE = 127; // forces unicode to stay in ATN\n\nLexerATNSimulator.match_calls = 0;\n\nmodule.exports = LexerATNSimulator;\n", "/* Copyright (c) 2012-2017 The ANTLR 
Project. All rights reserved.\n * Use of this file is governed by the BSD 3-clause license that\n * can be found in the LICENSE.txt file in the project root.\n */\n\nconst {Map, BitSet, AltDict, hashStuff} = require('./../Utils');\nconst ATN = require('./ATN');\nconst {RuleStopState} = require('./ATNState');\nconst {ATNConfigSet} = require('./ATNConfigSet');\nconst {ATNConfig} = require('./ATNConfig');\nconst {SemanticContext} = require('./SemanticContext');\n\n/**\n * This enumeration defines the prediction modes available in ANTLR 4 along with\n * utility methods for analyzing configuration sets for conflicts and/or\n * ambiguities.\n */\nconst PredictionMode = {\n /**\n * The SLL(*) prediction mode. This prediction mode ignores the current\n * parser context when making predictions. This is the fastest prediction\n * mode, and provides correct results for many grammars. This prediction\n * mode is more powerful than the prediction mode provided by ANTLR 3, but\n * may result in syntax errors for grammar and input combinations which are\n * not SLL.\n *\n *

\n * When using this prediction mode, the parser will either return a correct\n * parse tree (i.e. the same parse tree that would be returned with the\n * {@link //LL} prediction mode), or it will report a syntax error. If a\n * syntax error is encountered when using the {@link //SLL} prediction mode,\n * it may be due to either an actual syntax error in the input or indicate\n * that the particular combination of grammar and input requires the more\n * powerful {@link //LL} prediction abilities to complete successfully.

\n *\n *

\n * This prediction mode does not provide any guarantees for prediction\n * behavior for syntactically-incorrect inputs.

\n */\n SLL: 0,\n\n /**\n * The LL(*) prediction mode. This prediction mode allows the current parser\n * context to be used for resolving SLL conflicts that occur during\n * prediction. This is the fastest prediction mode that guarantees correct\n * parse results for all combinations of grammars with syntactically correct\n * inputs.\n *\n *

\n * When using this prediction mode, the parser will make correct decisions\n * for all syntactically-correct grammar and input combinations. However, in\n * cases where the grammar is truly ambiguous this prediction mode might not\n * report a precise answer for exactly which alternatives are\n * ambiguous.

\n *\n *

\n * This prediction mode does not provide any guarantees for prediction\n * behavior for syntactically-incorrect inputs.

\n */\n LL: 1,\n\n /**\n *\n * The LL(*) prediction mode with exact ambiguity detection. In addition to\n * the correctness guarantees provided by the {@link //LL} prediction mode,\n * this prediction mode instructs the prediction algorithm to determine the\n * complete and exact set of ambiguous alternatives for every ambiguous\n * decision encountered while parsing.\n *\n *

\n * This prediction mode may be used for diagnosing ambiguities during\n * grammar development. Due to the performance overhead of calculating sets\n * of ambiguous alternatives, this prediction mode should be avoided when\n * the exact results are not necessary.

\n *\n *

\n * This prediction mode does not provide any guarantees for prediction\n * behavior for syntactically-incorrect inputs.

\n */\n LL_EXACT_AMBIG_DETECTION: 2,\n\n /**\n *\n * Computes the SLL prediction termination condition.\n *\n *

\n * This method computes the SLL prediction termination condition for both of\n * the following cases.

\n *\n *
    \n *
  • The usual SLL+LL fallback upon SLL conflict
  • \n *
  • Pure SLL without LL fallback
  • \n *
\n *\n *

COMBINED SLL+LL PARSING

\n *\n *

When LL-fallback is enabled upon SLL conflict, correct predictions are\n * ensured regardless of how the termination condition is computed by this\n * method. Due to the substantially higher cost of LL prediction, the\n * prediction should only fall back to LL when the additional lookahead\n * cannot lead to a unique SLL prediction.

\n *\n *

Assuming combined SLL+LL parsing, an SLL configuration set with only\n * conflicting subsets should fall back to full LL, even if the\n * configuration sets don't resolve to the same alternative (e.g.\n * {@code {1,2}} and {@code {3,4}}. If there is at least one non-conflicting\n * configuration, SLL could continue with the hopes that more lookahead will\n * resolve via one of those non-conflicting configurations.

\n *\n *

Here's the prediction termination rule them: SLL (for SLL+LL parsing)\n * stops when it sees only conflicting configuration subsets. In contrast,\n * full LL keeps going when there is uncertainty.

\n *\n *

HEURISTIC

\n *\n *

As a heuristic, we stop prediction when we see any conflicting subset\n * unless we see a state that only has one alternative associated with it.\n * The single-alt-state thing lets prediction continue upon rules like\n * (otherwise, it would admit defeat too soon):

\n *\n *

{@code [12|1|[], 6|2|[], 12|2|[]]. s : (ID | ID ID?) ';' ;}

\n *\n *

When the ATN simulation reaches the state before {@code ';'}, it has a\n * DFA state that looks like: {@code [12|1|[], 6|2|[], 12|2|[]]}. Naturally\n * {@code 12|1|[]} and {@code 12|2|[]} conflict, but we cannot stop\n * processing this node because alternative to has another way to continue,\n * via {@code [6|2|[]]}.

\n *\n *

It also let's us continue for this rule:

\n *\n *

{@code [1|1|[], 1|2|[], 8|3|[]] a : A | A | A B ;}

\n *\n *

After matching input A, we reach the stop state for rule A, state 1.\n * State 8 is the state right before B. Clearly alternatives 1 and 2\n * conflict and no amount of further lookahead will separate the two.\n * However, alternative 3 will be able to continue and so we do not stop\n * working on this state. In the previous example, we're concerned with\n * states associated with the conflicting alternatives. Here alt 3 is not\n * associated with the conflicting configs, but since we can continue\n * looking for input reasonably, don't declare the state done.

\n *\n *

PURE SLL PARSING

\n *\n *

To handle pure SLL parsing, all we have to do is make sure that we\n * combine stack contexts for configurations that differ only by semantic\n * predicate. From there, we can do the usual SLL termination heuristic.

\n *\n *

PREDICATES IN SLL+LL PARSING

\n *\n *

SLL decisions don't evaluate predicates until after they reach DFA stop\n * states because they need to create the DFA cache that works in all\n * semantic situations. In contrast, full LL evaluates predicates collected\n * during start state computation so it can ignore predicates thereafter.\n * This means that SLL termination detection can totally ignore semantic\n * predicates.

\n *\n *

Implementation-wise, {@link ATNConfigSet} combines stack contexts but not\n * semantic predicate contexts so we might see two configurations like the\n * following.

\n *\n *

{@code (s, 1, x, {}), (s, 1, x', {p})}

\n *\n *

Before testing these configurations against others, we have to merge\n * {@code x} and {@code x'} (without modifying the existing configurations).\n * For example, we test {@code (x+x')==x''} when looking for conflicts in\n * the following configurations.

\n *\n *

{@code (s, 1, x, {}), (s, 1, x', {p}), (s, 2, x'', {})}

\n *\n *

If the configuration set has predicates (as indicated by\n * {@link ATNConfigSet//hasSemanticContext}), this algorithm makes a copy of\n * the configurations to strip out all of the predicates so that a standard\n * {@link ATNConfigSet} will merge everything ignoring predicates.

\n */\n hasSLLConflictTerminatingPrediction: function( mode, configs) {\n // Configs in rule stop states indicate reaching the end of the decision\n // rule (local context) or end of start rule (full context). If all\n // configs meet this condition, then none of the configurations is able\n // to match additional input so we terminate prediction.\n //\n if (PredictionMode.allConfigsInRuleStopStates(configs)) {\n return true;\n }\n // pure SLL mode parsing\n if (mode === PredictionMode.SLL) {\n // Don't bother with combining configs from different semantic\n // contexts if we can fail over to full LL; costs more time\n // since we'll often fail over anyway.\n if (configs.hasSemanticContext) {\n // dup configs, tossing out semantic predicates\n const dup = new ATNConfigSet();\n for(let i=0;iCan we stop looking ahead during ATN simulation or is there some\n * uncertainty as to which alternative we will ultimately pick, after\n * consuming more input? Even if there are partial conflicts, we might know\n * that everything is going to resolve to the same minimum alternative. That\n * means we can stop since no more lookahead will change that fact. On the\n * other hand, there might be multiple conflicts that resolve to different\n * minimums. That means we need more look ahead to decide which of those\n * alternatives we should predict.

\n *\n *

The basic idea is to split the set of configurations {@code C}, into\n * conflicting subsets {@code (s, _, ctx, _)} and singleton subsets with\n * non-conflicting configurations. Two configurations conflict if they have\n * identical {@link ATNConfig//state} and {@link ATNConfig//context} values\n * but different {@link ATNConfig//alt} value, e.g. {@code (s, i, ctx, _)}\n * and {@code (s, j, ctx, _)} for {@code i!=j}.

\n *\n *

Reduce these configuration subsets to the set of possible alternatives.\n * You can compute the alternative subsets in one pass as follows:

\n *\n *

{@code A_s,ctx = {i | (s, i, ctx, _)}} for each configuration in\n * {@code C} holding {@code s} and {@code ctx} fixed.

\n *\n *

Or in pseudo-code, for each configuration {@code c} in {@code C}:

\n *\n *
\n     * map[c] U= c.{@link ATNConfig//alt alt} // map hash/equals uses s and x, not\n     * alt and not pred\n     * 
\n *\n *

The values in {@code map} are the set of {@code A_s,ctx} sets.

\n *\n *

If {@code |A_s,ctx|=1} then there is no conflict associated with\n * {@code s} and {@code ctx}.

\n *\n *

Reduce the subsets to singletons by choosing a minimum of each subset. If\n * the union of these alternative subsets is a singleton, then no amount of\n * more lookahead will help us. We will always pick that alternative. If,\n * however, there is more than one alternative, then we are uncertain which\n * alternative to predict and must continue looking for resolution. We may\n * or may not discover an ambiguity in the future, even if there are no\n * conflicting subsets this round.

\n *\n *

The biggest sin is to terminate early because it means we've made a\n * decision but were uncertain as to the eventual outcome. We haven't used\n * enough lookahead. On the other hand, announcing a conflict too late is no\n * big deal; you will still have the conflict. It's just inefficient. It\n * might even look until the end of file.

\n *\n *

No special consideration for semantic predicates is required because\n * predicates are evaluated on-the-fly for full LL prediction, ensuring that\n * no configuration contains a semantic context during the termination\n * check.

\n *\n *

CONFLICTING CONFIGS

\n *\n *

Two configurations {@code (s, i, x)} and {@code (s, j, x')}, conflict\n * when {@code i!=j} but {@code x=x'}. Because we merge all\n * {@code (s, i, _)} configurations together, that means that there are at\n * most {@code n} configurations associated with state {@code s} for\n * {@code n} possible alternatives in the decision. The merged stacks\n * complicate the comparison of configuration contexts {@code x} and\n * {@code x'}. Sam checks to see if one is a subset of the other by calling\n * merge and checking to see if the merged result is either {@code x} or\n * {@code x'}. If the {@code x} associated with lowest alternative {@code i}\n * is the superset, then {@code i} is the only possible prediction since the\n * others resolve to {@code min(i)} as well. However, if {@code x} is\n * associated with {@code j>i} then at least one stack configuration for\n * {@code j} is not in conflict with alternative {@code i}. The algorithm\n * should keep going, looking for more lookahead due to the uncertainty.

\n *\n *

For simplicity, I'm doing a equality check between {@code x} and\n * {@code x'} that lets the algorithm continue to consume lookahead longer\n * than necessary. The reason I like the equality is of course the\n * simplicity but also because that is the test you need to detect the\n * alternatives that are actually in conflict.

\n *\n *

CONTINUE/STOP RULE

\n *\n *

Continue if union of resolved alternative sets from non-conflicting and\n * conflicting alternative subsets has more than one alternative. We are\n * uncertain about which alternative to predict.

\n *\n *

The complete set of alternatives, {@code [i for (_,i,_)]}, tells us which\n * alternatives are still in the running for the amount of input we've\n * consumed at this point. The conflicting sets let us to strip away\n * configurations that won't lead to more states because we resolve\n * conflicts to the configuration with a minimum alternate for the\n * conflicting set.

\n *\n *

CASES

\n *\n *
    \n *\n *
  • no conflicts and more than 1 alternative in set => continue
  • \n *\n *
  • {@code (s, 1, x)}, {@code (s, 2, x)}, {@code (s, 3, z)},\n * {@code (s', 1, y)}, {@code (s', 2, y)} yields non-conflicting set\n * {@code {3}} U conflicting sets {@code min({1,2})} U {@code min({1,2})} =\n * {@code {1,3}} => continue\n *
  • \n *\n *
  • {@code (s, 1, x)}, {@code (s, 2, x)}, {@code (s', 1, y)},\n * {@code (s', 2, y)}, {@code (s'', 1, z)} yields non-conflicting set\n * {@code {1}} U conflicting sets {@code min({1,2})} U {@code min({1,2})} =\n * {@code {1}} => stop and predict 1
  • \n *\n *
  • {@code (s, 1, x)}, {@code (s, 2, x)}, {@code (s', 1, y)},\n * {@code (s', 2, y)} yields conflicting, reduced sets {@code {1}} U\n * {@code {1}} = {@code {1}} => stop and predict 1, can announce\n * ambiguity {@code {1,2}}
  • \n *\n *
  • {@code (s, 1, x)}, {@code (s, 2, x)}, {@code (s', 2, y)},\n * {@code (s', 3, y)} yields conflicting, reduced sets {@code {1}} U\n * {@code {2}} = {@code {1,2}} => continue
  • \n *\n *
  • {@code (s, 1, x)}, {@code (s, 2, x)}, {@code (s', 3, y)},\n * {@code (s', 4, y)} yields conflicting, reduced sets {@code {1}} U\n * {@code {3}} = {@code {1,3}} => continue
  • \n *\n *
\n *\n *

EXACT AMBIGUITY DETECTION

\n *\n *

If all states report the same conflicting set of alternatives, then we\n * know we have the exact ambiguity set.

\n *\n *

|A_i|>1 and\n * A_i = A_j for all i, j.

\n *\n *

In other words, we continue examining lookahead until all {@code A_i}\n * have more than one alternative and all {@code A_i} are the same. If\n * {@code A={{1,2}, {1,3}}}, then regular LL prediction would terminate\n * because the resolved set is {@code {1}}. To determine what the real\n * ambiguity is, we have to know whether the ambiguity is between one and\n * two or one and three so we keep going. We can only stop prediction when\n * we need exact ambiguity detection when the sets look like\n * {@code A={{1,2}}} or {@code {{1,2},{1,2}}}, etc...

\n */\n resolvesToJustOneViableAlt: function(altsets) {\n return PredictionMode.getSingleViableAlt(altsets);\n },\n\n /**\n * Determines if every alternative subset in {@code altsets} contains more\n * than one alternative.\n *\n * @param altsets a collection of alternative subsets\n * @return {@code true} if every {@link BitSet} in {@code altsets} has\n * {@link BitSet//cardinality cardinality} > 1, otherwise {@code false}\n */\n allSubsetsConflict: function(altsets) {\n return ! PredictionMode.hasNonConflictingAltSet(altsets);\n },\n /**\n * Determines if any single alternative subset in {@code altsets} contains\n * exactly one alternative.\n *\n * @param altsets a collection of alternative subsets\n * @return {@code true} if {@code altsets} contains a {@link BitSet} with\n * {@link BitSet//cardinality cardinality} 1, otherwise {@code false}\n */\n hasNonConflictingAltSet: function(altsets) {\n for(let i=0;i1) {\n return true;\n }\n }\n return false;\n },\n\n\n /**\n * Determines if every alternative subset in {@code altsets} is equivalent.\n *\n * @param altsets a collection of alternative subsets\n * @return {@code true} if every member of {@code altsets} is equal to the\n * others, otherwise {@code false}\n */\n allSubsetsEqual: function(altsets) {\n let first = null;\n for(let i=0;i\n * map[c] U= c.{@link ATNConfig//alt alt} // map hash/equals uses s and x, not\n * alt and not pred\n * \n */\n getConflictingAltSubsets: function(configs) {\n const configToAlts = new Map();\n configToAlts.hashFunction = function(cfg) { hashStuff(cfg.state.stateNumber, cfg.context); };\n configToAlts.equalsFunction = function(c1, c2) { return c1.state.stateNumber === c2.state.stateNumber && c1.context.equals(c2.context);};\n configs.items.map(function(cfg) {\n let alts = configToAlts.get(cfg);\n if (alts === null) {\n alts = new BitSet();\n configToAlts.put(cfg, alts);\n }\n alts.add(cfg.alt);\n });\n return configToAlts.getValues();\n },\n\n /**\n * Get a map from state to alt 
subset from a configuration set. For each\n * configuration {@code c} in {@code configs}:\n *\n *
\n     * map[c.{@link ATNConfig//state state}] U= c.{@link ATNConfig//alt alt}\n     * 
\n */\n getStateToAltMap: function(configs) {\n const m = new AltDict();\n configs.items.map(function(c) {\n let alts = m.get(c.state);\n if (alts === null) {\n alts = new BitSet();\n m.put(c.state, alts);\n }\n alts.add(c.alt);\n });\n return m;\n },\n\n hasStateAssociatedWithOneAlt: function(configs) {\n const values = PredictionMode.getStateToAltMap(configs).values();\n for(let i=0;i= this.children.length) {\n\t\t\treturn null;\n\t\t}\n\t\tif (type === null) {\n\t\t\treturn this.children[i];\n\t\t} else {\n\t\t\tfor(let j=0; j= this.children.length) {\n\t\t\treturn null;\n\t\t}\n\t\tfor(let j=0; j\n * The basic complexity of the adaptive strategy makes it harder to understand.\n * We begin with ATN simulation to build paths in a DFA. Subsequent prediction\n * requests go through the DFA first. If they reach a state without an edge for\n * the current symbol, the algorithm fails over to the ATN simulation to\n * complete the DFA path for the current input (until it finds a conflict state\n * or uniquely predicting state).

\n *\n *

\n * All of that is done without using the outer context because we want to create\n * a DFA that is not dependent upon the rule invocation stack when we do a\n * prediction. One DFA works in all contexts. We avoid using context not\n * necessarily because it's slower, although it can be, but because of the DFA\n * caching problem. The closure routine only considers the rule invocation stack\n * created during prediction beginning in the decision rule. For example, if\n * prediction occurs without invoking another rule's ATN, there are no context\n * stacks in the configurations. When lack of context leads to a conflict, we\n * don't know if it's an ambiguity or a weakness in the strong LL(*) parsing\n * strategy (versus full LL(*)).

\n *\n *

\n * When SLL yields a configuration set with conflict, we rewind the input and\n * retry the ATN simulation, this time using full outer context without adding\n * to the DFA. Configuration context stacks will be the full invocation stacks\n * from the start rule. If we get a conflict using full context, then we can\n * definitively say we have a true ambiguity for that input sequence. If we\n * don't get a conflict, it implies that the decision is sensitive to the outer\n * context. (It is not context-sensitive in the sense of context-sensitive\n * grammars.)

\n *\n *

\n * The next time we reach this DFA state with an SLL conflict, through DFA\n * simulation, we will again retry the ATN simulation using full context mode.\n * This is slow because we can't save the results and have to \"interpret\" the\n * ATN each time we get that input.

\n *\n *

\n * CACHING FULL CONTEXT PREDICTIONS

\n *\n *

\n * We could cache results from full context to predicted alternative easily and\n * that saves a lot of time but doesn't work in presence of predicates. The set\n * of visible predicates from the ATN start state changes depending on the\n * context, because closure can fall off the end of a rule. I tried to cache\n * tuples (stack context, semantic context, predicted alt) but it was slower\n * than interpreting and much more complicated. Also required a huge amount of\n * memory. The goal is not to create the world's fastest parser anyway. I'd like\n * to keep this algorithm simple. By launching multiple threads, we can improve\n * the speed of parsing across a large number of files.

\n *\n *

\n * There is no strict ordering between the amount of input used by SLL vs LL,\n * which makes it really hard to build a cache for full context. Let's say that\n * we have input A B C that leads to an SLL conflict with full context X. That\n * implies that using X we might only use A B but we could also use A B C D to\n * resolve conflict. Input A B C D could predict alternative 1 in one position\n * in the input and A B C E could predict alternative 2 in another position in\n * input. The conflicting SLL configurations could still be non-unique in the\n * full context prediction, which would lead us to requiring more input than the\n * original A B C.\tTo make a\tprediction cache work, we have to track\tthe exact\n * input\tused during the previous prediction. That amounts to a cache that maps\n * X to a specific DFA for that context.

\n *\n *

\n * Something should be done for left-recursive expression predictions. They are\n * likely LL(1) + pred eval. Easier to do the whole SLL unless error and retry\n * with full LL thing Sam does.

\n *\n *

\n * AVOIDING FULL CONTEXT PREDICTION

\n *\n *

\n * We avoid doing full context retry when the outer context is empty, we did not\n * dip into the outer context by falling off the end of the decision state rule,\n * or when we force SLL mode.

\n *\n *

\n * As an example of the not dip into outer context case, consider as super\n * constructor calls versus function calls. One grammar might look like\n * this:

\n *\n *
\n * ctorBody\n *   : '{' superCall? stat* '}'\n *   ;\n * 
\n *\n *

\n * Or, you might see something like

\n *\n *
\n * stat\n *   : superCall ';'\n *   | expression ';'\n *   | ...\n *   ;\n * 
\n *\n *

\n * In both cases I believe that no closure operations will dip into the outer\n * context. In the first case ctorBody in the worst case will stop at the '}'.\n * In the 2nd case it should stop at the ';'. Both cases should stay within the\n * entry rule and not dip into the outer context.

\n *\n *

\n * PREDICATES

\n *\n *

\n * Predicates are always evaluated if present in either SLL or LL both. SLL and\n * LL simulation deals with predicates differently. SLL collects predicates as\n * it performs closure operations like ANTLR v3 did. It delays predicate\n * evaluation until it reaches and accept state. This allows us to cache the SLL\n * ATN simulation whereas, if we had evaluated predicates on-the-fly during\n * closure, the DFA state configuration sets would be different and we couldn't\n * build up a suitable DFA.

\n *\n *

\n * When building a DFA accept state during ATN simulation, we evaluate any\n * predicates and return the sole semantically valid alternative. If there is\n * more than 1 alternative, we report an ambiguity. If there are 0 alternatives,\n * we throw an exception. Alternatives without predicates act like they have\n * true predicates. The simple way to think about it is to strip away all\n * alternatives with false predicates and choose the minimum alternative that\n * remains.

\n *\n *

\n * When we start in the DFA and reach an accept state that's predicated, we test\n * those and return the minimum semantically viable alternative. If no\n * alternatives are viable, we throw an exception.

\n *\n *

\n * During full LL ATN simulation, closure always evaluates predicates and\n * on-the-fly. This is crucial to reducing the configuration set size during\n * closure. It hits a landmine when parsing with the Java grammar, for example,\n * without this on-the-fly evaluation.

\n *\n *

\n * SHARING DFA

\n *\n *

\n * All instances of the same parser share the same decision DFAs through a\n * static field. Each instance gets its own ATN simulator but they share the\n * same {@link //decisionToDFA} field. They also share a\n * {@link PredictionContextCache} object that makes sure that all\n * {@link PredictionContext} objects are shared among the DFA states. This makes\n * a big size difference.

\n *\n *

\n * THREAD SAFETY

\n *\n *

\n * The {@link ParserATNSimulator} locks on the {@link //decisionToDFA} field when\n * it adds a new DFA object to that array. {@link //addDFAEdge}\n * locks on the DFA for the current decision when setting the\n * {@link DFAState//edges} field. {@link //addDFAState} locks on\n * the DFA for the current decision when looking up a DFA state to see if it\n * already exists. We must make sure that all requests to add DFA states that\n * are equivalent result in the same shared DFA object. This is because lots of\n * threads will be trying to update the DFA at once. The\n * {@link //addDFAState} method also locks inside the DFA lock\n * but this time on the shared context cache when it rebuilds the\n * configurations' {@link PredictionContext} objects using cached\n * subgraphs/nodes. No other locking occurs, even during DFA simulation. This is\n * safe as long as we can guarantee that all threads referencing\n * {@code s.edge[t]} get the same physical target {@link DFAState}, or\n * {@code null}. Once into the DFA, the DFA simulation does not reference the\n * {@link DFA//states} map. It follows the {@link DFAState//edges} field to new\n * targets. The DFA simulator will either find {@link DFAState//edges} to be\n * {@code null}, to be non-{@code null} and {@code dfa.edges[t]} null, or\n * {@code dfa.edges[t]} to be non-null. The\n * {@link //addDFAEdge} method could be racing to set the field\n * but in either case the DFA simulator works; if {@code null}, and requests ATN\n * simulation. It could also race trying to get {@code dfa.edges[t]}, but either\n * way it will work because it's not doing a test and set operation.

\n *\n *

\n * Starting with SLL then failing over to combined SLL/LL (Two-Stage\n * Parsing)

\n *\n *

\n * Sam pointed out that if SLL does not give a syntax error, then there is no\n * point in doing full LL, which is slower. We only have to try LL if we get a\n * syntax error. For maximum speed, Sam starts the parser set to pure SLL\n * mode with the {@link BailErrorStrategy}:

\n *\n *
\n * parser.{@link Parser//getInterpreter() getInterpreter()}.{@link //setPredictionMode setPredictionMode}{@code (}{@link PredictionMode//SLL}{@code )};\n * parser.{@link Parser//setErrorHandler setErrorHandler}(new {@link BailErrorStrategy}());\n * 
\n *\n *

\n * If it does not get a syntax error, then we're done. If it does get a syntax\n * error, we need to retry with the combined SLL/LL strategy.

\n *\n *

\n * The reason this works is as follows. If there are no SLL conflicts, then the\n * grammar is SLL (at least for that input set). If there is an SLL conflict,\n * the full LL analysis must yield a set of viable alternatives which is a\n * subset of the alternatives reported by SLL. If the LL set is a singleton,\n * then the grammar is LL but not SLL. If the LL set is the same size as the SLL\n * set, the decision is SLL. If the LL set has size > 1, then that decision\n * is truly ambiguous on the current input. If the LL set is smaller, then the\n * SLL conflict resolution might choose an alternative that the full LL would\n * rule out as a possibility based upon better context information. If that's\n * the case, then the SLL parse will definitely get an error because the full LL\n * analysis says it's not viable. If SLL conflict resolution chooses an\n * alternative within the LL set, them both SLL and LL would choose the same\n * alternative because they both choose the minimum of multiple conflicting\n * alternatives.

\n *\n *

\n * Let's say we have a set of SLL conflicting alternatives {@code {1, 2, 3}} and\n * a smaller LL set called s. If s is {@code {2, 3}}, then SLL\n * parsing will get an error because SLL will pursue alternative 1. If\n * s is {@code {1, 2}} or {@code {1, 3}} then both SLL and LL will\n * choose the same alternative because alternative one is the minimum of either\n * set. If s is {@code {2}} or {@code {3}} then SLL will get a syntax\n * error. If s is {@code {1}} then SLL will succeed.

\n *\n *

\n * Of course, if the input is invalid, then we will get an error for sure in\n * both SLL and LL parsing. Erroneous input will therefore require 2 passes over\n * the input.

\n */\nclass ParserATNSimulator extends ATNSimulator {\n constructor(parser, atn, decisionToDFA, sharedContextCache) {\n super(atn, sharedContextCache);\n this.parser = parser;\n this.decisionToDFA = decisionToDFA;\n // SLL, LL, or LL + exact ambig detection?//\n this.predictionMode = PredictionMode.LL;\n // LAME globals to avoid parameters!!!!! I need these down deep in predTransition\n this._input = null;\n this._startIndex = 0;\n this._outerContext = null;\n this._dfa = null;\n /**\n * Each prediction operation uses a cache for merge of prediction contexts.\n * Don't keep around as it wastes huge amounts of memory. DoubleKeyMap\n * isn't synchronized but we're ok since two threads shouldn't reuse same\n * parser/atnsim object because it can only handle one input at a time.\n * This maps graphs a and b to merged result c. (a,b)→c. We can avoid\n * the merge if we ever see a and b again. Note that (b,a)→c should\n * also be examined during cache lookup.\n */\n this.mergeCache = null;\n this.debug = false;\n this.debug_closure = false;\n this.debug_add = false;\n this.debug_list_atn_decisions = false;\n this.dfa_debug = false;\n this.retry_debug = false;\n }\n\n reset() {}\n\n adaptivePredict(input, decision, outerContext) {\n if (this.debug || this.debug_list_atn_decisions) {\n console.log(\"adaptivePredict decision \" + decision +\n \" exec LA(1)==\" + this.getLookaheadName(input) +\n \" line \" + input.LT(1).line + \":\" +\n input.LT(1).column);\n }\n this._input = input;\n this._startIndex = input.index;\n this._outerContext = outerContext;\n\n const dfa = this.decisionToDFA[decision];\n this._dfa = dfa;\n const m = input.mark();\n const index = input.index;\n\n // Now we are certain to have a specific decision's DFA\n // But, do we still need an initial state?\n try {\n let s0;\n if (dfa.precedenceDfa) {\n // the start state for a precedence DFA depends on the current\n // parser precedence, and is provided by a DFA method.\n s0 = 
dfa.getPrecedenceStartState(this.parser.getPrecedence());\n } else {\n // the start state for a \"regular\" DFA is just s0\n s0 = dfa.s0;\n }\n if (s0===null) {\n if (outerContext===null) {\n outerContext = RuleContext.EMPTY;\n }\n if (this.debug || this.debug_list_atn_decisions) {\n console.log(\"predictATN decision \" + dfa.decision +\n \" exec LA(1)==\" + this.getLookaheadName(input) +\n \", outerContext=\" + outerContext.toString(this.parser.ruleNames));\n }\n\n const fullCtx = false;\n let s0_closure = this.computeStartState(dfa.atnStartState, RuleContext.EMPTY, fullCtx);\n\n if( dfa.precedenceDfa) {\n // If this is a precedence DFA, we use applyPrecedenceFilter\n // to convert the computed start state to a precedence start\n // state. We then use DFA.setPrecedenceStartState to set the\n // appropriate start state for the precedence level rather\n // than simply setting DFA.s0.\n //\n dfa.s0.configs = s0_closure; // not used for prediction but useful to know start configs anyway\n s0_closure = this.applyPrecedenceFilter(s0_closure);\n s0 = this.addDFAState(dfa, new DFAState(null, s0_closure));\n dfa.setPrecedenceStartState(this.parser.getPrecedence(), s0);\n } else {\n s0 = this.addDFAState(dfa, new DFAState(null, s0_closure));\n dfa.s0 = s0;\n }\n }\n const alt = this.execATN(dfa, s0, input, index, outerContext);\n if (this.debug) {\n console.log(\"DFA after predictATN: \" + dfa.toString(this.parser.literalNames));\n }\n return alt;\n } finally {\n this._dfa = null;\n this.mergeCache = null; // wack cache after each prediction\n input.seek(index);\n input.release(m);\n }\n }\n\n /**\n * Performs ATN simulation to compute a predicted alternative based\n * upon the remaining input, but also updates the DFA cache to avoid\n * having to traverse the ATN again for the same input sequence.\n *\n * There are some key conditions we're looking for after computing a new\n * set of ATN configs (proposed DFA state):\n * if the set is empty, there is no viable alternative 
for current symbol\n * does the state uniquely predict an alternative?\n * does the state have a conflict that would prevent us from\n * putting it on the work list?\n *\n * We also have some key operations to do:\n * add an edge from previous DFA state to potentially new DFA state, D,\n * upon current symbol but only if adding to work list, which means in all\n * cases except no viable alternative (and possibly non-greedy decisions?)\n * collecting predicates and adding semantic context to DFA accept states\n * adding rule context to context-sensitive DFA accept states\n * consuming an input symbol\n * reporting a conflict\n * reporting an ambiguity\n * reporting a context sensitivity\n * reporting insufficient predicates\n *\n * cover these cases:\n * dead end\n * single alt\n * single alt + preds\n * conflict\n * conflict + preds\n *\n */\n execATN(dfa, s0, input, startIndex, outerContext ) {\n if (this.debug || this.debug_list_atn_decisions) {\n console.log(\"execATN decision \" + dfa.decision +\n \" exec LA(1)==\" + this.getLookaheadName(input) +\n \" line \" + input.LT(1).line + \":\" + input.LT(1).column);\n }\n let alt;\n let previousD = s0;\n\n if (this.debug) {\n console.log(\"s0 = \" + s0);\n }\n let t = input.LA(1);\n while(true) { // while more work\n let D = this.getExistingTargetState(previousD, t);\n if(D===null) {\n D = this.computeTargetState(dfa, previousD, t);\n }\n if(D===ATNSimulator.ERROR) {\n // if any configs in previous dipped into outer context, that\n // means that input up to t actually finished entry rule\n // at least for SLL decision. Full LL doesn't dip into outer\n // so don't need special case.\n // We will get an error no matter what so delay until after\n // decision; better error message. 
Also, no reachable target\n // ATN states in SLL implies LL will also get nowhere.\n // If conflict in states that dip out, choose min since we\n // will get error no matter what.\n const e = this.noViableAlt(input, outerContext, previousD.configs, startIndex);\n input.seek(startIndex);\n alt = this.getSynValidOrSemInvalidAltThatFinishedDecisionEntryRule(previousD.configs, outerContext);\n if(alt!==ATN.INVALID_ALT_NUMBER) {\n return alt;\n } else {\n throw e;\n }\n }\n if(D.requiresFullContext && this.predictionMode !== PredictionMode.SLL) {\n // IF PREDS, MIGHT RESOLVE TO SINGLE ALT => SLL (or syntax error)\n let conflictingAlts = null;\n if (D.predicates!==null) {\n if (this.debug) {\n console.log(\"DFA state has preds in DFA sim LL failover\");\n }\n const conflictIndex = input.index;\n if(conflictIndex !== startIndex) {\n input.seek(startIndex);\n }\n conflictingAlts = this.evalSemanticContext(D.predicates, outerContext, true);\n if (conflictingAlts.length===1) {\n if(this.debug) {\n console.log(\"Full LL avoided\");\n }\n return conflictingAlts.minValue();\n }\n if (conflictIndex !== startIndex) {\n // restore the index so reporting the fallback to full\n // context occurs with the index at the correct spot\n input.seek(conflictIndex);\n }\n }\n if (this.dfa_debug) {\n console.log(\"ctx sensitive state \" + outerContext +\" in \" + D);\n }\n const fullCtx = true;\n const s0_closure = this.computeStartState(dfa.atnStartState, outerContext, fullCtx);\n this.reportAttemptingFullContext(dfa, conflictingAlts, D.configs, startIndex, input.index);\n alt = this.execATNWithFullContext(dfa, D, s0_closure, input, startIndex, outerContext);\n return alt;\n }\n if (D.isAcceptState) {\n if (D.predicates===null) {\n return D.prediction;\n }\n const stopIndex = input.index;\n input.seek(startIndex);\n const alts = this.evalSemanticContext(D.predicates, outerContext, true);\n if (alts.length===0) {\n throw this.noViableAlt(input, outerContext, D.configs, startIndex);\n } else 
if (alts.length===1) {\n return alts.minValue();\n } else {\n // report ambiguity after predicate evaluation to make sure the correct set of ambig alts is reported.\n this.reportAmbiguity(dfa, D, startIndex, stopIndex, false, alts, D.configs);\n return alts.minValue();\n }\n }\n previousD = D;\n\n if (t !== Token.EOF) {\n input.consume();\n t = input.LA(1);\n }\n }\n }\n\n /**\n * Get an existing target state for an edge in the DFA. If the target state\n * for the edge has not yet been computed or is otherwise not available,\n * this method returns {@code null}.\n *\n * @param previousD The current DFA state\n * @param t The next input symbol\n * @return The existing target DFA state for the given input symbol\n * {@code t}, or {@code null} if the target state for this edge is not\n * already cached\n */\n getExistingTargetState(previousD, t) {\n const edges = previousD.edges;\n if (edges===null) {\n return null;\n } else {\n return edges[t + 1] || null;\n }\n }\n\n /**\n * Compute a target state for an edge in the DFA, and attempt to add the\n * computed state and corresponding edge to the DFA.\n *\n * @param dfa The DFA\n * @param previousD The current DFA state\n * @param t The next input symbol\n *\n * @return The computed target DFA state for the given input symbol\n * {@code t}. 
If {@code t} does not lead to a valid DFA state, this method\n * returns {@link //ERROR\n */\n computeTargetState(dfa, previousD, t) {\n const reach = this.computeReachSet(previousD.configs, t, false);\n if(reach===null) {\n this.addDFAEdge(dfa, previousD, t, ATNSimulator.ERROR);\n return ATNSimulator.ERROR;\n }\n // create new target state; we'll add to DFA after it's complete\n let D = new DFAState(null, reach);\n\n const predictedAlt = this.getUniqueAlt(reach);\n\n if (this.debug) {\n const altSubSets = PredictionMode.getConflictingAltSubsets(reach);\n console.log(\"SLL altSubSets=\" + Utils.arrayToString(altSubSets) +\n \", previous=\" + previousD.configs +\n \", configs=\" + reach +\n \", predict=\" + predictedAlt +\n \", allSubsetsConflict=\" +\n PredictionMode.allSubsetsConflict(altSubSets) + \", conflictingAlts=\" +\n this.getConflictingAlts(reach));\n }\n if (predictedAlt!==ATN.INVALID_ALT_NUMBER) {\n // NO CONFLICT, UNIQUELY PREDICTED ALT\n D.isAcceptState = true;\n D.configs.uniqueAlt = predictedAlt;\n D.prediction = predictedAlt;\n } else if (PredictionMode.hasSLLConflictTerminatingPrediction(this.predictionMode, reach)) {\n // MORE THAN ONE VIABLE ALTERNATIVE\n D.configs.conflictingAlts = this.getConflictingAlts(reach);\n D.requiresFullContext = true;\n // in SLL-only mode, we will stop at this state and return the minimum alt\n D.isAcceptState = true;\n D.prediction = D.configs.conflictingAlts.minValue();\n }\n if (D.isAcceptState && D.configs.hasSemanticContext) {\n this.predicateDFAState(D, this.atn.getDecisionState(dfa.decision));\n if( D.predicates!==null) {\n D.prediction = ATN.INVALID_ALT_NUMBER;\n }\n }\n // all adds to dfa are done after we've created full D state\n D = this.addDFAEdge(dfa, previousD, t, D);\n return D;\n }\n\n predicateDFAState(dfaState, decisionState) {\n // We need to test all predicates, even in DFA states that\n // uniquely predict alternative.\n const nalts = decisionState.transitions.length;\n // Update DFA so reach 
becomes accept state with (predicate,alt)\n // pairs if preds found for conflicting alts\n const altsToCollectPredsFrom = this.getConflictingAltsOrUniqueAlt(dfaState.configs);\n const altToPred = this.getPredsForAmbigAlts(altsToCollectPredsFrom, dfaState.configs, nalts);\n if (altToPred!==null) {\n dfaState.predicates = this.getPredicatePredictions(altsToCollectPredsFrom, altToPred);\n dfaState.prediction = ATN.INVALID_ALT_NUMBER; // make sure we use preds\n } else {\n // There are preds in configs but they might go away\n // when OR'd together like {p}? || NONE == NONE. If neither\n // alt has preds, resolve to min alt\n dfaState.prediction = altsToCollectPredsFrom.minValue();\n }\n }\n\n// comes back with reach.uniqueAlt set to a valid alt\n execATNWithFullContext(dfa, D, // how far we got before failing over\n s0,\n input,\n startIndex,\n outerContext) {\n if (this.debug || this.debug_list_atn_decisions) {\n console.log(\"execATNWithFullContext \"+s0);\n }\n const fullCtx = true;\n let foundExactAmbig = false;\n let reach;\n let previous = s0;\n input.seek(startIndex);\n let t = input.LA(1);\n let predictedAlt = -1;\n while (true) { // while more work\n reach = this.computeReachSet(previous, t, fullCtx);\n if (reach===null) {\n // if any configs in previous dipped into outer context, that\n // means that input up to t actually finished entry rule\n // at least for LL decision. Full LL doesn't dip into outer\n // so don't need special case.\n // We will get an error no matter what so delay until after\n // decision; better error message. 
Also, no reachable target\n // ATN states in SLL implies LL will also get nowhere.\n // If conflict in states that dip out, choose min since we\n // will get error no matter what.\n const e = this.noViableAlt(input, outerContext, previous, startIndex);\n input.seek(startIndex);\n const alt = this.getSynValidOrSemInvalidAltThatFinishedDecisionEntryRule(previous, outerContext);\n if(alt!==ATN.INVALID_ALT_NUMBER) {\n return alt;\n } else {\n throw e;\n }\n }\n const altSubSets = PredictionMode.getConflictingAltSubsets(reach);\n if(this.debug) {\n console.log(\"LL altSubSets=\" + altSubSets + \", predict=\" +\n PredictionMode.getUniqueAlt(altSubSets) + \", resolvesToJustOneViableAlt=\" +\n PredictionMode.resolvesToJustOneViableAlt(altSubSets));\n }\n reach.uniqueAlt = this.getUniqueAlt(reach);\n // unique prediction?\n if(reach.uniqueAlt!==ATN.INVALID_ALT_NUMBER) {\n predictedAlt = reach.uniqueAlt;\n break;\n } else if (this.predictionMode !== PredictionMode.LL_EXACT_AMBIG_DETECTION) {\n predictedAlt = PredictionMode.resolvesToJustOneViableAlt(altSubSets);\n if(predictedAlt !== ATN.INVALID_ALT_NUMBER) {\n break;\n }\n } else {\n // In exact ambiguity mode, we never try to terminate early.\n // Just keeps scarfing until we know what the conflict is\n if (PredictionMode.allSubsetsConflict(altSubSets) && PredictionMode.allSubsetsEqual(altSubSets)) {\n foundExactAmbig = true;\n predictedAlt = PredictionMode.getSingleViableAlt(altSubSets);\n break;\n }\n // else there are multiple non-conflicting subsets or\n // we're not sure what the ambiguity is yet.\n // So, keep going.\n }\n previous = reach;\n if( t !== Token.EOF) {\n input.consume();\n t = input.LA(1);\n }\n }\n // If the configuration set uniquely predicts an alternative,\n // without conflict, then we know that it's a full LL decision\n // not SLL.\n if (reach.uniqueAlt !== ATN.INVALID_ALT_NUMBER ) {\n this.reportContextSensitivity(dfa, predictedAlt, reach, startIndex, input.index);\n return predictedAlt;\n }\n // 
We do not check predicates here because we have checked them\n // on-the-fly when doing full context prediction.\n\n //\n // In non-exact ambiguity detection mode, we might\tactually be able to\n // detect an exact ambiguity, but I'm not going to spend the cycles\n // needed to check. We only emit ambiguity warnings in exact ambiguity\n // mode.\n //\n // For example, we might know that we have conflicting configurations.\n // But, that does not mean that there is no way forward without a\n // conflict. It's possible to have nonconflicting alt subsets as in:\n\n // altSubSets=[{1, 2}, {1, 2}, {1}, {1, 2}]\n\n // from\n //\n // [(17,1,[5 $]), (13,1,[5 10 $]), (21,1,[5 10 $]), (11,1,[$]),\n // (13,2,[5 10 $]), (21,2,[5 10 $]), (11,2,[$])]\n //\n // In this case, (17,1,[5 $]) indicates there is some next sequence that\n // would resolve this without conflict to alternative 1. Any other viable\n // next sequence, however, is associated with a conflict. We stop\n // looking for input because no amount of further lookahead will alter\n // the fact that we should predict alternative 1. We just can't say for\n // sure that there is an ambiguity without looking further.\n\n this.reportAmbiguity(dfa, D, startIndex, input.index, foundExactAmbig, null, reach);\n\n return predictedAlt;\n }\n\n computeReachSet(closure, t, fullCtx) {\n if (this.debug) {\n console.log(\"in computeReachSet, starting closure: \" + closure);\n }\n if( this.mergeCache===null) {\n this.mergeCache = new DoubleDict();\n }\n const intermediate = new ATNConfigSet(fullCtx);\n\n // Configurations already in a rule stop state indicate reaching the end\n // of the decision rule (local context) or end of the start rule (full\n // context). 
Once reached, these configurations are never updated by a\n // closure operation, so they are handled separately for the performance\n // advantage of having a smaller intermediate set when calling closure.\n //\n // For full-context reach operations, separate handling is required to\n // ensure that the alternative matching the longest overall sequence is\n // chosen when multiple such configurations can match the input.\n\n let skippedStopStates = null;\n\n // First figure out where we can reach on input t\n for (let i=0; iWhen {@code lookToEndOfRule} is true, this method uses\n * {@link ATN//nextTokens} for each configuration in {@code configs} which is\n * not already in a rule stop state to see if a rule stop state is reachable\n * from the configuration via epsilon-only transitions.

\n *\n * @param configs the configuration set to update\n * @param lookToEndOfRule when true, this method checks for rule stop states\n * reachable by epsilon-only transitions from each configuration in\n * {@code configs}.\n *\n * @return {@code configs} if all configurations in {@code configs} are in a\n * rule stop state, otherwise return a new configuration set containing only\n * the configurations from {@code configs} which are in a rule stop state\n */\n removeAllConfigsNotInRuleStopState(configs, lookToEndOfRule) {\n if (PredictionMode.allConfigsInRuleStopStates(configs)) {\n return configs;\n }\n const result = new ATNConfigSet(configs.fullCtx);\n for(let i=0; i\n *
  • Evaluate the precedence predicates for each configuration using\n * {@link SemanticContext//evalPrecedence}.
  • \n *
  • Remove all configurations which predict an alternative greater than\n * 1, for which another configuration that predicts alternative 1 is in the\n * same ATN state with the same prediction context. This transformation is\n * valid for the following reasons:\n *
      \n *
    • The closure block cannot contain any epsilon transitions which bypass\n * the body of the closure, so all states reachable via alternative 1 are\n * part of the precedence alternatives of the transformed left-recursive\n * rule.
    • \n *
    • The \"primary\" portion of a left recursive rule cannot contain an\n * epsilon transition, so the only way an alternative other than 1 can exist\n * in a state that is also reachable via alternative 1 is by nesting calls\n * to the left-recursive rule, with the outer calls not being at the\n * preferred precedence level.
    • \n *
    \n *
  • \n * \n *\n *

    \n * The prediction context must be considered by this filter to address\n * situations like the following.\n *

    \n * \n *
    \n     * grammar TA;\n     * prog: statement* EOF;\n     * statement: letterA | statement letterA 'b' ;\n     * letterA: 'a';\n     * 
    \n *
    \n *

    \n * In the above grammar, the ATN state immediately before the token\n * reference {@code 'a'} in {@code letterA} is reachable from the left edge\n * of both the primary and closure blocks of the left-recursive rule\n * {@code statement}. The prediction context associated with each of these\n * configurations distinguishes between them, and prevents the alternative\n * which stepped out to {@code prog} (and then back in to {@code statement})\n * from being eliminated by the filter.\n *

    \n *\n * @param configs The configuration set computed by\n * {@link //computeStartState} as the start state for the DFA.\n * @return The transformed configuration set representing the start state\n * for a precedence DFA at a particular precedence level (determined by\n * calling {@link Parser//getPrecedence})\n */\n applyPrecedenceFilter(configs) {\n let config;\n const statesFromAlt1 = [];\n const configSet = new ATNConfigSet(configs.fullCtx);\n for(let i=0; i1\n // (basically a graph subtraction algorithm).\n if (!config.precedenceFilterSuppressed) {\n const context = statesFromAlt1[config.state.stateNumber] || null;\n if (context!==null && context.equals(config.context)) {\n // eliminated\n continue;\n }\n }\n configSet.add(config, this.mergeCache);\n }\n return configSet;\n }\n\n getReachableTarget(trans, ttype) {\n if (trans.matches(ttype, 0, this.atn.maxTokenType)) {\n return trans.target;\n } else {\n return null;\n }\n }\n\n getPredsForAmbigAlts(ambigAlts, configs, nalts) {\n // REACH=[1|1|[]|0:0, 1|2|[]|0:1]\n // altToPred starts as an array of all null contexts. The entry at index i\n // corresponds to alternative i. altToPred[i] may have one of three values:\n // 1. null: no ATNConfig c is found such that c.alt==i\n // 2. SemanticContext.NONE: At least one ATNConfig c exists such that\n // c.alt==i and c.semanticContext==SemanticContext.NONE. In other words,\n // alt i has at least one unpredicated config.\n // 3. Non-NONE Semantic Context: There exists at least one, and for all\n // ATNConfig c such that c.alt==i, c.semanticContext!=SemanticContext.NONE.\n //\n // From this, it is clear that NONE||anything==NONE.\n //\n let altToPred = [];\n for(let i=0;i\n * The default implementation of this method uses the following\n * algorithm to identify an ATN configuration which successfully parsed the\n * decision entry rule. 
Choosing such an alternative ensures that the\n * {@link ParserRuleContext} returned by the calling rule will be complete\n * and valid, and the syntax error will be reported later at a more\n * localized location.

    \n *\n *
      \n *
    • If a syntactically valid path or paths reach the end of the decision rule and\n * they are semantically valid if predicated, return the min associated alt.
    • \n *
    • Else, if a semantically invalid but syntactically valid path exist\n * or paths exist, return the minimum associated alt.\n *
    • \n *
    • Otherwise, return {@link ATN//INVALID_ALT_NUMBER}.
    • \n *
    \n *\n *

    \n * In some scenarios, the algorithm described above could predict an\n * alternative which will result in a {@link FailedPredicateException} in\n * the parser. Specifically, this could occur if the only configuration\n * capable of successfully parsing to the end of the decision rule is\n * blocked by a semantic predicate. By choosing this alternative within\n * {@link //adaptivePredict} instead of throwing a\n * {@link NoViableAltException}, the resulting\n * {@link FailedPredicateException} in the parser will identify the specific\n * predicate which is preventing the parser from successfully parsing the\n * decision rule, which helps developers identify and correct logic errors\n * in semantic predicates.\n *

    \n *\n * @param configs The ATN configurations which were valid immediately before\n * the {@link //ERROR} state was reached\n * @param outerContext The is the \\gamma_0 initial parser context from the paper\n * or the parser stack at the instant before prediction commences.\n *\n * @return The value to return from {@link //adaptivePredict}, or\n * {@link ATN//INVALID_ALT_NUMBER} if a suitable alternative was not\n * identified and {@link //adaptivePredict} should report an error instead\n */\n getSynValidOrSemInvalidAltThatFinishedDecisionEntryRule(configs, outerContext) {\n const cfgs = this.splitAccordingToSemanticValidity(configs, outerContext);\n const semValidConfigs = cfgs[0];\n const semInvalidConfigs = cfgs[1];\n let alt = this.getAltThatFinishedDecisionEntryRule(semValidConfigs);\n if (alt!==ATN.INVALID_ALT_NUMBER) { // semantically/syntactically viable path exists\n return alt;\n }\n // Is there a syntactically valid path with a failed pred?\n if (semInvalidConfigs.items.length>0) {\n alt = this.getAltThatFinishedDecisionEntryRule(semInvalidConfigs);\n if (alt!==ATN.INVALID_ALT_NUMBER) { // syntactically viable path exists\n return alt;\n }\n }\n return ATN.INVALID_ALT_NUMBER;\n }\n\n getAltThatFinishedDecisionEntryRule(configs) {\n const alts = [];\n for(let i=0;i0 || ((c.state instanceof RuleStopState) && c.context.hasEmptyPath())) {\n if(alts.indexOf(c.alt)<0) {\n alts.push(c.alt);\n }\n }\n }\n if (alts.length===0) {\n return ATN.INVALID_ALT_NUMBER;\n } else {\n return Math.min.apply(null, alts);\n }\n }\n\n /**\n * Walk the list of configurations and split them according to\n * those that have preds evaluating to true/false. If no pred, assume\n * true pred and include in succeeded set. 
Returns Pair of sets.\n *\n * Create a new set so as not to alter the incoming parameter.\n *\n * Assumption: the input stream has been restored to the starting point\n * prediction, which is where predicates need to evaluate.*/\n splitAccordingToSemanticValidity( configs, outerContext) {\n const succeeded = new ATNConfigSet(configs.fullCtx);\n const failed = new ATNConfigSet(configs.fullCtx);\n for(let i=0;i50) {\n throw \"problem\";\n }\n }\n if (config.state instanceof RuleStopState) {\n // We hit rule end. If we have context info, use it\n // run thru all possible stack tops in ctx\n if (! config.context.isEmpty()) {\n for (let i =0; i 0.\n if (this._dfa !== null && this._dfa.precedenceDfa) {\n if (t.outermostPrecedenceReturn === this._dfa.atnStartState.ruleIndex) {\n c.precedenceFilterSuppressed = true;\n }\n }\n\n c.reachesIntoOuterContext += 1;\n if (closureBusy.add(c)!==c) {\n // avoid infinite recursion for right-recursive rules\n continue;\n }\n configs.dipsIntoOuterContext = true; // TODO: can remove? only care when we add to set per middle of this method\n newDepth -= 1;\n if (this.debug) {\n console.log(\"dips into outer ctx: \" + c);\n }\n } else {\n if (!t.isEpsilon && closureBusy.add(c)!==c){\n // avoid infinite recursion for EOF* and EOF+\n continue;\n }\n if (t instanceof RuleTransition) {\n // latch when newDepth goes negative - once we step out of the entry context we can't return\n if (newDepth >= 0) {\n newDepth += 1;\n }\n }\n }\n this.closureCheckingStopState(c, configs, closureBusy, continueCollecting, fullCtx, newDepth, treatEofAsEpsilon);\n }\n }\n }\n\n canDropLoopEntryEdgeInLeftRecursiveRule(config) {\n // return False\n const p = config.state;\n // First check to see if we are in StarLoopEntryState generated during\n // left-recursion elimination. For efficiency, also check if\n // the context has an empty stack case. If so, it would mean\n // global FOLLOW so we can't perform optimization\n // Are we the special loop entry/exit state? 
or SLL wildcard\n if(p.stateType !== ATNState.STAR_LOOP_ENTRY)\n return false;\n if(p.stateType !== ATNState.STAR_LOOP_ENTRY || !p.isPrecedenceDecision ||\n config.context.isEmpty() || config.context.hasEmptyPath())\n return false;\n\n // Require all return states to return back to the same rule that p is in.\n const numCtxs = config.context.length;\n for(let i=0; i=0) {\n return this.parser.ruleNames[index];\n } else {\n return \"\";\n }\n }\n\n getEpsilonTarget(config, t, collectPredicates, inContext, fullCtx, treatEofAsEpsilon) {\n switch(t.serializationType) {\n case Transition.RULE:\n return this.ruleTransition(config, t);\n case Transition.PRECEDENCE:\n return this.precedenceTransition(config, t, collectPredicates, inContext, fullCtx);\n case Transition.PREDICATE:\n return this.predTransition(config, t, collectPredicates, inContext, fullCtx);\n case Transition.ACTION:\n return this.actionTransition(config, t);\n case Transition.EPSILON:\n return new ATNConfig({state:t.target}, config);\n case Transition.ATOM:\n case Transition.RANGE:\n case Transition.SET:\n // EOF transitions act like epsilon transitions after the first EOF\n // transition is traversed\n if (treatEofAsEpsilon) {\n if (t.matches(Token.EOF, 0, 1)) {\n return new ATNConfig({state: t.target}, config);\n }\n }\n return null;\n default:\n return null;\n }\n }\n\n actionTransition(config, t) {\n if (this.debug) {\n const index = t.actionIndex === -1 ? 
65535 : t.actionIndex;\n console.log(\"ACTION edge \" + t.ruleIndex + \":\" + index);\n }\n return new ATNConfig({state:t.target}, config);\n }\n\n precedenceTransition(config, pt, collectPredicates, inContext, fullCtx) {\n if (this.debug) {\n console.log(\"PRED (collectPredicates=\" + collectPredicates + \") \" +\n pt.precedence + \">=_p, ctx dependent=true\");\n if (this.parser!==null) {\n console.log(\"context surrounding pred is \" + Utils.arrayToString(this.parser.getRuleInvocationStack()));\n }\n }\n let c = null;\n if (collectPredicates && inContext) {\n if (fullCtx) {\n // In full context mode, we can evaluate predicates on-the-fly\n // during closure, which dramatically reduces the size of\n // the config sets. It also obviates the need to test predicates\n // later during conflict resolution.\n const currentPosition = this._input.index;\n this._input.seek(this._startIndex);\n const predSucceeds = pt.getPredicate().evaluate(this.parser, this._outerContext);\n this._input.seek(currentPosition);\n if (predSucceeds) {\n c = new ATNConfig({state:pt.target}, config); // no pred context\n }\n } else {\n const newSemCtx = SemanticContext.andContext(config.semanticContext, pt.getPredicate());\n c = new ATNConfig({state:pt.target, semanticContext:newSemCtx}, config);\n }\n } else {\n c = new ATNConfig({state:pt.target}, config);\n }\n if (this.debug) {\n console.log(\"config from pred transition=\" + c);\n }\n return c;\n }\n\n predTransition(config, pt, collectPredicates, inContext, fullCtx) {\n if (this.debug) {\n console.log(\"PRED (collectPredicates=\" + collectPredicates + \") \" + pt.ruleIndex +\n \":\" + pt.predIndex + \", ctx dependent=\" + pt.isCtxDependent);\n if (this.parser!==null) {\n console.log(\"context surrounding pred is \" + Utils.arrayToString(this.parser.getRuleInvocationStack()));\n }\n }\n let c = null;\n if (collectPredicates && ((pt.isCtxDependent && inContext) || ! 
pt.isCtxDependent)) {\n if (fullCtx) {\n // In full context mode, we can evaluate predicates on-the-fly\n // during closure, which dramatically reduces the size of\n // the config sets. It also obviates the need to test predicates\n // later during conflict resolution.\n const currentPosition = this._input.index;\n this._input.seek(this._startIndex);\n const predSucceeds = pt.getPredicate().evaluate(this.parser, this._outerContext);\n this._input.seek(currentPosition);\n if (predSucceeds) {\n c = new ATNConfig({state:pt.target}, config); // no pred context\n }\n } else {\n const newSemCtx = SemanticContext.andContext(config.semanticContext, pt.getPredicate());\n c = new ATNConfig({state:pt.target, semanticContext:newSemCtx}, config);\n }\n } else {\n c = new ATNConfig({state:pt.target}, config);\n }\n if (this.debug) {\n console.log(\"config from pred transition=\" + c);\n }\n return c;\n }\n\n ruleTransition(config, t) {\n if (this.debug) {\n console.log(\"CALL rule \" + this.getRuleName(t.target.ruleIndex) + \", ctx=\" + config.context);\n }\n const returnState = t.followState;\n const newContext = SingletonPredictionContext.create(config.context, returnState.stateNumber);\n return new ATNConfig({state:t.target, context:newContext}, config );\n }\n\n getConflictingAlts(configs) {\n const altsets = PredictionMode.getConflictingAltSubsets(configs);\n return PredictionMode.getAlts(altsets);\n }\n\n /**\n * Sam pointed out a problem with the previous definition, v3, of\n * ambiguous states. If we have another state associated with conflicting\n * alternatives, we should keep going. For example, the following grammar\n *\n * s : (ID | ID ID?) ';' ;\n *\n * When the ATN simulation reaches the state before ';', it has a DFA\n * state that looks like: [12|1|[], 6|2|[], 12|2|[]]. 
Naturally\n * 12|1|[] and 12|2|[] conflict, but we cannot stop processing this node\n * because alternative to has another way to continue, via [6|2|[]].\n * The key is that we have a single state that has config's only associated\n * with a single alternative, 2, and crucially the state transitions\n * among the configurations are all non-epsilon transitions. That means\n * we don't consider any conflicts that include alternative 2. So, we\n * ignore the conflict between alts 1 and 2. We ignore a set of\n * conflicting alts when there is an intersection with an alternative\n * associated with a single alt state in the state→config-list map.\n *\n * It's also the case that we might have two conflicting configurations but\n * also a 3rd nonconflicting configuration for a different alternative:\n * [1|1|[], 1|2|[], 8|3|[]]. This can come about from grammar:\n *\n * a : A | A | A B ;\n *\n * After matching input A, we reach the stop state for rule A, state 1.\n * State 8 is the state right before B. Clearly alternatives 1 and 2\n * conflict and no amount of further lookahead will separate the two.\n * However, alternative 3 will be able to continue and so we do not\n * stop working on this state. In the previous example, we're concerned\n * with states associated with the conflicting alternatives. Here alt\n * 3 is not associated with the conflicting configs, but since we can continue\n * looking for input reasonably, I don't declare the state done. 
We\n * ignore a set of conflicting alts when we have an alternative\n * that we still need to pursue\n */\n getConflictingAltsOrUniqueAlt(configs) {\n let conflictingAlts = null;\n if (configs.uniqueAlt!== ATN.INVALID_ALT_NUMBER) {\n conflictingAlts = new BitSet();\n conflictingAlts.add(configs.uniqueAlt);\n } else {\n conflictingAlts = configs.conflictingAlts;\n }\n return conflictingAlts;\n }\n\n getTokenName(t) {\n if (t===Token.EOF) {\n return \"EOF\";\n }\n if( this.parser!==null && this.parser.literalNames!==null) {\n if (t >= this.parser.literalNames.length && t >= this.parser.symbolicNames.length) {\n console.log(\"\" + t + \" ttype out of range: \" + this.parser.literalNames);\n console.log(\"\" + this.parser.getInputStream().getTokens());\n } else {\n const name = this.parser.literalNames[t] || this.parser.symbolicNames[t];\n return name + \"<\" + t + \">\";\n }\n }\n return \"\" + t;\n }\n\n getLookaheadName(input) {\n return this.getTokenName(input.LA(1));\n }\n\n /**\n * Used for debugging in adaptivePredict around execATN but I cut\n * it out for clarity now that alg. works well. We can leave this\n * \"dead\" code for a bit\n */\n dumpDeadEndConfigs(nvae) {\n console.log(\"dead end configs: \");\n const decs = nvae.getDeadEndConfigs();\n for(let i=0; i0) {\n const t = c.state.transitions[0];\n if (t instanceof AtomTransition) {\n trans = \"Atom \"+ this.getTokenName(t.label);\n } else if (t instanceof SetTransition) {\n const neg = (t instanceof NotSetTransition);\n trans = (neg ? 
\"~\" : \"\") + \"Set \" + t.set;\n }\n }\n console.error(c.toString(this.parser, true) + \":\" + trans);\n }\n }\n\n noViableAlt(input, outerContext, configs, startIndex) {\n return new NoViableAltException(this.parser, input, input.get(startIndex), input.LT(1), configs, outerContext);\n }\n\n getUniqueAlt(configs) {\n let alt = ATN.INVALID_ALT_NUMBER;\n for(let i=0;iIf {@code to} is {@code null}, this method returns {@code null}.\n * Otherwise, this method returns the {@link DFAState} returned by calling\n * {@link //addDFAState} for the {@code to} state.

    \n *\n * @param dfa The DFA\n * @param from_ The source state for the edge\n * @param t The input symbol\n * @param to The target state for the edge\n *\n * @return If {@code to} is {@code null}, this method returns {@code null};\n * otherwise this method returns the result of calling {@link //addDFAState}\n * on {@code to}\n */\n addDFAEdge(dfa, from_, t, to) {\n if( this.debug) {\n console.log(\"EDGE \" + from_ + \" -> \" + to + \" upon \" + this.getTokenName(t));\n }\n if (to===null) {\n return null;\n }\n to = this.addDFAState(dfa, to); // used existing if possible not incoming\n if (from_===null || t < -1 || t > this.atn.maxTokenType) {\n return to;\n }\n if (from_.edges===null) {\n from_.edges = [];\n }\n from_.edges[t+1] = to; // connect\n\n if (this.debug) {\n const literalNames = this.parser===null ? null : this.parser.literalNames;\n const symbolicNames = this.parser===null ? null : this.parser.symbolicNames;\n console.log(\"DFA=\\n\" + dfa.toString(literalNames, symbolicNames));\n }\n return to;\n }\n\n /**\n * Add state {@code D} to the DFA if it is not already present, and return\n * the actual instance stored in the DFA. If a state equivalent to {@code D}\n * is already in the DFA, the existing state is returned. Otherwise this\n * method returns {@code D} after adding it to the DFA.\n *\n *

    If {@code D} is {@link //ERROR}, this method returns {@link //ERROR} and\n * does not change the DFA.

    \n *\n * @param dfa The dfa\n * @param D The DFA state to add\n * @return The state stored in the DFA. This will be either the existing\n * state if {@code D} is already in the DFA, or {@code D} itself if the\n * state was not already present\n */\n addDFAState(dfa, D) {\n if (D === ATNSimulator.ERROR) {\n return D;\n }\n const existing = dfa.states.get(D);\n if(existing!==null) {\n return existing;\n }\n D.stateNumber = dfa.states.length;\n if (! D.configs.readOnly) {\n D.configs.optimizeConfigs(this);\n D.configs.setReadonly(true);\n }\n dfa.states.add(D);\n if (this.debug) {\n console.log(\"adding new DFA state: \" + D);\n }\n return D;\n }\n\n reportAttemptingFullContext(dfa, conflictingAlts, configs, startIndex, stopIndex) {\n if (this.debug || this.retry_debug) {\n const interval = new Interval(startIndex, stopIndex + 1);\n console.log(\"reportAttemptingFullContext decision=\" + dfa.decision + \":\" + configs +\n \", input=\" + this.parser.getTokenStream().getText(interval));\n }\n if (this.parser!==null) {\n this.parser.getErrorListenerDispatch().reportAttemptingFullContext(this.parser, dfa, startIndex, stopIndex, conflictingAlts, configs);\n }\n }\n\n reportContextSensitivity(dfa, prediction, configs, startIndex, stopIndex) {\n if (this.debug || this.retry_debug) {\n const interval = new Interval(startIndex, stopIndex + 1);\n console.log(\"reportContextSensitivity decision=\" + dfa.decision + \":\" + configs +\n \", input=\" + this.parser.getTokenStream().getText(interval));\n }\n if (this.parser!==null) {\n this.parser.getErrorListenerDispatch().reportContextSensitivity(this.parser, dfa, startIndex, stopIndex, prediction, configs);\n }\n }\n\n // If context sensitive parsing, we know it's ambiguity not conflict//\n reportAmbiguity(dfa, D, startIndex, stopIndex,\n exact, ambigAlts, configs ) {\n if (this.debug || this.retry_debug) {\n const interval = new Interval(startIndex, stopIndex + 1);\n console.log(\"reportAmbiguity \" + ambigAlts + \":\" + 
configs +\n \", input=\" + this.parser.getTokenStream().getText(interval));\n }\n if (this.parser!==null) {\n this.parser.getErrorListenerDispatch().reportAmbiguity(this.parser, dfa, startIndex, stopIndex, exact, ambigAlts, configs);\n }\n }\n}\n\nmodule.exports = ParserATNSimulator;\n", "/* Copyright (c) 2012-2017 The ANTLR Project. All rights reserved.\n * Use of this file is governed by the BSD 3-clause license that\n * can be found in the LICENSE.txt file in the project root.\n */\n\nexports.ATN = require('./ATN');\nexports.ATNDeserializer = require('./ATNDeserializer');\nexports.LexerATNSimulator = require('./LexerATNSimulator');\nexports.ParserATNSimulator = require('./ParserATNSimulator');\nexports.PredictionMode = require('./PredictionMode');\n", "/*! https://mths.be/codepointat v0.2.0 by @mathias */\nif (!String.prototype.codePointAt) {\n\t(function() {\n\t\t'use strict'; // needed to support `apply`/`call` with `undefined`/`null`\n\t\tvar defineProperty = (function() {\n\t\t\t// IE 8 only supports `Object.defineProperty` on DOM elements\n\t\t\tlet result;\n\t\t\ttry {\n\t\t\t\tconst object = {};\n\t\t\t\tconst $defineProperty = Object.defineProperty;\n\t\t\t\tresult = $defineProperty(object, object, object) && $defineProperty;\n\t\t\t} catch(error) {\n\t\t\t}\n\t\t\treturn result;\n\t\t}());\n\t\tconst codePointAt = function(position) {\n\t\t\tif (this == null) {\n\t\t\t\tthrow TypeError();\n\t\t\t}\n\t\t\tconst string = String(this);\n\t\t\tconst size = string.length;\n\t\t\t// `ToInteger`\n\t\t\tlet index = position ? 
Number(position) : 0;\n\t\t\tif (index !== index) { // better `isNaN`\n\t\t\t\tindex = 0;\n\t\t\t}\n\t\t\t// Account for out-of-bounds indices:\n\t\t\tif (index < 0 || index >= size) {\n\t\t\t\treturn undefined;\n\t\t\t}\n\t\t\t// Get the first code unit\n\t\t\tconst first = string.charCodeAt(index);\n\t\t\tlet second;\n\t\t\tif ( // check if it\u2019s the start of a surrogate pair\n\t\t\t\tfirst >= 0xD800 && first <= 0xDBFF && // high surrogate\n\t\t\t\tsize > index + 1 // there is a next code unit\n\t\t\t) {\n\t\t\t\tsecond = string.charCodeAt(index + 1);\n\t\t\t\tif (second >= 0xDC00 && second <= 0xDFFF) { // low surrogate\n\t\t\t\t\t// https://mathiasbynens.be/notes/javascript-encoding#surrogate-formulae\n\t\t\t\t\treturn (first - 0xD800) * 0x400 + second - 0xDC00 + 0x10000;\n\t\t\t\t}\n\t\t\t}\n\t\t\treturn first;\n\t\t};\n\t\tif (defineProperty) {\n\t\t\tdefineProperty(String.prototype, 'codePointAt', {\n\t\t\t\t'value': codePointAt,\n\t\t\t\t'configurable': true,\n\t\t\t\t'writable': true\n\t\t\t});\n\t\t} else {\n\t\t\tString.prototype.codePointAt = codePointAt;\n\t\t}\n\t}());\n}\n", "/* Copyright (c) 2012-2017 The ANTLR Project. All rights reserved.\n * Use of this file is governed by the BSD 3-clause license that\n * can be found in the LICENSE.txt file in the project root.\n */\n\n/**\n * A DFA walker that knows how to dump them to serialized strings.\n */\nclass DFASerializer {\n constructor(dfa, literalNames, symbolicNames) {\n this.dfa = dfa;\n this.literalNames = literalNames || [];\n this.symbolicNames = symbolicNames || [];\n }\n\n toString() {\n if(this.dfa.s0 === null) {\n return null;\n }\n let buf = \"\";\n const states = this.dfa.sortedStates();\n for(let i=0; i\");\n buf = buf.concat(this.getStateString(t));\n buf = buf.concat('\\n');\n }\n }\n }\n }\n return buf.length===0 ? 
null : buf;\n }\n\n getEdgeLabel(i) {\n if (i===0) {\n return \"EOF\";\n } else if(this.literalNames !==null || this.symbolicNames!==null) {\n return this.literalNames[i-1] || this.symbolicNames[i-1];\n } else {\n return String.fromCharCode(i-1);\n }\n }\n\n getStateString(s) {\n const baseStateStr = ( s.isAcceptState ? \":\" : \"\") + \"s\" + s.stateNumber + ( s.requiresFullContext ? \"^\" : \"\");\n if(s.isAcceptState) {\n if (s.predicates !== null) {\n return baseStateStr + \"=>\" + s.predicates.toString();\n } else {\n return baseStateStr + \"=>\" + s.prediction.toString();\n }\n } else {\n return baseStateStr;\n }\n }\n}\n\nclass LexerDFASerializer extends DFASerializer {\n constructor(dfa) {\n super(dfa, null);\n }\n\n getEdgeLabel(i) {\n return \"'\" + String.fromCharCode(i) + \"'\";\n }\n}\n\nmodule.exports = { DFASerializer , LexerDFASerializer };\n\n", "/* Copyright (c) 2012-2017 The ANTLR Project. All rights reserved.\n * Use of this file is governed by the BSD 3-clause license that\n * can be found in the LICENSE.txt file in the project root.\n */\n\nconst {Set} = require(\"../Utils\");\nconst {DFAState} = require('./DFAState');\nconst {StarLoopEntryState} = require('../atn/ATNState');\nconst {ATNConfigSet} = require('./../atn/ATNConfigSet');\nconst {DFASerializer} = require('./DFASerializer');\nconst {LexerDFASerializer} = require('./DFASerializer');\n\nclass DFA {\n\tconstructor(atnStartState, decision) {\n\t\tif (decision === undefined) {\n\t\t\tdecision = 0;\n\t\t}\n\t\t/**\n\t\t * From which ATN state did we create this DFA?\n\t\t */\n\t\tthis.atnStartState = atnStartState;\n\t\tthis.decision = decision;\n\t\t/**\n\t\t * A set of all DFA states. Use {@link Map} so we can get old state back\n\t\t * ({@link Set} only allows you to see if it's there).\n\t\t */\n\t\tthis._states = new Set();\n\t\tthis.s0 = null;\n\t\t/**\n\t\t * {@code true} if this DFA is for a precedence decision; otherwise,\n\t\t * {@code false}. 
This is the backing field for {@link //isPrecedenceDfa},\n\t\t * {@link //setPrecedenceDfa}\n\t\t */\n\t\tthis.precedenceDfa = false;\n\t\tif (atnStartState instanceof StarLoopEntryState)\n\t\t{\n\t\t\tif (atnStartState.isPrecedenceDecision) {\n\t\t\t\tthis.precedenceDfa = true;\n\t\t\t\tconst precedenceState = new DFAState(null, new ATNConfigSet());\n\t\t\t\tprecedenceState.edges = [];\n\t\t\t\tprecedenceState.isAcceptState = false;\n\t\t\t\tprecedenceState.requiresFullContext = false;\n\t\t\t\tthis.s0 = precedenceState;\n\t\t\t}\n\t\t}\n\t}\n\n\t/**\n\t * Get the start state for a specific precedence value.\n\t *\n\t * @param precedence The current precedence.\n\t * @return The start state corresponding to the specified precedence, or\n\t * {@code null} if no start state exists for the specified precedence.\n\t *\n\t * @throws IllegalStateException if this is not a precedence DFA.\n\t * @see //isPrecedenceDfa()\n\t */\n\tgetPrecedenceStartState(precedence) {\n\t\tif (!(this.precedenceDfa)) {\n\t\t\tthrow (\"Only precedence DFAs may contain a precedence start state.\");\n\t\t}\n\t\t// s0.edges is never null for a precedence DFA\n\t\tif (precedence < 0 || precedence >= this.s0.edges.length) {\n\t\t\treturn null;\n\t\t}\n\t\treturn this.s0.edges[precedence] || null;\n\t}\n\n\t/**\n\t * Set the start state for a specific precedence value.\n\t *\n\t * @param precedence The current precedence.\n\t * @param startState The start state corresponding to the specified\n\t * precedence.\n\t *\n\t * @throws IllegalStateException if this is not a precedence DFA.\n\t * @see //isPrecedenceDfa()\n\t */\n\tsetPrecedenceStartState(precedence, startState) {\n\t\tif (!(this.precedenceDfa)) {\n\t\t\tthrow (\"Only precedence DFAs may contain a precedence start state.\");\n\t\t}\n\t\tif (precedence < 0) {\n\t\t\treturn;\n\t\t}\n\n\t\t/**\n\t\t * synchronization on s0 here is ok. 
when the DFA is turned into a\n\t\t * precedence DFA, s0 will be initialized once and not updated again\n\t\t * s0.edges is never null for a precedence DFA\n\t\t */\n\t\tthis.s0.edges[precedence] = startState;\n\t}\n\n\t/**\n\t * Sets whether this is a precedence DFA. If the specified value differs\n\t * from the current DFA configuration, the following actions are taken;\n\t * otherwise no changes are made to the current DFA.\n\t *\n\t *
      \n\t *
    • The {@link //states} map is cleared
    • \n\t *
    • If {@code precedenceDfa} is {@code false}, the initial state\n\t * {@link //s0} is set to {@code null}; otherwise, it is initialized to a new\n\t * {@link DFAState} with an empty outgoing {@link DFAState//edges} array to\n\t * store the start states for individual precedence values.
    • \n\t *
    • The {@link //precedenceDfa} field is updated
    • \n\t *
    \n\t *\n\t * @param precedenceDfa {@code true} if this is a precedence DFA; otherwise,\n\t * {@code false}\n\t */\n\tsetPrecedenceDfa(precedenceDfa) {\n\t\tif (this.precedenceDfa!==precedenceDfa) {\n\t\t\tthis._states = new Set();\n\t\t\tif (precedenceDfa) {\n\t\t\t\tconst precedenceState = new DFAState(null, new ATNConfigSet());\n\t\t\t\tprecedenceState.edges = [];\n\t\t\t\tprecedenceState.isAcceptState = false;\n\t\t\t\tprecedenceState.requiresFullContext = false;\n\t\t\t\tthis.s0 = precedenceState;\n\t\t\t} else {\n\t\t\t\tthis.s0 = null;\n\t\t\t}\n\t\t\tthis.precedenceDfa = precedenceDfa;\n\t\t}\n\t}\n\n\t/**\n\t * Return a list of all states in this DFA, ordered by state number.\n\t */\n\tsortedStates() {\n\t\tconst list = this._states.values();\n\t\treturn list.sort(function(a, b) {\n\t\t\treturn a.stateNumber - b.stateNumber;\n\t\t});\n\t}\n\n\ttoString(literalNames, symbolicNames) {\n\t\tliteralNames = literalNames || null;\n\t\tsymbolicNames = symbolicNames || null;\n\t\tif (this.s0 === null) {\n\t\t\treturn \"\";\n\t\t}\n\t\tconst serializer = new DFASerializer(this, literalNames, symbolicNames);\n\t\treturn serializer.toString();\n\t}\n\n\ttoLexerString() {\n\t\tif (this.s0 === null) {\n\t\t\treturn \"\";\n\t\t}\n\t\tconst serializer = new LexerDFASerializer(this);\n\t\treturn serializer.toString();\n\t}\n\n\tget states(){\n\t\treturn this._states;\n\t}\n}\n\n\nmodule.exports = DFA;\n", "/* Copyright (c) 2012-2017 The ANTLR Project. All rights reserved.\n * Use of this file is governed by the BSD 3-clause license that\n * can be found in the LICENSE.txt file in the project root.\n */\n\nexports.DFA = require('./DFA');\nexports.DFASerializer = require('./DFASerializer').DFASerializer;\nexports.LexerDFASerializer = require('./DFASerializer').LexerDFASerializer;\nexports.PredPrediction = require('./DFAState').PredPrediction;\n", "/*! 
https://mths.be/fromcodepoint v0.2.1 by @mathias */\nif (!String.fromCodePoint) {\n\t(function() {\n\t\tconst defineProperty = (function() {\n\t\t\t// IE 8 only supports `Object.defineProperty` on DOM elements\n\t\t\tlet result;\n\t\t\ttry {\n\t\t\t\tconst object = {};\n\t\t\t\tconst $defineProperty = Object.defineProperty;\n\t\t\t\tresult = $defineProperty(object, object, object) && $defineProperty;\n\t\t\t} catch(error) {}\n\t\t\treturn result;\n\t\t}());\n\t\tconst stringFromCharCode = String.fromCharCode;\n\t\tconst floor = Math.floor;\n\t\tconst fromCodePoint = function(_) {\n\t\t\tconst MAX_SIZE = 0x4000;\n\t\t\tconst codeUnits = [];\n\t\t\tlet highSurrogate;\n\t\t\tlet lowSurrogate;\n\t\t\tlet index = -1;\n\t\t\tconst length = arguments.length;\n\t\t\tif (!length) {\n\t\t\t\treturn '';\n\t\t\t}\n\t\t\tlet result = '';\n\t\t\twhile (++index < length) {\n\t\t\t\tlet codePoint = Number(arguments[index]);\n\t\t\t\tif (\n\t\t\t\t\t!isFinite(codePoint) || // `NaN`, `+Infinity`, or `-Infinity`\n\t\t\t\t\tcodePoint < 0 || // not a valid Unicode code point\n\t\t\t\t\tcodePoint > 0x10FFFF || // not a valid Unicode code point\n\t\t\t\t\tfloor(codePoint) !== codePoint // not an integer\n\t\t\t\t) {\n\t\t\t\t\tthrow RangeError('Invalid code point: ' + codePoint);\n\t\t\t\t}\n\t\t\t\tif (codePoint <= 0xFFFF) { // BMP code point\n\t\t\t\t\tcodeUnits.push(codePoint);\n\t\t\t\t} else { // Astral code point; split in surrogate halves\n\t\t\t\t\t// https://mathiasbynens.be/notes/javascript-encoding#surrogate-formulae\n\t\t\t\t\tcodePoint -= 0x10000;\n\t\t\t\t\thighSurrogate = (codePoint >> 10) + 0xD800;\n\t\t\t\t\tlowSurrogate = (codePoint % 0x400) + 0xDC00;\n\t\t\t\t\tcodeUnits.push(highSurrogate, lowSurrogate);\n\t\t\t\t}\n\t\t\t\tif (index + 1 === length || codeUnits.length > MAX_SIZE) {\n\t\t\t\t\tresult += stringFromCharCode.apply(null, codeUnits);\n\t\t\t\t\tcodeUnits.length = 0;\n\t\t\t\t}\n\t\t\t}\n\t\t\treturn result;\n\t\t};\n\t\tif (defineProperty) 
{\n\t\t\tdefineProperty(String, 'fromCodePoint', {\n\t\t\t\t'value': fromCodePoint,\n\t\t\t\t'configurable': true,\n\t\t\t\t'writable': true\n\t\t\t});\n\t\t} else {\n\t\t\tString.fromCodePoint = fromCodePoint;\n\t\t}\n\t}());\n}\n", "/* Copyright (c) 2012-2017 The ANTLR Project. All rights reserved.\n * Use of this file is governed by the BSD 3-clause license that\n * can be found in the LICENSE.txt file in the project root.\n */\n\nconst Tree = require('./Tree');\nconst Trees = require('./Trees');\nmodule.exports = {...Tree, Trees}\n", "/* Copyright (c) 2012-2017 The ANTLR Project. All rights reserved.\n * Use of this file is governed by the BSD 3-clause license that\n * can be found in the LICENSE.txt file in the project root.\n */\n\nconst {BitSet} = require('./../Utils');\nconst {ErrorListener} = require('./ErrorListener')\nconst {Interval} = require('./../IntervalSet')\n\n\n/**\n * This implementation of {@link ANTLRErrorListener} can be used to identify\n * certain potential correctness and performance problems in grammars. \"Reports\"\n * are made by calling {@link Parser//notifyErrorListeners} with the appropriate\n * message.\n *\n *
      \n *
    • Ambiguities: These are cases where more than one path through the\n * grammar can match the input.
    • \n *
    • Weak context sensitivity: These are cases where full-context\n * prediction resolved an SLL conflict to a unique alternative which equaled the\n * minimum alternative of the SLL conflict.
    • \n *
    • Strong (forced) context sensitivity: These are cases where the\n * full-context prediction resolved an SLL conflict to a unique alternative,\n * and the minimum alternative of the SLL conflict was found to not be\n * a truly viable alternative. Two-stage parsing cannot be used for inputs where\n * this situation occurs.
    • \n *
    \n */\nclass DiagnosticErrorListener extends ErrorListener {\n\tconstructor(exactOnly) {\n\t\tsuper();\n\t\texactOnly = exactOnly || true;\n\t\t// whether all ambiguities or only exact ambiguities are reported.\n\t\tthis.exactOnly = exactOnly;\n\t}\n\n\treportAmbiguity(recognizer, dfa, startIndex, stopIndex, exact, ambigAlts, configs) {\n\t\tif (this.exactOnly && !exact) {\n\t\t\treturn;\n\t\t}\n\t\tconst msg = \"reportAmbiguity d=\" +\n\t\t\tthis.getDecisionDescription(recognizer, dfa) +\n\t\t\t\": ambigAlts=\" +\n\t\t\tthis.getConflictingAlts(ambigAlts, configs) +\n\t\t\t\", input='\" +\n\t\t\trecognizer.getTokenStream().getText(new Interval(startIndex, stopIndex)) + \"'\"\n\t\trecognizer.notifyErrorListeners(msg);\n\t}\n\n\treportAttemptingFullContext(recognizer, dfa, startIndex, stopIndex, conflictingAlts, configs) {\n\t\tconst msg = \"reportAttemptingFullContext d=\" +\n\t\t\tthis.getDecisionDescription(recognizer, dfa) +\n\t\t\t\", input='\" +\n\t\t\trecognizer.getTokenStream().getText(new Interval(startIndex, stopIndex)) + \"'\"\n\t\trecognizer.notifyErrorListeners(msg);\n\t}\n\n\treportContextSensitivity(recognizer, dfa, startIndex, stopIndex, prediction, configs) {\n\t\tconst msg = \"reportContextSensitivity d=\" +\n\t\t\tthis.getDecisionDescription(recognizer, dfa) +\n\t\t\t\", input='\" +\n\t\t\trecognizer.getTokenStream().getText(new Interval(startIndex, stopIndex)) + \"'\"\n\t\trecognizer.notifyErrorListeners(msg);\n\t}\n\n\tgetDecisionDescription(recognizer, dfa) {\n\t\tconst decision = dfa.decision\n\t\tconst ruleIndex = dfa.atnStartState.ruleIndex\n\n\t\tconst ruleNames = recognizer.ruleNames\n\t\tif (ruleIndex < 0 || ruleIndex >= ruleNames.length) {\n\t\t\treturn \"\" + decision;\n\t\t}\n\t\tconst ruleName = ruleNames[ruleIndex] || null\n\t\tif (ruleName === null || ruleName.length === 0) {\n\t\t\treturn \"\" + decision;\n\t\t}\n\t\treturn `${decision} (${ruleName})`;\n\t}\n\n\t/**\n\t * Computes the set of conflicting or ambiguous alternatives 
from a\n\t * configuration set, if that information was not already provided by the\n\t * parser.\n\t *\n\t * @param reportedAlts The set of conflicting or ambiguous alternatives, as\n\t * reported by the parser.\n\t * @param configs The conflicting or ambiguous configuration set.\n\t * @return Returns {@code reportedAlts} if it is not {@code null}, otherwise\n\t * returns the set of alternatives represented in {@code configs}.\n */\n\tgetConflictingAlts(reportedAlts, configs) {\n\t\tif (reportedAlts !== null) {\n\t\t\treturn reportedAlts;\n\t\t}\n\t\tconst result = new BitSet()\n\t\tfor (let i = 0; i < configs.items.length; i++) {\n\t\t\tresult.add(configs.items[i].alt);\n\t\t}\n\t\treturn `{${result.values().join(\", \")}}`;\n\t}\n}\n\nmodule.exports = DiagnosticErrorListener\n", "/* Copyright (c) 2012-2017 The ANTLR Project. All rights reserved.\n * Use of this file is governed by the BSD 3-clause license that\n * can be found in the LICENSE.txt file in the project root.\n */\n\nconst {Token} = require('./../Token')\nconst {NoViableAltException, InputMismatchException, FailedPredicateException, ParseCancellationException} = require('./Errors')\nconst {ATNState} = require('./../atn/ATNState')\nconst {Interval, IntervalSet} = require('./../IntervalSet')\n\nclass ErrorStrategy {\n\n reset(recognizer) {\n }\n\n recoverInline(recognizer) {\n }\n\n recover(recognizer, e) {\n }\n\n sync(recognizer) {\n }\n\n inErrorRecoveryMode(recognizer) {\n }\n\n reportError(recognizer) {\n }\n}\n\n\n/**\n * This is the default implementation of {@link ANTLRErrorStrategy} used for\n * error reporting and recovery in ANTLR parsers.\n*/\nclass DefaultErrorStrategy extends ErrorStrategy {\n constructor() {\n super();\n /**\n * Indicates whether the error strategy is currently \"recovering from an\n * error\". 
This is used to suppress reporting multiple error messages while\n * attempting to recover from a detected syntax error.\n *\n * @see //inErrorRecoveryMode\n */\n this.errorRecoveryMode = false;\n\n /**\n * The index into the input stream where the last error occurred.\n * This is used to prevent infinite loops where an error is found\n * but no token is consumed during recovery...another error is found,\n * ad nauseum. This is a failsafe mechanism to guarantee that at least\n * one token/tree node is consumed for two errors.\n */\n this.lastErrorIndex = -1;\n this.lastErrorStates = null;\n this.nextTokensContext = null;\n this.nextTokenState = 0;\n }\n\n /**\n *

    The default implementation simply calls {@link //endErrorCondition} to\n * ensure that the handler is not in error recovery mode.

    \n */\n reset(recognizer) {\n this.endErrorCondition(recognizer);\n }\n\n /**\n * This method is called to enter error recovery mode when a recognition\n * exception is reported.\n *\n * @param recognizer the parser instance\n */\n beginErrorCondition(recognizer) {\n this.errorRecoveryMode = true;\n }\n\n inErrorRecoveryMode(recognizer) {\n return this.errorRecoveryMode;\n }\n\n /**\n * This method is called to leave error recovery mode after recovering from\n * a recognition exception.\n * @param recognizer\n */\n endErrorCondition(recognizer) {\n this.errorRecoveryMode = false;\n this.lastErrorStates = null;\n this.lastErrorIndex = -1;\n }\n\n /**\n * {@inheritDoc}\n *

    The default implementation simply calls {@link //endErrorCondition}.

    \n */\n reportMatch(recognizer) {\n this.endErrorCondition(recognizer);\n }\n\n /**\n * {@inheritDoc}\n *\n *

    The default implementation returns immediately if the handler is already\n * in error recovery mode. Otherwise, it calls {@link //beginErrorCondition}\n * and dispatches the reporting task based on the runtime type of {@code e}\n * according to the following table.

    \n *\n *
      \n *
    • {@link NoViableAltException}: Dispatches the call to\n * {@link //reportNoViableAlternative}
    • \n *
    • {@link InputMismatchException}: Dispatches the call to\n * {@link //reportInputMismatch}
    • \n *
    • {@link FailedPredicateException}: Dispatches the call to\n * {@link //reportFailedPredicate}
    • \n *
    • All other types: calls {@link Parser//notifyErrorListeners} to report\n * the exception
    • \n *
    \n */\n reportError(recognizer, e) {\n // if we've already reported an error and have not matched a token\n // yet successfully, don't report any errors.\n if(this.inErrorRecoveryMode(recognizer)) {\n return; // don't report spurious errors\n }\n this.beginErrorCondition(recognizer);\n if ( e instanceof NoViableAltException ) {\n this.reportNoViableAlternative(recognizer, e);\n } else if ( e instanceof InputMismatchException ) {\n this.reportInputMismatch(recognizer, e);\n } else if ( e instanceof FailedPredicateException ) {\n this.reportFailedPredicate(recognizer, e);\n } else {\n console.log(\"unknown recognition error type: \" + e.constructor.name);\n console.log(e.stack);\n recognizer.notifyErrorListeners(e.getOffendingToken(), e.getMessage(), e);\n }\n }\n\n /**\n *\n * {@inheritDoc}\n *\n *

    The default implementation resynchronizes the parser by consuming tokens\n * until we find one in the resynchronization set--loosely the set of tokens\n * that can follow the current rule.

    \n *\n */\n recover(recognizer, e) {\n if (this.lastErrorIndex===recognizer.getInputStream().index &&\n this.lastErrorStates !== null && this.lastErrorStates.indexOf(recognizer.state)>=0) {\n // uh oh, another error at same token index and previously-visited\n // state in ATN; must be a case where LT(1) is in the recovery\n // token set so nothing got consumed. Consume a single token\n // at least to prevent an infinite loop; this is a failsafe.\n recognizer.consume();\n }\n this.lastErrorIndex = recognizer._input.index;\n if (this.lastErrorStates === null) {\n this.lastErrorStates = [];\n }\n this.lastErrorStates.push(recognizer.state);\n const followSet = this.getErrorRecoverySet(recognizer)\n this.consumeUntil(recognizer, followSet);\n }\n\n /**\n * The default implementation of {@link ANTLRErrorStrategy//sync} makes sure\n * that the current lookahead symbol is consistent with what were expecting\n * at this point in the ATN. You can call this anytime but ANTLR only\n * generates code to check before subrules/loops and each iteration.\n *\n *

    Implements Jim Idle's magic sync mechanism in closures and optional\n * subrules. E.g.,

    \n *\n *
    \n     * a : sync ( stuff sync )* ;\n     * sync : {consume to what can follow sync} ;\n     * 
    \n *\n * At the start of a sub rule upon error, {@link //sync} performs single\n * token deletion, if possible. If it can't do that, it bails on the current\n * rule and uses the default error recovery, which consumes until the\n * resynchronization set of the current rule.\n *\n *

    If the sub rule is optional ({@code (...)?}, {@code (...)*}, or block\n * with an empty alternative), then the expected set includes what follows\n * the subrule.

    \n *\n *

    During loop iteration, it consumes until it sees a token that can start a\n * sub rule or what follows loop. Yes, that is pretty aggressive. We opt to\n * stay in the loop as long as possible.

    \n *\n *

    ORIGINS

    \n *\n *

    Previous versions of ANTLR did a poor job of their recovery within loops.\n * A single mismatch token or missing token would force the parser to bail\n * out of the entire rules surrounding the loop. So, for rule

    \n *\n *
    \n     * classDef : 'class' ID '{' member* '}'\n     * 
    \n *\n * input with an extra token between members would force the parser to\n * consume until it found the next class definition rather than the next\n * member definition of the current class.\n *\n *

    This functionality cost a little bit of effort because the parser has to\n * compare token set at the start of the loop and at each iteration. If for\n * some reason speed is suffering for you, you can turn off this\n * functionality by simply overriding this method as a blank { }.

    \n *\n */\n sync(recognizer) {\n // If already recovering, don't try to sync\n if (this.inErrorRecoveryMode(recognizer)) {\n return;\n }\n const s = recognizer._interp.atn.states[recognizer.state];\n const la = recognizer.getTokenStream().LA(1);\n // try cheaper subset first; might get lucky. seems to shave a wee bit off\n const nextTokens = recognizer.atn.nextTokens(s);\n if(nextTokens.contains(la)) {\n this.nextTokensContext = null;\n this.nextTokenState = ATNState.INVALID_STATE_NUMBER;\n return;\n } else if (nextTokens.contains(Token.EPSILON)) {\n if(this.nextTokensContext === null) {\n // It's possible the next token won't match information tracked\n // by sync is restricted for performance.\n this.nextTokensContext = recognizer._ctx;\n this.nextTokensState = recognizer._stateNumber;\n }\n return;\n }\n switch (s.stateType) {\n case ATNState.BLOCK_START:\n case ATNState.STAR_BLOCK_START:\n case ATNState.PLUS_BLOCK_START:\n case ATNState.STAR_LOOP_ENTRY:\n // report error and recover if possible\n if( this.singleTokenDeletion(recognizer) !== null) {\n return;\n } else {\n throw new InputMismatchException(recognizer);\n }\n case ATNState.PLUS_LOOP_BACK:\n case ATNState.STAR_LOOP_BACK:\n this.reportUnwantedToken(recognizer);\n const expecting = new IntervalSet()\n expecting.addSet(recognizer.getExpectedTokens());\n const whatFollowsLoopIterationOrRule = expecting.addSet(this.getErrorRecoverySet(recognizer))\n this.consumeUntil(recognizer, whatFollowsLoopIterationOrRule);\n break;\n default:\n // do nothing if we can't identify the exact kind of ATN state\n }\n }\n\n /**\n * This is called by {@link //reportError} when the exception is a\n * {@link NoViableAltException}.\n *\n * @see //reportError\n *\n * @param recognizer the parser instance\n * @param e the recognition exception\n */\n reportNoViableAlternative(recognizer, e) {\n const tokens = recognizer.getTokenStream()\n let input\n if(tokens !== null) {\n if (e.startToken.type===Token.EOF) {\n input = 
\"\";\n } else {\n input = tokens.getText(new Interval(e.startToken.tokenIndex, e.offendingToken.tokenIndex));\n }\n } else {\n input = \"\";\n }\n const msg = \"no viable alternative at input \" + this.escapeWSAndQuote(input)\n recognizer.notifyErrorListeners(msg, e.offendingToken, e);\n }\n\n /**\n * This is called by {@link //reportError} when the exception is an\n * {@link InputMismatchException}.\n *\n * @see //reportError\n *\n * @param recognizer the parser instance\n * @param e the recognition exception\n */\n reportInputMismatch(recognizer, e) {\n const msg = \"mismatched input \" + this.getTokenErrorDisplay(e.offendingToken) +\n \" expecting \" + e.getExpectedTokens().toString(recognizer.literalNames, recognizer.symbolicNames)\n recognizer.notifyErrorListeners(msg, e.offendingToken, e);\n }\n\n /**\n * This is called by {@link //reportError} when the exception is a\n * {@link FailedPredicateException}.\n *\n * @see //reportError\n *\n * @param recognizer the parser instance\n * @param e the recognition exception\n */\n reportFailedPredicate(recognizer, e) {\n const ruleName = recognizer.ruleNames[recognizer._ctx.ruleIndex]\n const msg = \"rule \" + ruleName + \" \" + e.message\n recognizer.notifyErrorListeners(msg, e.offendingToken, e);\n }\n\n /**\n * This method is called to report a syntax error which requires the removal\n * of a token from the input stream. At the time this method is called, the\n * erroneous symbol is current {@code LT(1)} symbol and has not yet been\n * removed from the input stream. When this method returns,\n * {@code recognizer} is in error recovery mode.\n *\n *

    This method is called when {@link //singleTokenDeletion} identifies\n * single-token deletion as a viable recovery strategy for a mismatched\n * input error.

    \n *\n *

    The default implementation simply returns if the handler is already in\n * error recovery mode. Otherwise, it calls {@link //beginErrorCondition} to\n * enter error recovery mode, followed by calling\n * {@link Parser//notifyErrorListeners}.

    \n *\n * @param recognizer the parser instance\n *\n */\n reportUnwantedToken(recognizer) {\n if (this.inErrorRecoveryMode(recognizer)) {\n return;\n }\n this.beginErrorCondition(recognizer);\n const t = recognizer.getCurrentToken()\n const tokenName = this.getTokenErrorDisplay(t)\n const expecting = this.getExpectedTokens(recognizer)\n const msg = \"extraneous input \" + tokenName + \" expecting \" +\n expecting.toString(recognizer.literalNames, recognizer.symbolicNames)\n recognizer.notifyErrorListeners(msg, t, null);\n }\n\n /**\n * This method is called to report a syntax error which requires the\n * insertion of a missing token into the input stream. At the time this\n * method is called, the missing token has not yet been inserted. When this\n * method returns, {@code recognizer} is in error recovery mode.\n *\n *

    This method is called when {@link //singleTokenInsertion} identifies\n * single-token insertion as a viable recovery strategy for a mismatched\n * input error.

    \n *\n *

    The default implementation simply returns if the handler is already in\n * error recovery mode. Otherwise, it calls {@link //beginErrorCondition} to\n * enter error recovery mode, followed by calling\n * {@link Parser//notifyErrorListeners}.

    \n *\n * @param recognizer the parser instance\n */\n reportMissingToken(recognizer) {\n if ( this.inErrorRecoveryMode(recognizer)) {\n return;\n }\n this.beginErrorCondition(recognizer);\n const t = recognizer.getCurrentToken()\n const expecting = this.getExpectedTokens(recognizer)\n const msg = \"missing \" + expecting.toString(recognizer.literalNames, recognizer.symbolicNames) +\n \" at \" + this.getTokenErrorDisplay(t)\n recognizer.notifyErrorListeners(msg, t, null);\n }\n\n /**\n *

    The default implementation attempts to recover from the mismatched input\n * by using single token insertion and deletion as described below. If the\n * recovery attempt fails, this method throws an\n * {@link InputMismatchException}.

    \n *\n *

    EXTRA TOKEN (single token deletion)

    \n *\n *

    {@code LA(1)} is not what we are looking for. If {@code LA(2)} has the\n * right token, however, then assume {@code LA(1)} is some extra spurious\n * token and delete it. Then consume and return the next token (which was\n * the {@code LA(2)} token) as the successful result of the match operation.

    \n *\n *

    This recovery strategy is implemented by {@link\n * //singleTokenDeletion}.

    \n *\n *

    MISSING TOKEN (single token insertion)

    \n *\n *

    If current token (at {@code LA(1)}) is consistent with what could come\n * after the expected {@code LA(1)} token, then assume the token is missing\n * and use the parser's {@link TokenFactory} to create it on the fly. The\n * \"insertion\" is performed by returning the created token as the successful\n * result of the match operation.

    \n *\n *

    This recovery strategy is implemented by {@link\n * //singleTokenInsertion}.

    \n *\n *

    EXAMPLE

    \n *\n *

    For example, Input {@code i=(3;} is clearly missing the {@code ')'}. When\n * the parser returns from the nested call to {@code expr}, it will have\n * call chain:

    \n *\n *
    \n     * stat → expr → atom\n     * 
    \n *\n * and it will be trying to match the {@code ')'} at this point in the\n * derivation:\n *\n *
    \n     * => ID '=' '(' INT ')' ('+' atom)* ';'\n     * ^\n     * 
    \n *\n * The attempt to match {@code ')'} will fail when it sees {@code ';'} and\n * call {@link //recoverInline}. To recover, it sees that {@code LA(1)==';'}\n * is in the set of tokens that can follow the {@code ')'} token reference\n * in rule {@code atom}. It can assume that you forgot the {@code ')'}.\n */\n recoverInline(recognizer) {\n // SINGLE TOKEN DELETION\n const matchedSymbol = this.singleTokenDeletion(recognizer)\n if (matchedSymbol !== null) {\n // we have deleted the extra token.\n // now, move past ttype token as if all were ok\n recognizer.consume();\n return matchedSymbol;\n }\n // SINGLE TOKEN INSERTION\n if (this.singleTokenInsertion(recognizer)) {\n return this.getMissingSymbol(recognizer);\n }\n // even that didn't work; must throw the exception\n throw new InputMismatchException(recognizer);\n }\n\n /**\n * This method implements the single-token insertion inline error recovery\n * strategy. It is called by {@link //recoverInline} if the single-token\n * deletion strategy fails to recover from the mismatched input. If this\n * method returns {@code true}, {@code recognizer} will be in error recovery\n * mode.\n *\n *

    This method determines whether or not single-token insertion is viable by\n * checking if the {@code LA(1)} input symbol could be successfully matched\n * if it were instead the {@code LA(2)} symbol. If this method returns\n * {@code true}, the caller is responsible for creating and inserting a\n * token with the correct type to produce this behavior.

    \n *\n * @param recognizer the parser instance\n * @return {@code true} if single-token insertion is a viable recovery\n * strategy for the current mismatched input, otherwise {@code false}\n */\n singleTokenInsertion(recognizer) {\n const currentSymbolType = recognizer.getTokenStream().LA(1)\n // if current token is consistent with what could come after current\n // ATN state, then we know we're missing a token; error recovery\n // is free to conjure up and insert the missing token\n const atn = recognizer._interp.atn\n const currentState = atn.states[recognizer.state]\n const next = currentState.transitions[0].target\n const expectingAtLL2 = atn.nextTokens(next, recognizer._ctx)\n if (expectingAtLL2.contains(currentSymbolType) ){\n this.reportMissingToken(recognizer);\n return true;\n } else {\n return false;\n }\n }\n\n /**\n * This method implements the single-token deletion inline error recovery\n * strategy. It is called by {@link //recoverInline} to attempt to recover\n * from mismatched input. If this method returns null, the parser and error\n * handler state will not have changed. If this method returns non-null,\n * {@code recognizer} will not be in error recovery mode since the\n * returned token was a successful match.\n *\n *

    If the single-token deletion is successful, this method calls\n * {@link //reportUnwantedToken} to report the error, followed by\n * {@link Parser//consume} to actually \"delete\" the extraneous token. Then,\n * before returning {@link //reportMatch} is called to signal a successful\n * match.

    \n *\n * @param recognizer the parser instance\n * @return the successfully matched {@link Token} instance if single-token\n * deletion successfully recovers from the mismatched input, otherwise\n * {@code null}\n */\n singleTokenDeletion(recognizer) {\n const nextTokenType = recognizer.getTokenStream().LA(2)\n const expecting = this.getExpectedTokens(recognizer)\n if (expecting.contains(nextTokenType)) {\n this.reportUnwantedToken(recognizer);\n // print(\"recoverFromMismatchedToken deleting \" \\\n // + str(recognizer.getTokenStream().LT(1)) \\\n // + \" since \" + str(recognizer.getTokenStream().LT(2)) \\\n // + \" is what we want\", file=sys.stderr)\n recognizer.consume(); // simply delete extra token\n // we want to return the token we're actually matching\n const matchedSymbol = recognizer.getCurrentToken()\n this.reportMatch(recognizer); // we know current token is correct\n return matchedSymbol;\n } else {\n return null;\n }\n }\n\n /**\n * Conjure up a missing token during error recovery.\n *\n * The recognizer attempts to recover from single missing\n * symbols. But, actions might refer to that missing symbol.\n * For example, x=ID {f($x);}. The action clearly assumes\n * that there has been an identifier matched previously and that\n * $x points at that token. If that token is missing, but\n * the next token in the stream is what we want we assume that\n * this token is missing and we keep going. Because we\n * have to return some token to replace the missing token,\n * we have to conjure one up. This method gives the user control\n * over the tokens returned for missing tokens. Mostly,\n * you will want to create something special for identifier\n * tokens. For literals such as '{' and ',', the default\n * action in the parser or tree parser works. It simply creates\n * a CommonToken of the appropriate type. 
The text will be the token.\n * If you change what tokens must be created by the lexer,\n * override this method to create the appropriate tokens.\n *\n */\n getMissingSymbol(recognizer) {\n const currentSymbol = recognizer.getCurrentToken()\n const expecting = this.getExpectedTokens(recognizer)\n const expectedTokenType = expecting.first() // get any element\n let tokenText\n if (expectedTokenType===Token.EOF) {\n tokenText = \"\";\n } else {\n tokenText = \"\";\n }\n let current = currentSymbol\n const lookback = recognizer.getTokenStream().LT(-1)\n if (current.type===Token.EOF && lookback !== null) {\n current = lookback;\n }\n return recognizer.getTokenFactory().create(current.source,\n expectedTokenType, tokenText, Token.DEFAULT_CHANNEL,\n -1, -1, current.line, current.column);\n }\n\n getExpectedTokens(recognizer) {\n return recognizer.getExpectedTokens();\n }\n\n /**\n * How should a token be displayed in an error message? The default\n * is to display just the text, but during development you might\n * want to have a lot of information spit out. Override in that case\n * to use t.toString() (which, for CommonToken, dumps everything about\n * the token). This is better than forcing you to override a method in\n * your token objects because you don't have to go modify your lexer\n * so that it creates a new Java type.\n */\n getTokenErrorDisplay(t) {\n if (t === null) {\n return \"\";\n }\n let s = t.text\n if (s === null) {\n if (t.type===Token.EOF) {\n s = \"\";\n } else {\n s = \"<\" + t.type + \">\";\n }\n }\n return this.escapeWSAndQuote(s);\n }\n\n escapeWSAndQuote(s) {\n s = s.replace(/\\n/g,\"\\\\n\");\n s = s.replace(/\\r/g,\"\\\\r\");\n s = s.replace(/\\t/g,\"\\\\t\");\n return \"'\" + s + \"'\";\n }\n\n /**\n * Compute the error recovery set for the current rule. 
During\n * rule invocation, the parser pushes the set of tokens that can\n * follow that rule reference on the stack; this amounts to\n * computing FIRST of what follows the rule reference in the\n * enclosing rule. See LinearApproximator.FIRST().\n * This local follow set only includes tokens\n * from within the rule; i.e., the FIRST computation done by\n * ANTLR stops at the end of a rule.\n *\n * EXAMPLE\n *\n * When you find a \"no viable alt exception\", the input is not\n * consistent with any of the alternatives for rule r. The best\n * thing to do is to consume tokens until you see something that\n * can legally follow a call to r//or* any rule that called r.\n * You don't want the exact set of viable next tokens because the\n * input might just be missing a token--you might consume the\n * rest of the input looking for one of the missing tokens.\n *\n * Consider grammar:\n *\n * a : '[' b ']'\n * | '(' b ')'\n * ;\n * b : c '^' INT ;\n * c : ID\n * | INT\n * ;\n *\n * At each rule invocation, the set of tokens that could follow\n * that rule is pushed on a stack. Here are the various\n * context-sensitive follow sets:\n *\n * FOLLOW(b1_in_a) = FIRST(']') = ']'\n * FOLLOW(b2_in_a) = FIRST(')') = ')'\n * FOLLOW(c_in_b) = FIRST('^') = '^'\n *\n * Upon erroneous input \"[]\", the call chain is\n *\n * a -> b -> c\n *\n * and, hence, the follow context stack is:\n *\n * depth follow set start of rule execution\n * 0 a (from main())\n * 1 ']' b\n * 2 '^' c\n *\n * Notice that ')' is not included, because b would have to have\n * been called from a different context in rule a for ')' to be\n * included.\n *\n * For error recovery, we cannot consider FOLLOW(c)\n * (context-sensitive or otherwise). We need the combined set of\n * all context-sensitive FOLLOW sets--the set of all tokens that\n * could follow any reference in the call chain. We need to\n * resync to one of those tokens. 
Note that FOLLOW(c)='^' and if\n * we resync'd to that token, we'd consume until EOF. We need to\n * sync to context-sensitive FOLLOWs for a, b, and c: {']','^'}.\n * In this case, for input \"[]\", LA(1) is ']' and in the set, so we would\n * not consume anything. After printing an error, rule c would\n * return normally. Rule b would not find the required '^' though.\n * At this point, it gets a mismatched token error and throws an\n * exception (since LA(1) is not in the viable following token\n * set). The rule exception handler tries to recover, but finds\n * the same recovery set and doesn't consume anything. Rule b\n * exits normally returning to rule a. Now it finds the ']' (and\n * with the successful match exits errorRecovery mode).\n *\n * So, you can see that the parser walks up the call chain looking\n * for the token that was a member of the recovery set.\n *\n * Errors are not generated in errorRecovery mode.\n *\n * ANTLR's error recovery mechanism is based upon original ideas:\n *\n * \"Algorithms + Data Structures = Programs\" by Niklaus Wirth\n *\n * and\n *\n * \"A note on error recovery in recursive descent parsers\":\n * http://portal.acm.org/citation.cfm?id=947902.947905\n *\n * Later, Josef Grosch had some good ideas:\n *\n * \"Efficient and Comfortable Error Recovery in Recursive Descent\n * Parsers\":\n * ftp://www.cocolab.com/products/cocktail/doca4.ps/ell.ps.zip\n *\n * Like Grosch I implement context-sensitive FOLLOW sets that are combined\n * at run-time upon error to avoid overhead during parsing.\n */\n getErrorRecoverySet(recognizer) {\n const atn = recognizer._interp.atn\n let ctx = recognizer._ctx\n const recoverSet = new IntervalSet()\n while (ctx !== null && ctx.invokingState>=0) {\n // compute what follows who invoked us\n const invokingState = atn.states[ctx.invokingState]\n const rt = invokingState.transitions[0]\n const follow = atn.nextTokens(rt.followState)\n recoverSet.addSet(follow);\n ctx = ctx.parentCtx;\n }\n 
recoverSet.removeOne(Token.EPSILON);\n return recoverSet;\n }\n\n// Consume tokens until one matches the given token set.//\n consumeUntil(recognizer, set) {\n let ttype = recognizer.getTokenStream().LA(1)\n while( ttype !== Token.EOF && !set.contains(ttype)) {\n recognizer.consume();\n ttype = recognizer.getTokenStream().LA(1);\n }\n }\n}\n\n\n/**\n * This implementation of {@link ANTLRErrorStrategy} responds to syntax errors\n * by immediately canceling the parse operation with a\n * {@link ParseCancellationException}. The implementation ensures that the\n * {@link ParserRuleContext//exception} field is set for all parse tree nodes\n * that were not completed prior to encountering the error.\n *\n *

    \n * This error strategy is useful in the following scenarios.

    \n *\n *
      \n *
    • Two-stage parsing: This error strategy allows the first\n * stage of two-stage parsing to immediately terminate if an error is\n * encountered, and immediately fall back to the second stage. In addition to\n * avoiding wasted work by attempting to recover from errors here, the empty\n * implementation of {@link BailErrorStrategy//sync} improves the performance of\n * the first stage.
    • \n *
    • Silent validation: When syntax errors are not being\n * reported or logged, and the parse result is simply ignored if errors occur,\n * the {@link BailErrorStrategy} avoids wasting work on recovering from errors\n * when the result will be ignored either way.
    • \n *
    \n *\n *

    \n * {@code myparser.setErrorHandler(new BailErrorStrategy());}

    \n *\n * @see Parser//setErrorHandler(ANTLRErrorStrategy)\n * */\nclass BailErrorStrategy extends DefaultErrorStrategy {\n constructor() {\n super();\n }\n\n /**\n * Instead of recovering from exception {@code e}, re-throw it wrapped\n * in a {@link ParseCancellationException} so it is not caught by the\n * rule function catches. Use {@link Exception//getCause()} to get the\n * original {@link RecognitionException}.\n */\n recover(recognizer, e) {\n let context = recognizer._ctx\n while (context !== null) {\n context.exception = e;\n context = context.parentCtx;\n }\n throw new ParseCancellationException(e);\n }\n\n /**\n * Make sure we don't attempt to recover inline; if the parser\n * successfully recovers, it won't throw an exception.\n */\n recoverInline(recognizer) {\n this.recover(recognizer, new InputMismatchException(recognizer));\n }\n\n// Make sure we don't attempt to recover from problems in subrules.//\n sync(recognizer) {\n // pass\n }\n}\n\n\nmodule.exports = {BailErrorStrategy, DefaultErrorStrategy};\n", "/* Copyright (c) 2012-2017 The ANTLR Project. 
All rights reserved.\n * Use of this file is governed by the BSD 3-clause license that\n * can be found in the LICENSE.txt file in the project root.\n */\n\nmodule.exports.RecognitionException = require('./Errors').RecognitionException;\nmodule.exports.NoViableAltException = require('./Errors').NoViableAltException;\nmodule.exports.LexerNoViableAltException = require('./Errors').LexerNoViableAltException;\nmodule.exports.InputMismatchException = require('./Errors').InputMismatchException;\nmodule.exports.FailedPredicateException = require('./Errors').FailedPredicateException;\nmodule.exports.DiagnosticErrorListener = require('./DiagnosticErrorListener');\nmodule.exports.BailErrorStrategy = require('./ErrorStrategy').BailErrorStrategy;\nmodule.exports.DefaultErrorStrategy = require('./ErrorStrategy').DefaultErrorStrategy;\nmodule.exports.ErrorListener = require('./ErrorListener').ErrorListener;\n", "/* Copyright (c) 2012-2017 The ANTLR Project. All rights reserved.\n * Use of this file is governed by the BSD 3-clause license that\n * can be found in the LICENSE.txt file in the project root.\n */\n\nconst {Token} = require('./Token');\nrequire('./polyfills/codepointat');\nrequire('./polyfills/fromcodepoint');\n\n/**\n * If decodeToUnicodeCodePoints is true, the input is treated\n * as a series of Unicode code points.\n *\n * Otherwise, the input is treated as a series of 16-bit UTF-16 code\n * units.\n */\nclass InputStream {\n\tconstructor(data, decodeToUnicodeCodePoints) {\n\t\tthis.name = \"\";\n\t\tthis.strdata = data;\n\t\tthis.decodeToUnicodeCodePoints = decodeToUnicodeCodePoints || false;\n\t\t// _loadString - Vacuum all input from a string and then treat it like a buffer.\n\t\tthis._index = 0;\n\t\tthis.data = [];\n\t\tif (this.decodeToUnicodeCodePoints) {\n\t\t\tfor (let i = 0; i < this.strdata.length; ) {\n\t\t\t\tconst codePoint = this.strdata.codePointAt(i);\n\t\t\t\tthis.data.push(codePoint);\n\t\t\t\ti += codePoint <= 0xFFFF ? 
1 : 2;\n\t\t\t}\n\t\t} else {\n\t\t\tfor (let i = 0; i < this.strdata.length; i++) {\n\t\t\t\tconst codeUnit = this.strdata.charCodeAt(i);\n\t\t\t\tthis.data.push(codeUnit);\n\t\t\t}\n\t\t}\n\t\tthis._size = this.data.length;\n\t}\n\n\t/**\n\t * Reset the stream so that it's in the same state it was\n\t * when the object was created *except* the data array is not\n\t * touched.\n\t */\n\treset() {\n\t\tthis._index = 0;\n\t}\n\n\tconsume() {\n\t\tif (this._index >= this._size) {\n\t\t\t// assert this.LA(1) == Token.EOF\n\t\t\tthrow (\"cannot consume EOF\");\n\t\t}\n\t\tthis._index += 1;\n\t}\n\n\tLA(offset) {\n\t\tif (offset === 0) {\n\t\t\treturn 0; // undefined\n\t\t}\n\t\tif (offset < 0) {\n\t\t\toffset += 1; // e.g., translate LA(-1) to use offset=0\n\t\t}\n\t\tconst pos = this._index + offset - 1;\n\t\tif (pos < 0 || pos >= this._size) { // invalid\n\t\t\treturn Token.EOF;\n\t\t}\n\t\treturn this.data[pos];\n\t}\n\n\tLT(offset) {\n\t\treturn this.LA(offset);\n\t}\n\n// mark/release do nothing; we have entire buffer\n\tmark() {\n\t\treturn -1;\n\t}\n\n\trelease(marker) {\n\t}\n\n\t/**\n\t * consume() ahead until p==_index; can't just set p=_index as we must\n\t * update line and column. 
If we seek backwards, just set p\n\t */\n\tseek(_index) {\n\t\tif (_index <= this._index) {\n\t\t\tthis._index = _index; // just jump; don't update stream state (line,\n\t\t\t\t\t\t\t\t\t// ...)\n\t\t\treturn;\n\t\t}\n\t\t// seek forward\n\t\tthis._index = Math.min(_index, this._size);\n\t}\n\n\tgetText(start, stop) {\n\t\tif (stop >= this._size) {\n\t\t\tstop = this._size - 1;\n\t\t}\n\t\tif (start >= this._size) {\n\t\t\treturn \"\";\n\t\t} else {\n\t\t\tif (this.decodeToUnicodeCodePoints) {\n\t\t\t\tlet result = \"\";\n\t\t\t\tfor (let i = start; i <= stop; i++) {\n\t\t\t\t\tresult += String.fromCodePoint(this.data[i]);\n\t\t\t\t}\n\t\t\t\treturn result;\n\t\t\t} else {\n\t\t\t\treturn this.strdata.slice(start, stop + 1);\n\t\t\t}\n\t\t}\n\t}\n\n\ttoString() {\n\t\treturn this.strdata;\n\t}\n\n\tget index(){\n\t\treturn this._index;\n\t}\n\n\tget size(){\n\t\treturn this._size;\n\t}\n}\n\n\nmodule.exports = InputStream;\n", "/* Copyright (c) 2012-2017 The ANTLR Project. All rights reserved.\n * Use of this file is governed by the BSD 3-clause license that\n * can be found in the LICENSE.txt file in the project root.\n */\n\nconst InputStream = require('./InputStream');\nconst fs = require(\"fs\");\n\n/**\n * Utility functions to create InputStreams from various sources.\n *\n * All returned InputStreams support the full range of Unicode\n * up to U+10FFFF (the default behavior of InputStream only supports\n * code points up to U+FFFF).\n */\nconst CharStreams = {\n // Creates an InputStream from a string.\n fromString: function(str) {\n return new InputStream(str, true);\n },\n\n /**\n * Asynchronously creates an InputStream from a blob given the\n * encoding of the bytes in that blob (defaults to 'utf8' if\n * encoding is null).\n *\n * Invokes onLoad(result) on success, onError(error) on\n * failure.\n */\n fromBlob: function(blob, encoding, onLoad, onError) {\n const reader = new window.FileReader();\n reader.onload = function(e) {\n const is = new 
InputStream(e.target.result, true);\n onLoad(is);\n };\n reader.onerror = onError;\n reader.readAsText(blob, encoding);\n },\n\n /**\n * Creates an InputStream from a Buffer given the\n * encoding of the bytes in that buffer (defaults to 'utf8' if\n * encoding is null).\n */\n fromBuffer: function(buffer, encoding) {\n return new InputStream(buffer.toString(encoding), true);\n },\n\n /** Asynchronously creates an InputStream from a file on disk given\n * the encoding of the bytes in that file (defaults to 'utf8' if\n * encoding is null).\n *\n * Invokes callback(error, result) on completion.\n */\n fromPath: function(path, encoding, callback) {\n fs.readFile(path, encoding, function(err, data) {\n let is = null;\n if (data !== null) {\n is = new InputStream(data, true);\n }\n callback(err, is);\n });\n },\n\n /**\n * Synchronously creates an InputStream given a path to a file\n * on disk and the encoding of the bytes in that file (defaults to\n * 'utf8' if encoding is null).\n */\n fromPathSync: function(path, encoding) {\n const data = fs.readFileSync(path, encoding);\n return new InputStream(data, true);\n }\n};\n\nmodule.exports = CharStreams;\n", "/* Copyright (c) 2012-2017 The ANTLR Project. All rights reserved.\n * Use of this file is governed by the BSD 3-clause license that\n * can be found in the LICENSE.txt file in the project root.\n */\n\nconst InputStream = require('./InputStream');\nconst fs = require(\"fs\");\n\n/**\n * This is an InputStream that is loaded from a file all at once\n * when you construct the object.\n */\nclass FileStream extends InputStream {\n\tconstructor(fileName, decodeToUnicodeCodePoints) {\n\t\tconst data = fs.readFileSync(fileName, \"utf8\");\n\t\tsuper(data, decodeToUnicodeCodePoints);\n\t\tthis.fileName = fileName;\n\t}\n}\n\nmodule.exports = FileStream\n", "/* Copyright (c) 2012-2017 The ANTLR Project. 
All rights reserved.\n * Use of this file is governed by the BSD 3-clause license that\n * can be found in the LICENSE.txt file in the project root.\n */\n\nconst {Token} = require('./Token');\nconst Lexer = require('./Lexer');\nconst {Interval} = require('./IntervalSet');\n\n// this is just to keep meaningful parameter types to Parser\nclass TokenStream {}\n\n/**\n * This implementation of {@link TokenStream} loads tokens from a\n * {@link TokenSource} on-demand, and places the tokens in a buffer to provide\n * access to any previous token by index.\n *\n *

    \n * This token stream ignores the value of {@link Token//getChannel}. If your\n * parser requires the token stream filter tokens to only those on a particular\n * channel, such as {@link Token//DEFAULT_CHANNEL} or\n * {@link Token//HIDDEN_CHANNEL}, use a filtering token stream such a\n * {@link CommonTokenStream}.

    \n */\nclass BufferedTokenStream extends TokenStream {\n\tconstructor(tokenSource) {\n\n\t\tsuper();\n\t\t// The {@link TokenSource} from which tokens for this stream are fetched.\n\t\tthis.tokenSource = tokenSource;\n\t\t/**\n\t\t * A collection of all tokens fetched from the token source. The list is\n\t\t * considered a complete view of the input once {@link //fetchedEOF} is set\n\t\t * to {@code true}.\n\t\t */\n\t\tthis.tokens = [];\n\n\t\t/**\n\t\t * The index into {@link //tokens} of the current token (next token to\n\t\t * {@link //consume}). {@link //tokens}{@code [}{@link //p}{@code ]} should\n\t\t * be\n\t\t * {@link //LT LT(1)}.\n\t\t *\n\t\t *

    This field is set to -1 when the stream is first constructed or when\n\t\t * {@link //setTokenSource} is called, indicating that the first token has\n\t\t * not yet been fetched from the token source. For additional information,\n\t\t * see the documentation of {@link IntStream} for a description of\n\t\t * Initializing Methods.

    \n\t\t */\n\t\tthis.index = -1;\n\n\t\t/**\n\t\t * Indicates whether the {@link Token//EOF} token has been fetched from\n\t\t * {@link //tokenSource} and added to {@link //tokens}. This field improves\n\t\t * performance for the following cases:\n\t\t *\n\t\t *