@cparra/apex-reflection 0.1.0-alpha.0 → 0.1.1-alpha.10

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (158)
  1. package/__tests__/end-to-end.test.ts +256 -0
  2. package/coverage/clover.xml +12857 -0
  3. package/coverage/coverage-final.json +3 -0
  4. package/coverage/lcov-report/base.css +224 -0
  5. package/coverage/lcov-report/block-navigation.js +79 -0
  6. package/coverage/lcov-report/favicon.png +0 -0
  7. package/coverage/lcov-report/index.html +126 -0
  8. package/coverage/lcov-report/index.js.html +104 -0
  9. package/coverage/lcov-report/out.js.html +41126 -0
  10. package/coverage/lcov-report/prettify.css +1 -0
  11. package/coverage/lcov-report/prettify.js +2 -0
  12. package/coverage/lcov-report/sort-arrow-sprite.png +0 -0
  13. package/coverage/lcov-report/sorter.js +170 -0
  14. package/coverage/lcov.info +24801 -0
  15. package/index.d.ts +118 -0
  16. package/index.js +8 -0
  17. package/index.ts +141 -0
  18. package/jest.config.js +11 -0
  19. package/out.js +13708 -0
  20. package/package.json +22 -17
  21. package/tsconfig.json +5 -3
  22. package/.idea/apexdocs-dart.iml +0 -18
  23. package/.idea/jsLibraryMappings.xml +0 -6
  24. package/.idea/libraries/Dart_Packages.xml +0 -556
  25. package/.idea/libraries/Dart_SDK.xml +0 -28
  26. package/.idea/misc.xml +0 -6
  27. package/.idea/modules.xml +0 -8
  28. package/.idea/runConfigurations/_template__of_Dart_Test.xml +0 -6
  29. package/.idea/vcs.xml +0 -6
  30. package/CHANGELOG.md +0 -3
  31. package/README.md +0 -26
  32. package/TODO.md +0 -5
  33. package/analysis_options.yaml +0 -30
  34. package/antlr4-4.9.2/CHANGELOG.md +0 -4
  35. package/antlr4-4.9.2/LICENSE +0 -52
  36. package/antlr4-4.9.2/README.md +0 -11
  37. package/antlr4-4.9.2/analysis_options.yaml +0 -1
  38. package/antlr4-4.9.2/lib/antlr4.dart +0 -21
  39. package/antlr4-4.9.2/lib/src/atn/atn.dart +0 -18
  40. package/antlr4-4.9.2/lib/src/atn/src/atn.dart +0 -170
  41. package/antlr4-4.9.2/lib/src/atn/src/atn_config.dart +0 -242
  42. package/antlr4-4.9.2/lib/src/atn/src/atn_config_set.dart +0 -283
  43. package/antlr4-4.9.2/lib/src/atn/src/atn_deserializer.dart +0 -809
  44. package/antlr4-4.9.2/lib/src/atn/src/atn_simulator.dart +0 -95
  45. package/antlr4-4.9.2/lib/src/atn/src/atn_state.dart +0 -296
  46. package/antlr4-4.9.2/lib/src/atn/src/atn_type.dart +0 -14
  47. package/antlr4-4.9.2/lib/src/atn/src/info.dart +0 -553
  48. package/antlr4-4.9.2/lib/src/atn/src/lexer_action.dart +0 -601
  49. package/antlr4-4.9.2/lib/src/atn/src/lexer_action_executor.dart +0 -167
  50. package/antlr4-4.9.2/lib/src/atn/src/lexer_atn_simulator.dart +0 -731
  51. package/antlr4-4.9.2/lib/src/atn/src/parser_atn_simulator.dart +0 -2630
  52. package/antlr4-4.9.2/lib/src/atn/src/profiling_atn_simulator.dart +0 -229
  53. package/antlr4-4.9.2/lib/src/atn/src/semantic_context.dart +0 -404
  54. package/antlr4-4.9.2/lib/src/atn/src/transition.dart +0 -305
  55. package/antlr4-4.9.2/lib/src/dfa/dfa.dart +0 -8
  56. package/antlr4-4.9.2/lib/src/dfa/src/dfa.dart +0 -138
  57. package/antlr4-4.9.2/lib/src/dfa/src/dfa_serializer.dart +0 -76
  58. package/antlr4-4.9.2/lib/src/dfa/src/dfa_state.dart +0 -151
  59. package/antlr4-4.9.2/lib/src/error/error.dart +0 -10
  60. package/antlr4-4.9.2/lib/src/error/src/diagnostic_error_listener.dart +0 -116
  61. package/antlr4-4.9.2/lib/src/error/src/error_listener.dart +0 -241
  62. package/antlr4-4.9.2/lib/src/error/src/error_strategy.dart +0 -902
  63. package/antlr4-4.9.2/lib/src/error/src/errors.dart +0 -204
  64. package/antlr4-4.9.2/lib/src/input_stream.dart +0 -335
  65. package/antlr4-4.9.2/lib/src/interval_set.dart +0 -735
  66. package/antlr4-4.9.2/lib/src/lexer.dart +0 -343
  67. package/antlr4-4.9.2/lib/src/ll1_analyzer.dart +0 -204
  68. package/antlr4-4.9.2/lib/src/misc/multi_map.dart +0 -32
  69. package/antlr4-4.9.2/lib/src/misc/pair.dart +0 -34
  70. package/antlr4-4.9.2/lib/src/parser.dart +0 -777
  71. package/antlr4-4.9.2/lib/src/parser_interpreter.dart +0 -393
  72. package/antlr4-4.9.2/lib/src/parser_rule_context.dart +0 -275
  73. package/antlr4-4.9.2/lib/src/prediction_context.dart +0 -877
  74. package/antlr4-4.9.2/lib/src/recognizer.dart +0 -182
  75. package/antlr4-4.9.2/lib/src/rule_context.dart +0 -192
  76. package/antlr4-4.9.2/lib/src/runtime_meta_data.dart +0 -188
  77. package/antlr4-4.9.2/lib/src/token.dart +0 -431
  78. package/antlr4-4.9.2/lib/src/token_factory.dart +0 -88
  79. package/antlr4-4.9.2/lib/src/token_source.dart +0 -241
  80. package/antlr4-4.9.2/lib/src/token_stream.dart +0 -627
  81. package/antlr4-4.9.2/lib/src/tree/src/pattern/chunk.dart +0 -90
  82. package/antlr4-4.9.2/lib/src/tree/src/pattern/parse_tree_match.dart +0 -635
  83. package/antlr4-4.9.2/lib/src/tree/src/tree.dart +0 -370
  84. package/antlr4-4.9.2/lib/src/tree/src/trees.dart +0 -226
  85. package/antlr4-4.9.2/lib/src/tree/tree.dart +0 -10
  86. package/antlr4-4.9.2/lib/src/util/bit_set.dart +0 -308
  87. package/antlr4-4.9.2/lib/src/util/murmur_hash.dart +0 -77
  88. package/antlr4-4.9.2/lib/src/util/utils.dart +0 -31
  89. package/antlr4-4.9.2/lib/src/vocabulary.dart +0 -254
  90. package/antlr4-4.9.2/pubspec.yaml +0 -13
  91. package/example/node_example/index.js +0 -8
  92. package/example/node_example/package.json +0 -12
  93. package/example/node_example_ts/package-lock.json +0 -70
  94. package/example/node_example_ts/package.json +0 -19
  95. package/example/node_example_ts/src/index.js +0 -5
  96. package/example/node_example_ts/src/index.ts +0 -9
  97. package/example/node_example_ts/tsconfig.json +0 -79
  98. package/js/dart2jsout.js +0 -25898
  99. package/js/dart2jsout.js.map +0 -16
  100. package/js/index.d.ts +0 -1
  101. package/js/index.js +0 -4
  102. package/js/out.js +0 -26023
  103. package/js/out.js.map +0 -16
  104. package/js/package-lock.json +0 -57
  105. package/js/preamble.js +0 -125
  106. package/lib/apexdocs_dart.dart +0 -28
  107. package/lib/src/antlr/grammars/Apexdoc/ApexdocLexer.g4 +0 -120
  108. package/lib/src/antlr/grammars/Apexdoc/ApexdocParser.g4 +0 -158
  109. package/lib/src/antlr/grammars/Apexdoc/gen/ApexdocLexer.interp +0 -95
  110. package/lib/src/antlr/grammars/Apexdoc/gen/ApexdocLexer.java +0 -238
  111. package/lib/src/antlr/grammars/Apexdoc/gen/ApexdocLexer.tokens +0 -23
  112. package/lib/src/antlr/grammars/apex/ApexLexer.g4 +0 -255
  113. package/lib/src/antlr/grammars/apex/ApexParser.g4 +0 -567
  114. package/lib/src/antlr/grammars/apex/examples/ApexClass.cls +0 -6
  115. package/lib/src/antlr/lib/apex/ApexLexer.dart +0 -1223
  116. package/lib/src/antlr/lib/apex/ApexLexer.interp +0 -393
  117. package/lib/src/antlr/lib/apex/ApexLexer.tokens +0 -212
  118. package/lib/src/antlr/lib/apex/ApexParser.dart +0 -9349
  119. package/lib/src/antlr/lib/apex/ApexParser.interp +0 -326
  120. package/lib/src/antlr/lib/apex/ApexParser.tokens +0 -212
  121. package/lib/src/antlr/lib/apex/ApexParserBaseListener.dart +0 -1036
  122. package/lib/src/antlr/lib/apex/ApexParserListener.dart +0 -975
  123. package/lib/src/antlr/lib/apexdoc/ApexdocLexer.dart +0 -373
  124. package/lib/src/antlr/lib/apexdoc/ApexdocLexer.interp +0 -95
  125. package/lib/src/antlr/lib/apexdoc/ApexdocLexer.tokens +0 -23
  126. package/lib/src/antlr/lib/apexdoc/ApexdocParser.dart +0 -2471
  127. package/lib/src/antlr/lib/apexdoc/ApexdocParser.interp +0 -69
  128. package/lib/src/antlr/lib/apexdoc/ApexdocParser.tokens +0 -23
  129. package/lib/src/antlr/lib/apexdoc/ApexdocParserBaseListener.dart +0 -252
  130. package/lib/src/antlr/lib/apexdoc/ApexdocParserListener.dart +0 -215
  131. package/lib/src/builders/builders.dart +0 -32
  132. package/lib/src/model/apex_file_manifest.dart +0 -37
  133. package/lib/src/model/apex_file_manifest.g.dart +0 -18
  134. package/lib/src/model/declaration.dart +0 -50
  135. package/lib/src/model/doc_comment.dart +0 -117
  136. package/lib/src/model/doc_comment.g.dart +0 -118
  137. package/lib/src/model/members.dart +0 -143
  138. package/lib/src/model/members.g.dart +0 -105
  139. package/lib/src/model/types.dart +0 -159
  140. package/lib/src/model/types.g.dart +0 -111
  141. package/lib/src/service/apex_listener.dart +0 -226
  142. package/lib/src/service/apexdoc_listener.dart +0 -82
  143. package/lib/src/service/parsers.dart +0 -33
  144. package/lib/src/service/utils/parsing/access_modifiers_parser.dart +0 -33
  145. package/lib/src/service/utils/parsing/parameters_parser.dart +0 -18
  146. package/lib/src/service/utils/parsing/parsing_utils.dart +0 -2
  147. package/lib/src/service/walker.dart +0 -82
  148. package/pubspec.yaml +0 -19
  149. package/test/apex_file_manifest_test.dart +0 -16
  150. package/test/apex_listener_test.dart +0 -703
  151. package/test/apexdoc_parser_test.dart +0 -179
  152. package/test/doc_comment_test.dart +0 -89
  153. package/test/members_serialization_test.dart +0 -158
  154. package/test/members_test.dart +0 -178
  155. package/test/types_serialization_test.dart +0 -191
  156. package/test/types_test.dart +0 -311
  157. package/test/walker_test.dart +0 -58
  158. package/tool/grind.dart +0 -20
@@ -1,627 +0,0 @@
1
- /*
2
- * Copyright (c) 2012-2017 The ANTLR Project. All rights reserved.
3
- * Use of this file is governed by the BSD 3-clause license that
4
- * can be found in the LICENSE.txt file in the project root.
5
- */
6
- import 'input_stream.dart';
7
- import 'interval_set.dart';
8
- import 'lexer.dart';
9
- import 'rule_context.dart';
10
- import 'token.dart';
11
- import 'token_source.dart';
12
-
13
- /// An [IntStream] whose symbols are [Token] instances.
14
- abstract class TokenStream extends IntStream {
15
- /// Get the [Token] instance associated with the value returned by
16
- /// {@link #LA LA(k)}. This method has the same pre- and post-conditions as
17
- /// {@link IntStream#LA}. In addition, when the preconditions of this method
18
- /// are met, the return value is non-null and the value of
19
- /// {@code LT(k).getType()==LA(k)}.
20
- ///
21
- /// @see IntStream#LA
22
- Token LT(int k);
23
-
24
- /// Gets the [Token] at the specified [index] in the stream. When
25
- /// the preconditions of this method are met, the return value is non-null.
26
- ///
27
- /// <p>The preconditions for this method are the same as the preconditions of
28
- /// {@link IntStream#seek}. If the behavior of {@code seek(index)} is
29
- /// unspecified for the current state and given [index], then the
30
- /// behavior of this method is also unspecified.</p>
31
- ///
32
- /// <p>The symbol referred to by [index] differs from {@code seek()} only
33
- /// in the case of filtering streams where [index] lies before the end
34
- /// of the stream. Unlike {@code seek()}, this method does not adjust
35
- /// [index] to point to a non-ignored symbol.</p>
36
- ///
37
- /// @throws IllegalArgumentException if {code index} is less than 0
38
- /// @throws UnsupportedOperationException if the stream does not support
39
- /// retrieving the token at the specified index
40
- Token get(int index);
41
-
42
- /// Gets the underlying [TokenSource] which provides tokens for this
43
- /// stream.
44
- TokenSource get tokenSource;
45
-
46
- /// Return the text of all tokens within the specified [interval]. This
47
- /// method behaves like the following code (including potential exceptions
48
- /// for violating preconditions of {@link #get}, but may be optimized by the
49
- /// specific implementation.
50
- ///
51
- /// <pre>
52
- /// TokenStream stream = ...;
53
- /// String text = "";
54
- /// for (int i = interval.a; i &lt;= interval.b; i++) {
55
- /// text += stream.get(i).getText();
56
- /// }
57
- /// </pre>
58
- ///
59
- /// <pre>
60
- /// TokenStream stream = ...;
61
- /// String text = stream.getText(new Interval(0, stream.length));
62
- /// </pre>
63
- ///
64
- /// <pre>
65
- /// TokenStream stream = ...;
66
- /// String text = stream.getText(ctx.getSourceInterval());
67
- /// </pre>
68
- ///
69
- /// @param interval The interval of tokens within this stream to get text
70
- /// for.
71
- /// @return The text of all tokens / within the specified interval in this
72
- /// stream.
73
- String getText([Interval interval]);
74
-
75
- String get text;
76
-
77
- /// Return the text of all tokens in the source interval of the specified
78
- /// context. This method behaves like the following code, including potential
79
- /// exceptions from the call to {@link #getText(Interval)}, but may be
80
- /// optimized by the specific implementation.
81
- ///
82
- /// <p>If {@code ctx.getSourceInterval()} does not return a valid interval of
83
- /// tokens provided by this stream, the behavior is unspecified.</p>
84
- ///
85
- /// @param ctx The context providing the source interval of tokens to get
86
- /// text for.
87
- /// @return The text of all tokens within the source interval of [ctx].
88
- String getTextFromCtx(RuleContext ctx);
89
-
90
- /// Return the text of all tokens in this stream between [start] and
91
- /// [stop] (inclusive).
92
- ///
93
- /// <p>If the specified [start] or [stop] token was not provided by
94
- /// this stream, or if the [stop] occurred before the [start]
95
- /// token, the behavior is unspecified.</p>
96
- ///
97
- /// <p>For streams which ensure that the {@link Token#getTokenIndex} method is
98
- /// accurate for all of its provided tokens, this method behaves like the
99
- /// following code. Other streams may implement this method in other ways
100
- /// provided the behavior is consistent with this at a high level.</p>
101
- ///
102
- /// <pre>
103
- /// TokenStream stream = ...;
104
- /// String text = "";
105
- /// for (int i = start.getTokenIndex(); i &lt;= stop.getTokenIndex(); i++) {
106
- /// text += stream.get(i).getText();
107
- /// }
108
- /// </pre>
109
- ///
110
- /// @param start The first token in the interval to get text for.
111
- /// @param stop The last token in the interval to get text for (inclusive).
112
- /// @return The text of all tokens lying between the specified [start]
113
- /// and [stop] tokens.
114
- ///
115
- /// @throws UnsupportedOperationException if this stream does not support
116
- /// this method for the specified tokens
117
- String getTextRange(Token start, Token stop);
118
- }
119
-
120
- /// This implementation of [TokenStream] loads tokens from a
121
- /// [TokenSource] on-demand, and places the tokens in a buffer to provide
122
- /// access to any previous token by index.
123
- ///
124
- /// <p>
125
- /// This token stream ignores the value of {@link Token#getChannel}. If your
126
- /// parser requires the token stream filter tokens to only those on a particular
127
- /// channel, such as {@link Token#DEFAULT_CHANNEL} or
128
- /// {@link Token#HIDDEN_CHANNEL}, use a filtering token stream such a
129
- /// [CommonTokenStream].</p>
130
- class BufferedTokenStream implements TokenStream {
131
- /// The [TokenSource] from which tokens for this stream are fetched.
132
- TokenSource _tokenSource;
133
-
134
- /// A collection of all tokens fetched from the token source. The list is
135
- /// considered a complete view of the input once {@link #fetchedEOF} is set
136
- /// to [true].
137
- List<Token> tokens = <Token>[];
138
-
139
- /// The index into [tokens] of the current token (next token to [consume]).
140
- /// [tokens][p] should be [LT(1)].
141
- ///
142
- /// <p>This field is set to -1 when the stream is first constructed or when
143
- /// [tokenSource] is set, indicating that the first token has
144
- /// not yet been fetched from the token source. For additional information,
145
- /// see the documentation of [IntStream] for a description of
146
- /// Initializing Methods.</p>
147
- int p = -1;
148
-
149
- /// Indicates whether the [Token.EOF] token has been fetched from
150
- /// [tokenSource] and added to [tokens]. This field improves
151
- /// performance for the following cases:
152
- ///
153
- /// <ul>
154
- /// <li>{@link #consume}: The lookahead check in {@link #consume} to prevent
155
- /// consuming the EOF symbol is optimized by checking the values of
156
- /// {@link #fetchedEOF} and {@link #p} instead of calling {@link #LA}.</li>
157
- /// <li>{@link #fetch}: The check to prevent adding multiple EOF symbols into
158
- /// [{@link #]tokens} is trivial with this field.</li>
159
- /// <ul>
160
- bool fetchedEOF = false;
161
-
162
- BufferedTokenStream(this._tokenSource) {
163
- if (_tokenSource == null) {
164
- throw ArgumentError.notNull('tokenSource');
165
- }
166
- }
167
-
168
- @override
169
- int get index => p;
170
-
171
- @override
172
- int mark() {
173
- return 0;
174
- }
175
-
176
- @override
177
- void release(int marker) {
178
- // no resources to release
179
- }
180
-
181
- @override
182
- void seek(int index) {
183
- lazyInit();
184
- p = adjustSeekIndex(index);
185
- }
186
-
187
- @override
188
- int get size {
189
- return tokens.length;
190
- }
191
-
192
- @override
193
- void consume() {
194
- bool skipEofCheck;
195
- if (p >= 0) {
196
- if (fetchedEOF) {
197
- // the last token in tokens is EOF. skip check if p indexes any
198
- // fetched token except the last.
199
- skipEofCheck = p < tokens.length - 1;
200
- } else {
201
- // no EOF token in tokens. skip check if p indexes a fetched token.
202
- skipEofCheck = p < tokens.length;
203
- }
204
- } else {
205
- // not yet initialized
206
- skipEofCheck = false;
207
- }
208
-
209
- if (!skipEofCheck && LA(1) == IntStream.EOF) {
210
- throw StateError('cannot consume EOF');
211
- }
212
-
213
- if (sync(p + 1)) {
214
- p = adjustSeekIndex(p + 1);
215
- }
216
- }
217
-
218
- /// Make sure index [i] in tokens has a token.
219
- ///
220
- /// @return [true] if a token is located at index [i], otherwise
221
- /// [false].
222
- /// @see #get(int i)
223
- bool sync(int i) {
224
- assert(i >= 0);
225
- final n = i - tokens.length + 1; // how many more elements we need?
226
- //System.out.println("sync("+i+") needs "+n);
227
- if (n > 0) {
228
- final fetched = fetch(n);
229
- return fetched >= n;
230
- }
231
-
232
- return true;
233
- }
234
-
235
- /// Add [n] elements to buffer.
236
- ///
237
- /// @return The actual number of elements added to the buffer.
238
- int fetch(int n) {
239
- if (fetchedEOF) {
240
- return 0;
241
- }
242
-
243
- for (var i = 0; i < n; i++) {
244
- final t = tokenSource.nextToken();
245
- if (t is WritableToken) {
246
- t.tokenIndex = tokens.length;
247
- }
248
- tokens.add(t);
249
- if (t.type == Token.EOF) {
250
- fetchedEOF = true;
251
- return i + 1;
252
- }
253
- }
254
-
255
- return n;
256
- }
257
-
258
- @override
259
- Token get(int i) {
260
- if (i < 0 || i >= tokens.length) {
261
- throw RangeError.index(i, tokens);
262
- }
263
- return tokens[i];
264
- }
265
-
266
- /// Get all tokens from start..stop inclusively */
267
- List<Token> getRange(int start, [int stop]) {
268
- if (start < 0 || stop < 0) return null;
269
- lazyInit();
270
- final subset = <Token>[];
271
- if (stop >= tokens.length) stop = tokens.length - 1;
272
- for (var i = start; i <= stop; i++) {
273
- final t = tokens[i];
274
- if (t.type == Token.EOF) break;
275
- subset.add(t);
276
- }
277
- return subset;
278
- }
279
-
280
- @override
281
- int LA(int i) {
282
- return LT(i).type;
283
- }
284
-
285
- Token LB(int k) {
286
- if ((p - k) < 0) return null;
287
- return tokens[p - k];
288
- }
289
-
290
- @override
291
- Token LT(int k) {
292
- lazyInit();
293
- if (k == 0) return null;
294
- if (k < 0) return LB(-k);
295
-
296
- final i = p + k - 1;
297
- sync(i);
298
- if (i >= tokens.length) {
299
- // return EOF token
300
- // EOF must be last token
301
- return tokens.last;
302
- }
303
- // if ( i>range ) range = i;
304
- return tokens[i];
305
- }
306
-
307
- /// Allowed derived classes to modify the behavior of operations which change
308
- /// the current stream position by adjusting the target token index of a seek
309
- /// operation. The default implementation simply returns [i]. If an
310
- /// exception is thrown in this method, the current stream index should not be
311
- /// changed.
312
- ///
313
- /// <p>For example, [CommonTokenStream] overrides this method to ensure that
314
- /// the seek target is always an on-channel token.</p>
315
- ///
316
- /// @param i The target token index.
317
- /// @return The adjusted target token index.
318
- int adjustSeekIndex(int i) {
319
- return i;
320
- }
321
-
322
- void lazyInit() {
323
- if (p == -1) {
324
- setup();
325
- }
326
- }
327
-
328
- void setup() {
329
- sync(0);
330
- p = adjustSeekIndex(0);
331
- }
332
-
333
- @override
334
- TokenSource get tokenSource => _tokenSource;
335
-
336
- /// Reset this token stream by setting its token source. */
337
- set tokenSource(TokenSource tokenSource) {
338
- _tokenSource = tokenSource;
339
- tokens.clear();
340
- p = -1;
341
- fetchedEOF = false;
342
- }
343
-
344
- /// Given a start and stop index, return a List of all tokens in
345
- /// the token type BitSet. Return null if no tokens were found. This
346
- /// method looks at both on and off channel tokens.
347
- List<Token> getTokens(
348
- [int start, int stop, Set<int> types]) {
349
- if (start == null && stop == null) {
350
- return tokens;
351
- }
352
- lazyInit();
353
- if (start < 0 || start >= tokens.length) {
354
- throw RangeError.index(start, tokens);
355
- } else if (stop < 0 || stop >= tokens.length) {
356
- throw RangeError.index(stop, tokens);
357
- }
358
- if (start > stop) return null;
359
-
360
- // list = tokens[start:stop]:{T t, t.getType() in types}
361
- var filteredTokens = <Token>[];
362
- for (var i = start; i <= stop; i++) {
363
- final t = tokens[i];
364
- if (types == null || types.contains(t.type)) {
365
- filteredTokens.add(t);
366
- }
367
- }
368
- if (filteredTokens.isEmpty) {
369
- filteredTokens = null;
370
- }
371
- return filteredTokens;
372
- }
373
-
374
- /// Given a starting index, return the index of the next token on channel.
375
- /// Return [i] if {@code tokens[i]} is on channel. Return the index of
376
- /// the EOF token if there are no tokens on channel between [i] and
377
- /// EOF.
378
- int nextTokenOnChannel(int i, int channel) {
379
- sync(i);
380
- if (i >= size) {
381
- return size - 1;
382
- }
383
-
384
- var token = tokens[i];
385
- while (token.channel != channel) {
386
- if (token.type == Token.EOF) {
387
- return i;
388
- }
389
-
390
- i++;
391
- sync(i);
392
- token = tokens[i];
393
- }
394
-
395
- return i;
396
- }
397
-
398
- /// Given a starting index, return the index of the previous token on
399
- /// channel. Return [i] if {@code tokens[i]} is on channel. Return -1
400
- /// if there are no tokens on channel between [i] and 0.
401
- ///
402
- /// <p>
403
- /// If [i] specifies an index at or after the EOF token, the EOF token
404
- /// index is returned. This is due to the fact that the EOF token is treated
405
- /// as though it were on every channel.</p>
406
- int previousTokenOnChannel(int i, int channel) {
407
- sync(i);
408
- if (i >= size) {
409
- // the EOF token is on every channel
410
- return size - 1;
411
- }
412
-
413
- while (i >= 0) {
414
- final token = tokens[i];
415
- if (token.type == Token.EOF || token.channel == channel) {
416
- return i;
417
- }
418
-
419
- i--;
420
- }
421
-
422
- return i;
423
- }
424
-
425
- /// Collect all tokens on specified channel to the right of
426
- /// the current token up until we see a token on DEFAULT_TOKEN_CHANNEL or
427
- /// EOF. If channel is -1, find any non default channel token.
428
- List<Token> getHiddenTokensToRight(int tokenIndex, [int channel = -1]) {
429
- lazyInit();
430
- if (tokenIndex < 0 || tokenIndex >= tokens.length) {
431
- throw RangeError.index(tokenIndex, tokens);
432
- }
433
-
434
- final nextOnChannel =
435
- nextTokenOnChannel(tokenIndex + 1, Lexer.DEFAULT_TOKEN_CHANNEL);
436
- // if none onchannel to right, nextOnChannel=-1 so set to = last token
437
- final to = nextOnChannel == -1 ? size - 1 : nextOnChannel;
438
- final from = tokenIndex + 1;
439
-
440
- return filterForChannel(from, to, channel);
441
- }
442
-
443
- /// Collect all tokens on specified channel to the left of
444
- /// the current token up until we see a token on DEFAULT_TOKEN_CHANNEL.
445
- /// If channel is -1, find any non default channel token.
446
- List<Token> getHiddenTokensToLeft(int tokenIndex, [int channel = -1]) {
447
- lazyInit();
448
- if (tokenIndex < 0 || tokenIndex >= tokens.length) {
449
- throw RangeError.index(tokenIndex, tokens);
450
- }
451
-
452
- if (tokenIndex == 0) {
453
- // obviously no tokens can appear before the first token
454
- return null;
455
- }
456
-
457
- final prevOnChannel =
458
- previousTokenOnChannel(tokenIndex - 1, Lexer.DEFAULT_TOKEN_CHANNEL);
459
- if (prevOnChannel == tokenIndex - 1) return null;
460
- // if none onchannel to left, prevOnChannel=-1 then from=0
461
- final from = prevOnChannel + 1;
462
- final to = tokenIndex - 1;
463
-
464
- return filterForChannel(from, to, channel);
465
- }
466
-
467
- List<Token> filterForChannel(int from, int to, int channel) {
468
- final hidden = <Token>[];
469
- for (var i = from; i <= to; i++) {
470
- final t = tokens[i];
471
- if (channel == -1) {
472
- if (t.channel != Lexer.DEFAULT_TOKEN_CHANNEL) hidden.add(t);
473
- } else {
474
- if (t.channel == channel) hidden.add(t);
475
- }
476
- }
477
- if (hidden.isEmpty) return null;
478
- return hidden;
479
- }
480
-
481
- @override
482
- String get sourceName => tokenSource.sourceName;
483
-
484
- @override
485
- String get text => getText();
486
-
487
- @override
488
- String getText([Interval interval]) {
489
- interval = interval ??
490
- Interval.of(0, size - 1); // Get the text of all tokens in this buffer.
491
- final start = interval.a;
492
- var stop = interval.b;
493
- if (start < 0 || stop < 0) return '';
494
- fill();
495
- if (stop >= tokens.length) stop = tokens.length - 1;
496
-
497
- final buf = StringBuffer();
498
- for (var i = start; i <= stop; i++) {
499
- final t = tokens[i];
500
- if (t.type == Token.EOF) break;
501
- buf.write(t.text);
502
- }
503
- return buf.toString();
504
- }
505
-
506
- @override
507
- String getTextFromCtx(RuleContext ctx) {
508
- return getText(ctx.sourceInterval);
509
- }
510
-
511
- @override
512
- String getTextRange(Token start, Token stop) {
513
- if (start != null && stop != null) {
514
- return getText(Interval.of(start.tokenIndex, stop.tokenIndex));
515
- }
516
-
517
- return '';
518
- }
519
-
520
- /// Get all tokens from lexer until EOF */
521
- void fill() {
522
- lazyInit();
523
- final blockSize = 1000;
524
- while (true) {
525
- final fetched = fetch(blockSize);
526
- if (fetched < blockSize) {
527
- return;
528
- }
529
- }
530
- }
531
- }
532
-
533
- /// This class extends [BufferedTokenStream] with functionality to filter
534
- /// token streams to tokens on a particular channel (tokens where
535
- /// {@link Token#getChannel} returns a particular value).
536
- ///
537
- /// <p>
538
- /// This token stream provides access to all tokens by index or when calling
539
- /// methods like {@link #getText}. The channel filtering is only used for code
540
- /// accessing tokens via the lookahead methods {@link #LA}, {@link #LT}, and
541
- /// {@link #LB}.</p>
542
- ///
543
- /// <p>
544
- /// By default, tokens are placed on the default channel
545
- /// ({@link Token#DEFAULT_CHANNEL}), but may be reassigned by using the
546
- /// {@code ->channel(HIDDEN)} lexer command, or by using an embedded action to
547
- /// call {@link Lexer#setChannel}.
548
- /// </p>
549
- ///
550
- /// <p>
551
- /// Note: lexer rules which use the {@code ->skip} lexer command or call
552
- /// {@link Lexer#skip} do not produce tokens at all, so input text matched by
553
- /// such a rule will not be available as part of the token stream, regardless of
554
- /// channel.</p>we
555
- class CommonTokenStream extends BufferedTokenStream {
556
- /// Specifies the channel to use for filtering tokens.
557
- ///
558
- /// <p>
559
- /// The default value is {@link Token#DEFAULT_CHANNEL}, which matches the
560
- /// default channel assigned to tokens created by the lexer.</p>
561
- int channel;
562
-
563
- /// Constructs a new [CommonTokenStream] using the specified token
564
- /// source and filtering tokens to the specified channel. Only tokens whose
565
- /// {@link Token#getChannel} matches [channel] or have the
566
- /// {@link Token#getType} equal to {@link Token#EOF} will be returned by the
567
- /// token stream lookahead methods.
568
- ///
569
- /// @param tokenSource The token source.
570
- /// @param channel The channel to use for filtering tokens.
571
- CommonTokenStream(TokenSource tokenSource,
572
- [this.channel = Token.DEFAULT_CHANNEL])
573
- : super(tokenSource);
574
-
575
- @override
576
- int adjustSeekIndex(int i) {
577
- return nextTokenOnChannel(i, channel);
578
- }
579
-
580
- @override
581
- Token LB(int k) {
582
- if (k == 0 || (p - k) < 0) return null;
583
-
584
- var i = p;
585
- var n = 1;
586
- // find k good tokens looking backwards
587
- while (n <= k && i > 0) {
588
- // skip off-channel tokens
589
- i = previousTokenOnChannel(i - 1, channel);
590
- n++;
591
- }
592
- if (i < 0) return null;
593
- return tokens[i];
594
- }
595
-
596
- @override
597
- Token LT(int k) {
598
- //System.out.println("enter LT("+k+")");
599
- lazyInit();
600
- if (k == 0) return null;
601
- if (k < 0) return LB(-k);
602
- var i = p;
603
- var n = 1; // we know tokens[p] is a good one
604
- // find k good tokens
605
- while (n < k) {
606
- // skip off-channel tokens, but make sure to not look past EOF
607
- if (sync(i + 1)) {
608
- i = nextTokenOnChannel(i + 1, channel);
609
- }
610
- n++;
611
- }
612
- // if ( i>range ) range = i;
613
- return tokens[i];
614
- }
615
-
616
- /// Count EOF just once. */
617
- int get numberOfOnChannelTokens {
618
- var n = 0;
619
- fill();
620
- for (var i = 0; i < tokens.length; i++) {
621
- final t = tokens[i];
622
- if (t.channel == channel) n++;
623
- if (t.type == Token.EOF) break;
624
- }
625
- return n;
626
- }
627
- }