@cparra/apex-reflection 0.1.0-alpha.0 → 0.1.1-alpha.10
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/__tests__/end-to-end.test.ts +256 -0
- package/coverage/clover.xml +12857 -0
- package/coverage/coverage-final.json +3 -0
- package/coverage/lcov-report/base.css +224 -0
- package/coverage/lcov-report/block-navigation.js +79 -0
- package/coverage/lcov-report/favicon.png +0 -0
- package/coverage/lcov-report/index.html +126 -0
- package/coverage/lcov-report/index.js.html +104 -0
- package/coverage/lcov-report/out.js.html +41126 -0
- package/coverage/lcov-report/prettify.css +1 -0
- package/coverage/lcov-report/prettify.js +2 -0
- package/coverage/lcov-report/sort-arrow-sprite.png +0 -0
- package/coverage/lcov-report/sorter.js +170 -0
- package/coverage/lcov.info +24801 -0
- package/index.d.ts +118 -0
- package/index.js +8 -0
- package/index.ts +141 -0
- package/jest.config.js +11 -0
- package/out.js +13708 -0
- package/package.json +22 -17
- package/tsconfig.json +5 -3
- package/.idea/apexdocs-dart.iml +0 -18
- package/.idea/jsLibraryMappings.xml +0 -6
- package/.idea/libraries/Dart_Packages.xml +0 -556
- package/.idea/libraries/Dart_SDK.xml +0 -28
- package/.idea/misc.xml +0 -6
- package/.idea/modules.xml +0 -8
- package/.idea/runConfigurations/_template__of_Dart_Test.xml +0 -6
- package/.idea/vcs.xml +0 -6
- package/CHANGELOG.md +0 -3
- package/README.md +0 -26
- package/TODO.md +0 -5
- package/analysis_options.yaml +0 -30
- package/antlr4-4.9.2/CHANGELOG.md +0 -4
- package/antlr4-4.9.2/LICENSE +0 -52
- package/antlr4-4.9.2/README.md +0 -11
- package/antlr4-4.9.2/analysis_options.yaml +0 -1
- package/antlr4-4.9.2/lib/antlr4.dart +0 -21
- package/antlr4-4.9.2/lib/src/atn/atn.dart +0 -18
- package/antlr4-4.9.2/lib/src/atn/src/atn.dart +0 -170
- package/antlr4-4.9.2/lib/src/atn/src/atn_config.dart +0 -242
- package/antlr4-4.9.2/lib/src/atn/src/atn_config_set.dart +0 -283
- package/antlr4-4.9.2/lib/src/atn/src/atn_deserializer.dart +0 -809
- package/antlr4-4.9.2/lib/src/atn/src/atn_simulator.dart +0 -95
- package/antlr4-4.9.2/lib/src/atn/src/atn_state.dart +0 -296
- package/antlr4-4.9.2/lib/src/atn/src/atn_type.dart +0 -14
- package/antlr4-4.9.2/lib/src/atn/src/info.dart +0 -553
- package/antlr4-4.9.2/lib/src/atn/src/lexer_action.dart +0 -601
- package/antlr4-4.9.2/lib/src/atn/src/lexer_action_executor.dart +0 -167
- package/antlr4-4.9.2/lib/src/atn/src/lexer_atn_simulator.dart +0 -731
- package/antlr4-4.9.2/lib/src/atn/src/parser_atn_simulator.dart +0 -2630
- package/antlr4-4.9.2/lib/src/atn/src/profiling_atn_simulator.dart +0 -229
- package/antlr4-4.9.2/lib/src/atn/src/semantic_context.dart +0 -404
- package/antlr4-4.9.2/lib/src/atn/src/transition.dart +0 -305
- package/antlr4-4.9.2/lib/src/dfa/dfa.dart +0 -8
- package/antlr4-4.9.2/lib/src/dfa/src/dfa.dart +0 -138
- package/antlr4-4.9.2/lib/src/dfa/src/dfa_serializer.dart +0 -76
- package/antlr4-4.9.2/lib/src/dfa/src/dfa_state.dart +0 -151
- package/antlr4-4.9.2/lib/src/error/error.dart +0 -10
- package/antlr4-4.9.2/lib/src/error/src/diagnostic_error_listener.dart +0 -116
- package/antlr4-4.9.2/lib/src/error/src/error_listener.dart +0 -241
- package/antlr4-4.9.2/lib/src/error/src/error_strategy.dart +0 -902
- package/antlr4-4.9.2/lib/src/error/src/errors.dart +0 -204
- package/antlr4-4.9.2/lib/src/input_stream.dart +0 -335
- package/antlr4-4.9.2/lib/src/interval_set.dart +0 -735
- package/antlr4-4.9.2/lib/src/lexer.dart +0 -343
- package/antlr4-4.9.2/lib/src/ll1_analyzer.dart +0 -204
- package/antlr4-4.9.2/lib/src/misc/multi_map.dart +0 -32
- package/antlr4-4.9.2/lib/src/misc/pair.dart +0 -34
- package/antlr4-4.9.2/lib/src/parser.dart +0 -777
- package/antlr4-4.9.2/lib/src/parser_interpreter.dart +0 -393
- package/antlr4-4.9.2/lib/src/parser_rule_context.dart +0 -275
- package/antlr4-4.9.2/lib/src/prediction_context.dart +0 -877
- package/antlr4-4.9.2/lib/src/recognizer.dart +0 -182
- package/antlr4-4.9.2/lib/src/rule_context.dart +0 -192
- package/antlr4-4.9.2/lib/src/runtime_meta_data.dart +0 -188
- package/antlr4-4.9.2/lib/src/token.dart +0 -431
- package/antlr4-4.9.2/lib/src/token_factory.dart +0 -88
- package/antlr4-4.9.2/lib/src/token_source.dart +0 -241
- package/antlr4-4.9.2/lib/src/token_stream.dart +0 -627
- package/antlr4-4.9.2/lib/src/tree/src/pattern/chunk.dart +0 -90
- package/antlr4-4.9.2/lib/src/tree/src/pattern/parse_tree_match.dart +0 -635
- package/antlr4-4.9.2/lib/src/tree/src/tree.dart +0 -370
- package/antlr4-4.9.2/lib/src/tree/src/trees.dart +0 -226
- package/antlr4-4.9.2/lib/src/tree/tree.dart +0 -10
- package/antlr4-4.9.2/lib/src/util/bit_set.dart +0 -308
- package/antlr4-4.9.2/lib/src/util/murmur_hash.dart +0 -77
- package/antlr4-4.9.2/lib/src/util/utils.dart +0 -31
- package/antlr4-4.9.2/lib/src/vocabulary.dart +0 -254
- package/antlr4-4.9.2/pubspec.yaml +0 -13
- package/example/node_example/index.js +0 -8
- package/example/node_example/package.json +0 -12
- package/example/node_example_ts/package-lock.json +0 -70
- package/example/node_example_ts/package.json +0 -19
- package/example/node_example_ts/src/index.js +0 -5
- package/example/node_example_ts/src/index.ts +0 -9
- package/example/node_example_ts/tsconfig.json +0 -79
- package/js/dart2jsout.js +0 -25898
- package/js/dart2jsout.js.map +0 -16
- package/js/index.d.ts +0 -1
- package/js/index.js +0 -4
- package/js/out.js +0 -26023
- package/js/out.js.map +0 -16
- package/js/package-lock.json +0 -57
- package/js/preamble.js +0 -125
- package/lib/apexdocs_dart.dart +0 -28
- package/lib/src/antlr/grammars/Apexdoc/ApexdocLexer.g4 +0 -120
- package/lib/src/antlr/grammars/Apexdoc/ApexdocParser.g4 +0 -158
- package/lib/src/antlr/grammars/Apexdoc/gen/ApexdocLexer.interp +0 -95
- package/lib/src/antlr/grammars/Apexdoc/gen/ApexdocLexer.java +0 -238
- package/lib/src/antlr/grammars/Apexdoc/gen/ApexdocLexer.tokens +0 -23
- package/lib/src/antlr/grammars/apex/ApexLexer.g4 +0 -255
- package/lib/src/antlr/grammars/apex/ApexParser.g4 +0 -567
- package/lib/src/antlr/grammars/apex/examples/ApexClass.cls +0 -6
- package/lib/src/antlr/lib/apex/ApexLexer.dart +0 -1223
- package/lib/src/antlr/lib/apex/ApexLexer.interp +0 -393
- package/lib/src/antlr/lib/apex/ApexLexer.tokens +0 -212
- package/lib/src/antlr/lib/apex/ApexParser.dart +0 -9349
- package/lib/src/antlr/lib/apex/ApexParser.interp +0 -326
- package/lib/src/antlr/lib/apex/ApexParser.tokens +0 -212
- package/lib/src/antlr/lib/apex/ApexParserBaseListener.dart +0 -1036
- package/lib/src/antlr/lib/apex/ApexParserListener.dart +0 -975
- package/lib/src/antlr/lib/apexdoc/ApexdocLexer.dart +0 -373
- package/lib/src/antlr/lib/apexdoc/ApexdocLexer.interp +0 -95
- package/lib/src/antlr/lib/apexdoc/ApexdocLexer.tokens +0 -23
- package/lib/src/antlr/lib/apexdoc/ApexdocParser.dart +0 -2471
- package/lib/src/antlr/lib/apexdoc/ApexdocParser.interp +0 -69
- package/lib/src/antlr/lib/apexdoc/ApexdocParser.tokens +0 -23
- package/lib/src/antlr/lib/apexdoc/ApexdocParserBaseListener.dart +0 -252
- package/lib/src/antlr/lib/apexdoc/ApexdocParserListener.dart +0 -215
- package/lib/src/builders/builders.dart +0 -32
- package/lib/src/model/apex_file_manifest.dart +0 -37
- package/lib/src/model/apex_file_manifest.g.dart +0 -18
- package/lib/src/model/declaration.dart +0 -50
- package/lib/src/model/doc_comment.dart +0 -117
- package/lib/src/model/doc_comment.g.dart +0 -118
- package/lib/src/model/members.dart +0 -143
- package/lib/src/model/members.g.dart +0 -105
- package/lib/src/model/types.dart +0 -159
- package/lib/src/model/types.g.dart +0 -111
- package/lib/src/service/apex_listener.dart +0 -226
- package/lib/src/service/apexdoc_listener.dart +0 -82
- package/lib/src/service/parsers.dart +0 -33
- package/lib/src/service/utils/parsing/access_modifiers_parser.dart +0 -33
- package/lib/src/service/utils/parsing/parameters_parser.dart +0 -18
- package/lib/src/service/utils/parsing/parsing_utils.dart +0 -2
- package/lib/src/service/walker.dart +0 -82
- package/pubspec.yaml +0 -19
- package/test/apex_file_manifest_test.dart +0 -16
- package/test/apex_listener_test.dart +0 -703
- package/test/apexdoc_parser_test.dart +0 -179
- package/test/doc_comment_test.dart +0 -89
- package/test/members_serialization_test.dart +0 -158
- package/test/members_test.dart +0 -178
- package/test/types_serialization_test.dart +0 -191
- package/test/types_test.dart +0 -311
- package/test/walker_test.dart +0 -58
- package/tool/grind.dart +0 -20
package/antlr4-4.9.2/lib/src/lexer.dart
@@ -1,343 +0,0 @@
-/*
- * Copyright (c) 2012-2017 The ANTLR Project. All rights reserved.
- * Use of this file is governed by the BSD 3-clause license that
- * can be found in the LICENSE.txt file in the project root.
- */
-
-import 'dart:developer';
-
-import 'atn/atn.dart';
-import 'error/error.dart';
-import 'input_stream.dart';
-import 'interval_set.dart';
-import 'misc/pair.dart';
-import 'recognizer.dart';
-import 'token.dart';
-import 'token_factory.dart';
-import 'token_source.dart';
-import 'util/utils.dart';
-
-abstract class Lexer extends Recognizer<LexerATNSimulator>
-    implements TokenSource {
-  static final DEFAULT_MODE = 0;
-  static final MORE = -2;
-  static final SKIP = -3;
-
-  static final DEFAULT_TOKEN_CHANNEL = Token.DEFAULT_CHANNEL;
-  static final HIDDEN = Token.HIDDEN_CHANNEL;
-  static final MIN_CHAR_VALUE = 0x0000;
-  static final MAX_CHAR_VALUE = 0x10FFFF;
-
-  CharStream _input;
-
-  Pair<TokenSource, CharStream> _tokenFactorySourcePair;
-  @override
-  TokenFactory tokenFactory = CommonTokenFactory.DEFAULT;
-
-  /// The goal of all lexer rules/methods is to create a token object.
-  /// this is an instance variable as multiple rules may collaborate to
-  /// create a single token. nextToken will return this object after
-  /// matching lexer rule(s). If you subclass to allow multiple token
-  /// emissions, then set this to the last token to be matched or
-  /// something nonnull so that the auto token emit mechanism will not
-  /// emit another token.
-  Token _token;
-
-  /// What character index in the stream did the current token start at?
-  /// Needed, for example, to get the text for current token. Set at
-  /// the start of nextToken.
-  int tokenStartCharIndex = -1;
-
-  /// The line on which the first character of the token resides
-  int tokenStartLine = -1;
-
-  /// The character position of first character within the line
-  int tokenStartCharPositionInLine = -1;
-
-  /// Once we see EOF on char stream, next token will be EOF.
-  /// If you have DONE : EOF ; then you see DONE EOF.
-  bool _hitEOF = false;
-
-  /// The channel number for the current token
-  int channel = Token.DEFAULT_CHANNEL;
-
-  /// The token type for the current token
-  int type = Token.INVALID_TYPE;
-
-  final List<int> _modeStack = [];
-  int mode_ = Lexer.DEFAULT_MODE;
-
-  /// You can set the text for the current token to override what is in
-  /// the input char buffer. Use setText() or can set this instance var.
-  String _text;
-
-  Lexer(CharStream input) {
-    _input = input;
-    _tokenFactorySourcePair = Pair(this, input);
-  }
-
-  void reset() {
-    // wack Lexer state variables
-    if (_input != null) {
-      _input.seek(0); // rewind the input
-    }
-    _token = null;
-    type = Token.INVALID_TYPE;
-    channel = Token.DEFAULT_CHANNEL;
-    tokenStartCharIndex = -1;
-    tokenStartCharPositionInLine = -1;
-    tokenStartLine = -1;
-    _text = null;
-
-    _hitEOF = false;
-    mode_ = Lexer.DEFAULT_MODE;
-    _modeStack.clear();
-
-    interpreter.reset();
-  }
-
-  /// Return a token from this source; i.e., match a token on the char stream.
-  @override
-  Token nextToken() {
-    if (_input == null) {
-      throw StateError('nextToken requires a non-null input stream.');
-    }
-
-    // Mark start location in char stream so unbuffered streams are
-    // guaranteed at least have text of current token
-    final tokenStartMarker = _input.mark();
-    try {
-      outer:
-      while (true) {
-        if (_hitEOF) {
-          emitEOF();
-          return _token;
-        }
-
-        _token = null;
-        channel = Token.DEFAULT_CHANNEL;
-        tokenStartCharIndex = _input.index;
-        tokenStartCharPositionInLine = interpreter.charPositionInLine;
-        tokenStartLine = interpreter.line;
-        _text = null;
-        do {
-          type = Token.INVALID_TYPE;
-          // System.out.println("nextToken line "+tokenStartLine+" at "+((char)input.LA(1))+
-          //   " in mode "+mode+
-          //   " at index "+input.index());
-          int ttype;
-          try {
-            ttype = interpreter.match(_input, mode_);
-          } on LexerNoViableAltException catch (e) {
-            notifyListeners(e); // report error
-            recover(e);
-            ttype = SKIP;
-          }
-          if (_input.LA(1) == IntStream.EOF) {
-            _hitEOF = true;
-          }
-          if (type == Token.INVALID_TYPE) type = ttype;
-          if (type == SKIP) {
-            continue outer;
-          }
-        } while (type == MORE);
-        if (_token == null) emit();
-        return _token;
-      }
-    } finally {
-      // make sure we release marker after match or
-      // unbuffered char stream will keep buffering
-      _input.release(tokenStartMarker);
-    }
-  }
-
-  /// Instruct the lexer to skip creating a token for current lexer rule
-  /// and look for another token. nextToken() knows to keep looking when
-  /// a lexer rule finishes with token set to SKIP_TOKEN. Recall that
-  /// if token==null at end of any token rule, it creates one for you
-  /// and emits it.
-  void skip() {
-    type = Lexer.SKIP;
-  }
-
-  void more() {
-    type = Lexer.MORE;
-  }
-
-  void mode(int m) {
-    mode_ = m;
-  }
-
-  void pushMode(int m) {
-    if (LexerATNSimulator.debug) {
-      log('pushMode $m');
-    }
-    _modeStack.add(mode_);
-    mode(m);
-  }
-
-  int popMode() {
-    if (_modeStack.isEmpty) throw StateError('');
-    if (LexerATNSimulator.debug) log('popMode back to ${_modeStack.last}');
-    mode(_modeStack.removeLast());
-    return mode_;
-  }
-
-  /// Set the char stream and reset the lexer
-  @override
-  set inputStream(IntStream input) {
-    _input = null;
-    _tokenFactorySourcePair =
-        Pair<TokenSource, CharStream>(this, _input);
-    reset();
-    _input = input;
-    _tokenFactorySourcePair =
-        Pair<TokenSource, CharStream>(this, _input);
-  }
-
-  @override
-  String get sourceName {
-    return _input.sourceName;
-  }
-
-  @override
-  CharStream get inputStream {
-    return _input;
-  }
-
-  /// By default does not support multiple emits per nextToken invocation
-  /// for efficiency reasons. Subclass and override this method, nextToken,
-  /// and getToken (to push tokens into a list and pull from that list
-  /// rather than a single variable as this implementation does).
-  void emitToken(Token token) {
-    //System.err.println("emit "+token);
-    _token = token;
-  }
-
-  /// The standard method called to automatically emit a token at the
-  /// outermost lexical rule. The token object should point into the
-  /// char buffer start..stop. If there is a text override in 'text',
-  /// use that to set the token's text. Override this method to emit
-  /// custom Token objects or provide a new factory.
-  Token emit() {
-    final t = tokenFactory.create(
-        type,
-        _text,
-        _tokenFactorySourcePair,
-        channel,
-        tokenStartCharIndex,
-        charIndex - 1,
-        tokenStartLine,
-        tokenStartCharPositionInLine);
-    emitToken(t);
-    return t;
-  }
-
-  Token emitEOF() {
-    final cpos = charPositionInLine;
-    final eof = tokenFactory.create(Token.EOF, null, _tokenFactorySourcePair,
-        Token.DEFAULT_CHANNEL, _input.index, _input.index - 1, line, cpos);
-    emitToken(eof);
-    return eof;
-  }
-
-  @override
-  int get charPositionInLine {
-    return interpreter.charPositionInLine;
-  }
-
-  @override
-  int get line {
-    return interpreter.line;
-  }
-
-  set line(int line) {
-    interpreter.line = line;
-  }
-
-  set charPositionInLine(int charPositionInLine) {
-    interpreter.charPositionInLine = charPositionInLine;
-  }
-
-  /// What is the index of the current character of lookahead?
-  int get charIndex {
-    return _input.index;
-  }
-
-  /// Return the text matched so far for the current token or any
-  /// text override.
-  String get text {
-    if (_text != null) {
-      return _text;
-    }
-    return interpreter.getText(_input);
-  }
-
-  /// Set the complete text of this token; it wipes any previous
-  /// changes to the text.
-  set text(String text) {
-    _text = text;
-  }
-
-  /// Override if emitting multiple tokens.
-  Token get token {
-    return _token;
-  }
-
-  void setToken(Token _token) {
-    this._token = _token;
-  }
-
-  List<String> get channelNames => null;
-
-  List<String> get modeNames => null;
-
-  /// Return a list of all Token objects in input char stream.
-  /// Forces load of all tokens. Does not include EOF token.
-  List<Token> get allTokens {
-    final tokens = <Token>[];
-    var t = nextToken();
-    while (t.type != Token.EOF) {
-      tokens.add(t);
-      t = nextToken();
-    }
-    return tokens;
-  }
-
-  void notifyListeners(LexerNoViableAltException e) {
-    final text =
-        _input.getText(Interval.of(tokenStartCharIndex, _input.index));
-    final msg = "token recognition error at: '" + getErrorDisplay(text) + "'";
-
-    final listener = errorListenerDispatch;
-    listener.syntaxError(
-        this, null, tokenStartLine, tokenStartCharPositionInLine, msg, e);
-  }
-
-  String getErrorDisplay(String s) {
-    return escapeWhitespace(s);
-  }
-
-  String getCharErrorDisplay(int c) {
-    final s = getErrorDisplay(String.fromCharCode(c));
-    return "'$s'";
-  }
-
-  /// Lexers can normally match any char in it's vocabulary after matching
-  /// a token, so do the easy thing and just kill a character and hope
-  /// it all works out. You can instead use the rule invocation stack
-  /// to do sophisticated error recovery if you are in a fragment rule.
-  void recover(RecognitionException re) {
-    if (re is LexerNoViableAltException) {
-      if (_input.LA(1) != IntStream.EOF) {
-        // skip a char and try again
-        interpreter.consume(_input);
-      }
-    } else {
-      //System.out.println("consuming char "+(char)input.LA(1)+" during recovery");
-      //re.printStackTrace();
-      // TODO: Do we lose character or line position information?
-      _input.consume();
-    }
-  }
-}
package/antlr4-4.9.2/lib/src/ll1_analyzer.dart
@@ -1,204 +0,0 @@
-/*
- * Copyright (c) 2012-2017 The ANTLR Project. All rights reserved.
- * Use of this file is governed by the BSD 3-clause license that
- * can be found in the LICENSE.txt file in the project root.
- */
-
-import './util/bit_set.dart';
-import 'atn/atn.dart';
-import 'interval_set.dart';
-import 'prediction_context.dart';
-import 'rule_context.dart';
-import 'token.dart';
-import 'util/bit_set.dart';
-
-class LL1Analyzer {
-  /// Special value added to the lookahead sets to indicate that we hit
-  /// a predicate during analysis if {@code seeThruPreds==false}.
-  static final int HIT_PRED = Token.INVALID_TYPE;
-
-  final ATN atn;
-
-  LL1Analyzer(this.atn);
-
-  /// Calculates the SLL(1) expected lookahead set for each outgoing transition
-  /// of an [ATNState]. The returned array has one element for each
-  /// outgoing transition in [s]. If the closure from transition
-  /// <em>i</em> leads to a semantic predicate before matching a symbol, the
-  /// element at index <em>i</em> of the result will be null.
-  ///
-  /// @param s the ATN state
-  /// @return the expected symbols for each outgoing transition of [s].
-  List<IntervalSet> getDecisionLookahead(ATNState s) {
-    // System.out.println("LOOK("+s.stateNumber+")");
-    if (s == null) {
-      return null;
-    }
-
-    final look = List<IntervalSet>(s.numberOfTransitions);
-    for (var alt = 0; alt < s.numberOfTransitions; alt++) {
-      look[alt] = IntervalSet();
-      final lookBusy = <ATNConfig>{};
-      final seeThruPreds = false; // fail to get lookahead upon pred
-      _LOOK(s.transition(alt).target, null, PredictionContext.EMPTY, look[alt],
-          lookBusy, BitSet(), seeThruPreds, false);
-      // Wipe out lookahead for this alternative if we found nothing
-      // or we had a predicate when we !seeThruPreds
-      if (look[alt].length == 0 || look[alt].contains(HIT_PRED)) {
-        look[alt] = null;
-      }
-    }
-    return look;
-  }
-
-  /// Compute set of tokens that can follow [s] in the ATN in the
-  /// specified [ctx].
-  ///
-  /// <p>If [ctx] is null and the end of the rule containing
-  /// [s] is reached, {@link Token#EPSILON} is added to the result set.
-  /// If [ctx] is not null and the end of the outermost rule is
-  /// reached, {@link Token#EOF} is added to the result set.</p>
-  ///
-  /// @param s the ATN state
-  /// @param stopState the ATN state to stop at. This can be a
-  /// [BlockEndState] to detect epsilon paths through a closure.
-  /// @param ctx the complete parser context, or null if the context
-  /// should be ignored
-  ///
-  /// @return The set of tokens that can follow [s] in the ATN in the
-  /// specified [ctx].
-
-  IntervalSet LOOK(ATNState s, RuleContext ctx, [ATNState stopState]) {
-    final r = IntervalSet();
-    final seeThruPreds = true; // ignore preds; get all lookahead
-    final lookContext =
-        ctx != null ? PredictionContext.fromRuleContext(s.atn, ctx) : null;
-    _LOOK(
-        s, stopState, lookContext, r, <ATNConfig>{}, BitSet(), seeThruPreds, true);
-    return r;
-  }
-
-  /// Compute set of tokens that can follow [s] in the ATN in the
-  /// specified [ctx].
-  ///
-  /// <p>If [ctx] is null and [stopState] or the end of the
-  /// rule containing [s] is reached, {@link Token#EPSILON} is added to
-  /// the result set. If [ctx] is not null and [addEOF] is
-  /// [true] and [stopState] or the end of the outermost rule is
-  /// reached, {@link Token#EOF} is added to the result set.</p>
-  ///
-  /// @param s the ATN state.
-  /// @param stopState the ATN state to stop at. This can be a
-  /// [BlockEndState] to detect epsilon paths through a closure.
-  /// @param ctx The outer context, or null if the outer context should
-  /// not be used.
-  /// @param look The result lookahead set.
-  /// @param lookBusy A set used for preventing epsilon closures in the ATN
-  /// from causing a stack overflow. Outside code should pass
-  /// {@code new HashSet<ATNConfig>} for this argument.
-  /// @param calledRuleStack A set used for preventing left recursion in the
-  /// ATN from causing a stack overflow. Outside code should pass
-  /// {@code new BitSet()} for this argument.
-  /// @param seeThruPreds [true] to true semantic predicates as
-  /// implicitly [true] and "see through them", otherwise [false]
-  /// to treat semantic predicates as opaque and add {@link #HIT_PRED} to the
-  /// result if one is encountered.
-  /// @param addEOF Add {@link Token#EOF} to the result if the end of the
-  /// outermost context is reached. This parameter has no effect if [ctx]
-  /// is null.
-  void _LOOK(
-      ATNState s,
-      ATNState stopState,
-      PredictionContext ctx,
-      IntervalSet look,
-      Set<ATNConfig> lookBusy,
-      BitSet calledRuleStack,
-      bool seeThruPreds,
-      bool addEOF) {
-    // System.out.println("_LOOK("+s.stateNumber+", ctx="+ctx);
-    final c = ATNConfig(s, 0, ctx);
-    if (!lookBusy.add(c)) return;
-
-    if (s == stopState) {
-      if (ctx == null) {
-        look.addOne(Token.EPSILON);
-        return;
-      } else if (ctx.isEmpty && addEOF) {
-        look.addOne(Token.EOF);
-        return;
-      }
-    }
-
-    if (s is RuleStopState) {
-      if (ctx == null) {
-        look.addOne(Token.EPSILON);
-        return;
-      } else if (ctx.isEmpty && addEOF) {
-        look.addOne(Token.EOF);
-        return;
-      }
-
-      if (ctx != PredictionContext.EMPTY) {
-        // run thru all possible stack tops in ctx
-        final removed = calledRuleStack[s.ruleIndex];
-        try {
-          calledRuleStack.clear(s.ruleIndex);
-          for (var i = 0; i < ctx.length; i++) {
-            final returnState = atn.states[ctx.getReturnState(i)];
-            // System.out.println("popping back to "+retState);
-            _LOOK(returnState, stopState, ctx.getParent(i), look, lookBusy,
-                calledRuleStack, seeThruPreds, addEOF);
-          }
-        } finally {
-          if (removed) {
-            calledRuleStack.set(s.ruleIndex);
-          }
-        }
-        return;
-      }
-    }
-
-    for (var i = 0; i < s.numberOfTransitions; i++) {
-      final t = s.transition(i);
-      if (t is RuleTransition) {
-        if (calledRuleStack[t.target.ruleIndex]) {
-          continue;
-        }
-
-        PredictionContext newContext =
-            SingletonPredictionContext.create(ctx, t.followState.stateNumber);
-
-        try {
-          calledRuleStack.set(t.target.ruleIndex);
-          _LOOK(t.target, stopState, newContext, look, lookBusy,
-              calledRuleStack, seeThruPreds, addEOF);
-        } finally {
-          calledRuleStack.clear(t.target.ruleIndex);
-        }
-      } else if (t is AbstractPredicateTransition) {
-        if (seeThruPreds) {
-          _LOOK(t.target, stopState, ctx, look, lookBusy, calledRuleStack,
-              seeThruPreds, addEOF);
-        } else {
-          look.addOne(HIT_PRED);
-        }
-      } else if (t.isEpsilon) {
-        _LOOK(t.target, stopState, ctx, look, lookBusy, calledRuleStack,
-            seeThruPreds, addEOF);
-      } else if (t is WildcardTransition) {
-        look.addAll(
-            IntervalSet.ofRange(Token.MIN_USER_TOKEN_TYPE, atn.maxTokenType));
-      } else {
-        // System.out.println("adding "+ t);
-        var set = t.label;
-        if (set != null) {
-          if (t is NotSetTransition) {
-            set = set.complement(IntervalSet.ofRange(
-                Token.MIN_USER_TOKEN_TYPE, atn.maxTokenType));
-          }
-          look.addAll(set);
-        }
-      }
-    }
-  }
-}
package/antlr4-4.9.2/lib/src/misc/multi_map.dart
@@ -1,32 +0,0 @@
-/*
- * Copyright (c) 2012-2017 The ANTLR Project. All rights reserved.
- * Use of this file is governed by the BSD 3-clause license that
- * can be found in the LICENSE.txt file in the project root.
- */
-
-import 'package:collection/collection.dart';
-
-import 'pair.dart';
-
-class MultiMap<K, V> extends DelegatingMap<K, List<V>> {
-  MultiMap() : super({});
-
-  void put(K key, V value) {
-    var elementsForKey = this[key];
-    if (elementsForKey == null) {
-      elementsForKey = [];
-      this[key] = elementsForKey;
-    }
-    elementsForKey.add(value);
-  }
-
-  List<Pair<K, V>> get pairs {
-    final pairs = <Pair<K, V>>[];
-    for (var key in keys) {
-      for (var value in this[key]) {
-        pairs.add(Pair<K, V>(key, value));
-      }
-    }
-    return pairs;
-  }
-}
package/antlr4-4.9.2/lib/src/misc/pair.dart
@@ -1,34 +0,0 @@
-/*
- * Copyright (c) 2012-2017 The ANTLR Project. All rights reserved.
- * Use of this file is governed by the BSD 3-clause license that
- * can be found in the LICENSE.txt file in the project root.
- */
-
-import '../util/murmur_hash.dart';
-
-class Pair<A, B> {
-  final A a;
-  final B b;
-
-  const Pair(this.a, this.b);
-
-  @override
-  bool operator ==(other) {
-    return identical(this, other) || other is Pair && a == other.a && b == other.b;
-  }
-
-  @override
-  String toString() {
-    return '($a, $b)';
-  }
-
-  @override
-  int get hashCode {
-    MurmurHash.initialize();
-
-    var hash = MurmurHash.initialize();
-    hash = MurmurHash.update(hash, a);
-    hash = MurmurHash.update(hash, b);
-    return MurmurHash.finish(hash, 2);
-  }
-}