@chaoswise/intl 1.0.0 → 1.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/bin/chaoswise-intl.js +5 -1
- package/bin/scripts/collect.js +38 -18
- package/bin/scripts/conf/default.js +40 -2
- package/bin/scripts/conf/getCustomConfig.js +8 -0
- package/bin/scripts/conf/index.js +2 -25
- package/bin/scripts/conf/initConfig.js +37 -0
- package/bin/scripts/initConfig.js +3 -0
- package/bin/scripts/service/index.js +2 -2
- package/bin/scripts/update.js +5 -5
- package/bin/scripts/util/FormPath/contexts.d.ts +10 -0
- package/bin/scripts/util/FormPath/contexts.js +23 -0
- package/bin/scripts/util/FormPath/destructor.d.ts +15 -0
- package/bin/scripts/util/FormPath/destructor.js +124 -0
- package/bin/scripts/util/FormPath/index.d.ts +49 -0
- package/bin/scripts/util/FormPath/index.js +536 -0
- package/bin/scripts/util/FormPath/lru.d.ts +1 -0
- package/bin/scripts/util/FormPath/lru.js +246 -0
- package/bin/scripts/util/FormPath/matcher.d.ts +33 -0
- package/bin/scripts/util/FormPath/matcher.js +216 -0
- package/bin/scripts/util/FormPath/parser.d.ts +28 -0
- package/bin/scripts/util/FormPath/parser.js +302 -0
- package/bin/scripts/util/FormPath/tokenizer.d.ts +26 -0
- package/bin/scripts/util/FormPath/tokenizer.js +280 -0
- package/bin/scripts/util/FormPath/tokens.d.ts +26 -0
- package/bin/scripts/util/FormPath/tokens.js +212 -0
- package/bin/scripts/util/FormPath/types.d.ts +76 -0
- package/bin/scripts/util/FormPath/types.js +17 -0
- package/bin/scripts/util/FormPath/utils.d.ts +10 -0
- package/bin/scripts/util/FormPath/utils.js +63 -0
- package/bin/scripts/util/downloadJson.js +10 -1
- package/bin/scripts/util/file.js +31 -0
- package/bin/scripts/util/getGroupName.js +15 -0
- package/bin/scripts/util/getWord.js +13 -0
- package/bin/scripts/util/log.js +25 -4
- package/bin/scripts/util/makeVisitorCollect.js +351 -46
- package/bin/scripts/util/makeVisitorUpdate.js +38 -1
- package/bin/scripts/util/specialMatch.js +14 -0
- package/bin/scripts/util/transformAst.js +30 -22
- package/bin/scripts/util/writeNewWordsFile.js +30 -0
- package/lib/index.js +2 -1
- package/package.json +7 -5
|
@@ -0,0 +1,302 @@
|
|
|
1
|
+
// Compiled (tsc → ES5/CommonJS) recursive-descent parser for FormPath
// path expressions, e.g. "a.b[0]", "a.*(x,y)", "a.[1:3]", "[[raw]]",
// destructuring patterns "{a,b}" / "[a,b]", and the expand operator "~".
// Parser extends Tokenizer (./tokenizer) and consumes its token stream.
"use strict";
// Standard ES5 inheritance helper emitted by the TypeScript compiler.
var __extends = (this && this.__extends) || (function () {
    var extendStatics = function (d, b) {
        extendStatics = Object.setPrototypeOf ||
            ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) ||
            function (d, b) { for (var p in b) if (b.hasOwnProperty(p)) d[p] = b[p]; };
        return extendStatics(d, b);
    };
    return function (d, b) {
        extendStatics(d, b);
        function __() { this.constructor = d; }
        d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __());
    };
})();
Object.defineProperty(exports, "__esModule", { value: true });
exports.Parser = void 0;
var tokenizer_1 = require("./tokenizer");
var tokens_1 = require("./tokens");
var contexts_1 = require("./contexts");
var destructor_1 = require("./destructor");
var Parser = (function (_super) {
    __extends(Parser, _super);
    function Parser() {
        // Pass-through constructor: all initialization happens in Tokenizer.
        return _super !== null && _super.apply(this, arguments) || this;
    }
    // Entry point. Resets this.data, consumes the whole input, and returns
    // the AST root node (also stored on this.data.tree). Plain-path
    // segments are accumulated into this.data.segments as a side effect.
    Parser.prototype.parse = function () {
        var node;
        this.data = {
            segments: []
        };
        // An empty input yields an undefined tree (eofTok is eaten here).
        if (!this.eat(tokens_1.eofTok)) {
            this.next();
            node = this.parseAtom(this.state.type);
        }
        this.data.tree = node;
        return node;
    };
    // Links `node` as the successor of `parent` (singly-linked AST chain).
    Parser.prototype.append = function (parent, node) {
        if (parent && node) {
            parent.after = node;
        }
    };
    // Dispatches on the current token type to the matching parse method.
    // Returns undefined for unhandled token types (e.g. eofTok), which
    // terminates the append() chain.
    Parser.prototype.parseAtom = function (type) {
        switch (type) {
            case tokens_1.braceLTok:
            case tokens_1.bracketLTok:
                // Inside a destructor context "{"/"[" open nested patterns;
                // elsewhere they start a fresh destructor expression.
                if (this.includesContext(contexts_1.destructorContext)) {
                    if (type === tokens_1.braceLTok) {
                        return this.parseObjectPattern();
                    }
                    else {
                        return this.parseArrayPattern();
                    }
                }
                return this.parseDestructorExpression();
            case tokens_1.nameTok:
                return this.parseIdentifier();
            case tokens_1.expandTok:
                return this.parseExpandOperator();
            case tokens_1.starTok:
                return this.parseWildcardOperator();
            case tokens_1.bracketDLTok:
                return this.parseIgnoreExpression();
            case tokens_1.dotTok:
                return this.parseDotOperator();
        }
    };
    // Records one resolved path segment (string key or numeric index).
    Parser.prototype.pushSegments = function (key) {
        this.data.segments.push(key);
    };
    // Parses a bare name. Handles the "name[index]" bracket-access form and
    // only records a segment when not inside a destructor/match pattern
    // (patterns manage segments themselves).
    Parser.prototype.parseIdentifier = function () {
        var node = {
            type: 'Identifier',
            value: this.state.value
        };
        var hasNotInDestructor = !this.includesContext(contexts_1.destructorContext) &&
            !this.isMatchPattern &&
            !this.isWildMatchPattern;
        this.next();
        if (this.includesContext(contexts_1.bracketArrayContext)) {
            // We are the key inside "name[key]": the next token must close it.
            if (this.state.type !== tokens_1.bracketRTok) {
                throw this.unexpect();
            }
            else {
                this.state.context.pop();
                this.next();
            }
        }
        else if (hasNotInDestructor) {
            this.pushSegments(node.value);
        }
        if (this.state.type === tokens_1.bracketLTok) {
            // "name[" — expect a name token as the bracketed key.
            this.next();
            if (this.state.type !== tokens_1.nameTok) {
                throw this.unexpect();
            }
            this.state.context.push(contexts_1.bracketArrayContext);
            var isNumberKey = false;
            if (/^\d+$/.test(this.state.value)) {
                isNumberKey = true;
            }
            var value = this.state.value;
            // Purely numeric keys become number segments (array indices).
            this.pushSegments(isNumberKey ? Number(value) : value);
            var after = this.parseAtom(this.state.type);
            if (isNumberKey) {
                after.arrayIndex = true;
            }
            this.append(node, after);
        }
        else {
            this.append(node, this.parseAtom(this.state.type));
        }
        return node;
    };
    // Parses "~": marks the whole path as a (wild) match pattern and clears
    // any segments collected so far — a pattern has no concrete segments.
    Parser.prototype.parseExpandOperator = function () {
        var node = {
            type: 'ExpandOperator'
        };
        this.isMatchPattern = true;
        this.isWildMatchPattern = true;
        this.data.segments = [];
        this.next();
        this.append(node, this.parseAtom(this.state.type));
        return node;
    };
    // Parses "*" with optional "(...)" group filter or "[...]" range filter.
    Parser.prototype.parseWildcardOperator = function () {
        var node = {
            type: 'WildcardOperator'
        };
        this.isMatchPattern = true;
        this.isWildMatchPattern = true;
        this.data.segments = [];
        this.next();
        if (this.state.type === tokens_1.parenLTok) {
            node.filter = this.parseGroupExpression(node);
        }
        else if (this.state.type === tokens_1.bracketLTok) {
            node.filter = this.parseRangeExpression(node);
        }
        this.append(node, this.parseAtom(this.state.type));
        return node;
    };
    // Parses a top-level destructuring expression "{...}" / "[...]".
    // The normalized (whitespace-stripped) source text becomes both the
    // registered destructor key and the emitted path segment.
    Parser.prototype.parseDestructorExpression = function () {
        var node = {
            type: 'DestructorExpression'
        };
        this.state.context.push(contexts_1.destructorContext);
        // pos already sits past the opening brace/bracket, hence -1.
        var startPos = this.state.pos - 1;
        node.value =
            this.state.type === tokens_1.braceLTok
                ? this.parseObjectPattern()
                : this.parseArrayPattern();
        var endPos = this.state.pos;
        this.state.context.pop();
        this.next();
        this.append(node, this.parseAtom(this.state.type));
        node.source = this.input.substring(startPos, endPos).replace(/\s*/g, '');
        // Cache the parsed destructor rules globally, keyed by source text.
        destructor_1.setDestructor(node.source, destructor_1.parseDestructorRules(node));
        this.pushSegments(node.source);
        return node;
    };
    // Parses "[a, b, ...]" inside a destructor context.
    Parser.prototype.parseArrayPattern = function () {
        var node = {
            type: 'ArrayPattern',
            elements: []
        };
        this.next();
        node.elements = this.parseArrayPatternElements();
        return node;
    };
    Parser.prototype.parseArrayPatternElements = function () {
        var nodes = [];
        while (this.state.type !== tokens_1.bracketRTok && this.state.type !== tokens_1.eofTok) {
            nodes.push(this.parseAtom(this.state.type));
            if (this.state.type === tokens_1.bracketRTok) {
                this.next();
                break;
            }
            this.next();
        }
        return nodes;
    };
    // Parses "{a, b: c, ...}" inside a destructor context.
    Parser.prototype.parseObjectPattern = function () {
        var node = {
            type: 'ObjectPattern',
            properties: []
        };
        this.next();
        node.properties = this.parseObjectProperties();
        return node;
    };
    Parser.prototype.parseObjectProperties = function () {
        var nodes = [];
        while (this.state.type !== tokens_1.braceRTok && this.state.type !== tokens_1.eofTok) {
            var node = {
                type: 'ObjectPatternProperty',
                key: this.parseAtom(this.state.type)
            };
            nodes.push(node);
            // Optional ": value" renames the destructured key.
            if (this.state.type === tokens_1.colonTok) {
                this.next();
                node.value = this.parseAtom(this.state.type);
            }
            if (this.state.type === tokens_1.braceRTok) {
                this.next();
                break;
            }
            this.next();
        }
        return nodes;
    };
    // Parses ".": a plain separator node chaining to the next atom.
    Parser.prototype.parseDotOperator = function () {
        var node = {
            type: 'DotOperator'
        };
        this.next();
        this.append(node, this.parseAtom(this.state.type));
        return node;
    };
    // Parses "[[ ... ]]": the raw content (whitespace stripped) is kept as a
    // single opaque segment, bypassing normal tokenization.
    Parser.prototype.parseIgnoreExpression = function () {
        this.next();
        var value = String(this.state.value).replace(/\s*/g, '');
        var node = {
            type: 'IgnoreExpression',
            value: value
        };
        this.pushSegments(value);
        this.next();
        this.append(node, this.parseAtom(this.state.type));
        // NOTE(review): a second next() after append — presumably skips the
        // synthetic bracketDRTok emitted by the tokenizer; confirm upstream.
        this.next();
        return node;
    };
    // Parses "*(a,b,!c)": a comma-separated group filter; "!" marks the
    // whole group as an exclusion pattern.
    Parser.prototype.parseGroupExpression = function (parent) {
        var node = {
            type: 'GroupExpression',
            value: []
        };
        this.isMatchPattern = true;
        this.data.segments = [];
        this.next();
        loop: while (true) {
            switch (this.state.type) {
                case tokens_1.commaTok:
                    this.next();
                    break;
                case tokens_1.bangTok:
                    node.isExclude = true;
                    this.haveExcludePattern = true;
                    this.next();
                    break;
                case tokens_1.eofTok:
                    break loop;
                case tokens_1.parenRTok:
                    break loop;
                default:
                    node.value.push(this.parseAtom(this.state.type));
            }
        }
        this.next();
        this.append(parent, this.parseAtom(this.state.type));
        return node;
    };
    // Parses "*[start:end]": a numeric range filter. "[n]" (no colon)
    // means start === end; commas are rejected inside a range.
    Parser.prototype.parseRangeExpression = function (parent) {
        var node = {
            type: 'RangeExpression'
        };
        this.next();
        this.isMatchPattern = true;
        this.data.segments = [];
        var start = false, hasColon = false;
        loop: while (true) {
            switch (this.state.type) {
                case tokens_1.colonTok:
                    hasColon = true;
                    start = true;
                    this.next();
                    break;
                case tokens_1.bracketRTok:
                    if (!hasColon && !node.end) {
                        node.end = node.start;
                    }
                    break loop;
                case tokens_1.commaTok:
                    throw this.unexpect();
                case tokens_1.eofTok:
                    break loop;
                default:
                    if (!start) {
                        node.start = this.parseAtom(this.state.type);
                    }
                    else {
                        node.end = this.parseAtom(this.state.type);
                    }
            }
        }
        this.next();
        this.append(parent, this.parseAtom(this.state.type));
        return node;
    };
    return Parser;
}(tokenizer_1.Tokenizer));
exports.Parser = Parser;
|
|
@@ -0,0 +1,26 @@
|
|
|
1
|
+
import { Token } from './tokens';
import { Context } from './contexts';
/**
 * Declarations for the FormPath tokenizer: a single-pass, stateful lexer
 * over a path-expression string. `state` holds the current token type,
 * its value, the read position, and a stack of lexical contexts.
 */
export declare class Tokenizer {
    /** The raw path-expression source being tokenized. */
    input: string;
    /** Mutable lexer state advanced by next()/readToken(). */
    state: {
        /** Stack of active lexical contexts (e.g. destructor, [[ ]] raw). */
        context: Context[];
        /** Type of the most recently finished token. */
        type: Token;
        /** Current read offset into `input`. */
        pos: number;
        /** Payload of the current token (e.g. a name string), if any. */
        value?: any;
    };
    constructor(input: string);
    /** Returns the innermost (top-of-stack) context, if any. */
    curContext(): Context;
    /** True if `context` appears anywhere in the context stack. */
    includesContext(context: Context): boolean;
    /** Builds (does not throw) an "unexpected token" error at current pos. */
    unexpect(type?: Token): Error;
    /** Throws if `next` is not allowed to follow `type`. */
    expectNext(type?: Token, next?: Token): void;
    /** Throws if `type` is not allowed to follow `prev`. */
    expectPrev(type?: Token, prev?: Token): void;
    /** True if the current token is exactly `type`. */
    match(type?: Token): boolean;
    /** Advances pos past whitespace (no-op inside [[ ]] raw context). */
    skipSpace(): void;
    /** Reads and finishes the next token from the input. */
    next(): void;
    /** Full (surrogate-aware) code point at `pos` (defaults to state.pos). */
    getCode(pos?: number): number;
    /** Consumes the current token if it matches `type`; returns success. */
    eat(type: any): boolean;
    /** Reads a name/keyword token, honoring backslash escapes. */
    readKeyWord(): void;
    /** Reads raw text inside "[[ ... ]]" up to the closing "]]". */
    readIngoreString(): void;
    /** Commits a token, running expectNext/expectPrev/context updates. */
    finishToken(type: Token, value?: any): void;
    /** Dispatches on the current code point to produce one token. */
    readToken(code: number, prevCode: number): void;
}
|
|
@@ -0,0 +1,280 @@
|
|
|
1
|
+
// Compiled (tsc → ES5/CommonJS) tokenizer for FormPath path expressions.
// Produces the token stream consumed by ./parser. State machine style:
// finishToken() commits each token and enforces adjacency constraints.
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
exports.Tokenizer = void 0;
var tokens_1 = require("./tokens");
var contexts_1 = require("./contexts");
// Non-ASCII whitespace characters treated as skippable space.
var nonASCIIwhitespace = /[\u1680\u180e\u2000-\u200a\u202f\u205f\u3000\ufeff]/;
// Code point at pos, combining a UTF-16 surrogate pair when present.
var fullCharCodeAtPos = function (input, pos) {
    var code = input.charCodeAt(pos);
    if (code <= 0xd7ff || code >= 0xe000)
        return code;
    var next = input.charCodeAt(pos + 1);
    return (code << 10) + next - 0x35fdc00;
};
// True for characters with syntactic meaning:
// * . ! [ ] ( ) , : ~ { }  (by char code).
var isRewordCode = function (code) {
    return code === 42 ||
        code === 46 ||
        code === 33 ||
        code === 91 ||
        code === 93 ||
        code === 40 ||
        code === 41 ||
        code === 44 ||
        code === 58 ||
        code === 126 ||
        code === 123 ||
        code === 125;
};
// Error factory: attaches extra properties (e.g. pos) to the Error.
var getError = function (message, props) {
    var err = new Error(message);
    Object.assign(err, props);
    return err;
};
// Substring [start, end) with backslash escape characters removed.
var slice = function (string, start, end) {
    var str = '';
    for (var i = start; i < end; i++) {
        var ch = string.charAt(i);
        if (ch !== '\\') {
            str += ch;
        }
    }
    return str;
};
var Tokenizer = (function () {
    function Tokenizer(input) {
        this.input = input;
        this.state = {
            context: [],
            type: null,
            pos: 0
        };
    }
    // Innermost active lexical context (top of the context stack).
    Tokenizer.prototype.curContext = function () {
        return this.state.context[this.state.context.length - 1];
    };
    // True if `context` is anywhere on the context stack.
    Tokenizer.prototype.includesContext = function (context) {
        for (var len = this.state.context.length - 1; len >= 0; len--) {
            if (this.state.context[len] === context) {
                return true;
            }
        }
        return false;
    };
    // Builds (but does not throw) an unexpected-token error at current pos.
    Tokenizer.prototype.unexpect = function (type) {
        type = type || this.state.type;
        return getError("Unexpect token \"" + type.flag + "\" in " + this.state.pos + " char.", {
            pos: this.state.pos
        });
    };
    // Enforces a token's expectNext constraint against its successor.
    Tokenizer.prototype.expectNext = function (type, next) {
        if (type && type.expectNext) {
            if (next && !type.expectNext.call(this, next)) {
                throw getError("Unexpect token \"" + next.flag + "\" token should not be behind \"" + type.flag + "\" token.(" + this.state.pos + "th char)", {
                    pos: this.state.pos
                });
            }
        }
    };
    // Enforces a token's expectPrev constraint against its predecessor.
    Tokenizer.prototype.expectPrev = function (type, prev) {
        if (type && type.expectPrev) {
            if (prev && !type.expectPrev.call(this, prev)) {
                throw getError("Unexpect token \"" + type.flag + "\" should not be behind \"" + prev.flag + "\"(" + this.state.pos + "th char).", {
                    pos: this.state.pos
                });
            }
        }
    };
    Tokenizer.prototype.match = function (type) {
        return this.state.type === type;
    };
    // Skips whitespace (spaces, NBSP, newlines, Unicode spaces). Skipping is
    // disabled inside the "[[ ]]" raw-string context, where space is data.
    Tokenizer.prototype.skipSpace = function () {
        if (this.curContext() === contexts_1.bracketDContext)
            return;
        loop: while (this.state.pos < this.input.length) {
            var ch = this.input.charCodeAt(this.state.pos);
            switch (ch) {
                case 32:
                case 160:
                    ++this.state.pos;
                    break;
                case 13:
                    // CR: swallow a following LF too (intentional fallthrough).
                    if (this.input.charCodeAt(this.state.pos + 1) === 10) {
                        ++this.state.pos;
                    }
                case 10:
                case 8232:
                case 8233:
                    ++this.state.pos;
                    break;
                default:
                    if ((ch > 8 && ch < 14) ||
                        (ch >= 5760 && nonASCIIwhitespace.test(String.fromCharCode(ch)))) {
                        ++this.state.pos;
                    }
                    else {
                        break loop;
                    }
            }
        }
    };
    // Advances to the next token (or eofTok at end of input).
    Tokenizer.prototype.next = function () {
        if (this.input.length <= this.state.pos) {
            return this.finishToken(tokens_1.eofTok);
        }
        this.skipSpace();
        this.readToken(this.getCode(), this.state.pos > 0 ? this.getCode(this.state.pos - 1) : -Infinity);
    };
    Tokenizer.prototype.getCode = function (pos) {
        if (pos === void 0) { pos = this.state.pos; }
        return fullCharCodeAtPos(this.input, pos);
    };
    // Consumes the current token when it matches; reports whether it did.
    Tokenizer.prototype.eat = function (type) {
        if (this.match(type)) {
            this.next();
            return true;
        }
        else {
            return false;
        }
    };
    // Reads a name token: consumes characters until a syntactic character
    // (unescaped), whitespace, or end of input; emits nameTok with the
    // escape-stripped text.
    Tokenizer.prototype.readKeyWord = function () {
        var startPos = this.state.pos, string = '';
        while (true) {
            var code = this.getCode();
            var prevCode = this.getCode(this.state.pos - 1);
            if (this.input.length === this.state.pos) {
                string = slice(this.input, startPos, this.state.pos + 1);
                break;
            }
            // A backslash before a syntactic char neutralizes it (prevCode 92).
            if (!isRewordCode(code) || prevCode === 92) {
                if (code === 32 ||
                    code === 160 ||
                    code === 10 ||
                    code === 8232 ||
                    code === 8233) {
                    string = slice(this.input, startPos, this.state.pos);
                    break;
                }
                if (code === 13 && this.input.charCodeAt(this.state.pos + 1) === 10) {
                    string = slice(this.input, startPos, this.state.pos);
                    break;
                }
                if ((code > 8 && code < 14) ||
                    (code >= 5760 && nonASCIIwhitespace.test(String.fromCharCode(code)))) {
                    string = slice(this.input, startPos, this.state.pos);
                    break;
                }
                this.state.pos++;
            }
            else {
                string = slice(this.input, startPos, this.state.pos);
                break;
            }
        }
        this.finishToken(tokens_1.nameTok, string);
    };
    // Reads raw text inside "[[ ... ]]" up to the closing "]]". "\[" and
    // "\]" escapes are honored; the backslashes are stripped from the value.
    // Emits ignoreTok (with the text) then a synthetic bracketDRTok.
    Tokenizer.prototype.readIngoreString = function () {
        var startPos = this.state.pos, prevCode, string = '';
        while (true) {
            var code = this.getCode();
            if (this.state.pos >= this.input.length)
                break;
            if ((code === 91 || code === 93) && prevCode === 92) {
                this.state.pos++;
                prevCode = '';
            }
            else if (code == 93 && prevCode === 93) {
                string = this.input
                    .slice(startPos, this.state.pos - 1)
                    .replace(/\\([\[\]])/g, '$1');
                this.state.pos++;
                break;
            }
            else {
                this.state.pos++;
                prevCode = code;
            }
        }
        this.finishToken(tokens_1.ignoreTok, string);
        this.finishToken(tokens_1.bracketDRTok);
    };
    // Commits a token: records type/value, validates adjacency against the
    // previous token, and lets the token mutate the context stack.
    Tokenizer.prototype.finishToken = function (type, value) {
        var preType = this.state.type;
        this.state.type = type;
        if (value !== undefined)
            this.state.value = value;
        this.expectNext(preType, type);
        this.expectPrev(type, preType);
        if (type.updateContext) {
            type.updateContext.call(this, preType);
        }
    };
    // Main dispatch: maps the current code point to a token. A preceding
    // backslash forces keyword reading (escaped syntactic character).
    Tokenizer.prototype.readToken = function (code, prevCode) {
        if (prevCode === 92) {
            return this.readKeyWord();
        }
        if (this.input.length <= this.state.pos) {
            this.finishToken(tokens_1.eofTok);
        }
        else if (this.curContext() === contexts_1.bracketDContext) {
            this.readIngoreString();
        }
        else if (code === 123) {
            this.state.pos++;
            this.finishToken(tokens_1.braceLTok);
        }
        else if (code === 125) {
            this.state.pos++;
            this.finishToken(tokens_1.braceRTok);
        }
        else if (code === 42) {
            this.state.pos++;
            this.finishToken(tokens_1.starTok);
        }
        else if (code === 33) {
            this.state.pos++;
            this.finishToken(tokens_1.bangTok);
        }
        else if (code === 46) {
            this.state.pos++;
            this.finishToken(tokens_1.dotTok);
        }
        else if (code === 91) {
            this.state.pos++;
            // "[[" opens the raw-string context; single "[" is bracketLTok.
            if (this.getCode() === 91) {
                this.state.pos++;
                return this.finishToken(tokens_1.bracketDLTok);
            }
            this.finishToken(tokens_1.bracketLTok);
        }
        else if (code === 126) {
            this.state.pos++;
            this.finishToken(tokens_1.expandTok);
        }
        else if (code === 93) {
            this.state.pos++;
            this.finishToken(tokens_1.bracketRTok);
        }
        else if (code === 40) {
            this.state.pos++;
            this.finishToken(tokens_1.parenLTok);
        }
        else if (code === 41) {
            this.state.pos++;
            this.finishToken(tokens_1.parenRTok);
        }
        else if (code === 44) {
            this.state.pos++;
            this.finishToken(tokens_1.commaTok);
        }
        else if (code === 58) {
            this.state.pos++;
            this.finishToken(tokens_1.colonTok);
        }
        else {
            // Anything else starts a name token.
            this.readKeyWord();
        }
    };
    return Tokenizer;
}());
exports.Tokenizer = Tokenizer;
|
|
@@ -0,0 +1,26 @@
|
|
|
1
|
+
/**
 * Token declarations for the FormPath tokenizer. Each token may carry
 * adjacency validators (expectNext/expectPrev) and a context-stack hook
 * (updateContext), all invoked with the Tokenizer as `this`.
 */
interface ITokenProps {
    /** Returns false if `next` may not follow this token. */
    expectNext?: (next?: Token) => boolean;
    /** Returns false if this token may not follow `prev`. */
    expectPrev?: (prev?: Token) => boolean;
    /** Mutates the tokenizer's context stack when this token is finished. */
    updateContext?: (prev?: Token) => void;
}
export declare type Token = ITokenProps & {
    /** Human-readable token symbol used in error messages. */
    flag: string;
};
/** Bare name / keyword segment. */
export declare const nameTok: Token;
/** "*" wildcard. */
export declare const starTok: Token;
/** "." path separator. */
export declare const dotTok: Token;
/** "!" exclusion marker in group filters. */
export declare const bangTok: Token;
/** ":" (object-pattern rename, range separator). */
export declare const colonTok: Token;
/** "{" */
export declare const braceLTok: Token;
/** "}" */
export declare const braceRTok: Token;
/** "[" */
export declare const bracketLTok: Token;
/** "]" */
export declare const bracketRTok: Token;
/** "[[" — opens the raw-string (ignore) context. */
export declare const bracketDLTok: Token;
/** "]]" — closes the raw-string (ignore) context. */
export declare const bracketDRTok: Token;
/** "(" */
export declare const parenLTok: Token;
/** ")" */
export declare const parenRTok: Token;
/** "," group-filter separator. */
export declare const commaTok: Token;
/** Raw text captured inside "[[ ... ]]". */
export declare const ignoreTok: Token;
/** "~" expand operator. */
export declare const expandTok: Token;
/** End of input. */
export declare const eofTok: Token;
export {};
|