ts-highlight 0.2.0 → 0.4.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.d.ts +106 -0
- package/dist/index.js +1 -0
- package/package.json +6 -2
- package/.github/workflows/ci.yaml +0 -29
- package/assets/marigolds.webp +0 -0
- package/biome.json +0 -30
- package/rollup.config.js +0 -34
- package/src/__tests__/bench/tokenizerBench.ts +0 -0
- package/src/__tests__/tokenizer.test.ts +0 -59
- package/src/generator/constants.ts +0 -25
- package/src/generator/generate.ts +0 -214
- package/src/generator/index.ts +0 -2
- package/src/generator/types.ts +0 -75
- package/src/highlight/highlight.ts +0 -54
- package/src/highlight/index.ts +0 -1
- package/src/index.ts +0 -2
- package/src/tokenizer/constants.ts +0 -188
- package/src/tokenizer/index.ts +0 -2
- package/src/tokenizer/tokenize.ts +0 -304
- package/src/tokenizer/types.ts +0 -197
- package/tsconfig.json +0 -28
package/src/tokenizer/constants.ts
DELETED
@@ -1,188 +0,0 @@
-import type {
-  TokenSet,
-  SingleOperators,
-  DoubleOperators,
-  TripleOperators,
-  QuadrupleOperator,
-  IdentifierLikeMap,
-} from './types';
-
-// operators
-
-const singleOperatorsInit: SingleOperators = [
-  '=',
-  '+',
-  '-',
-  '*',
-  '/',
-  '%',
-  '~',
-  '^',
-  '.',
-  ':',
-  '|',
-  '&',
-  '?',
-  '!',
-  '<',
-  '>',
-  '{',
-  '}',
-  '[',
-  ']',
-  '(',
-  ')',
-  ';',
-  ',',
-];
-const doubleOperatorsInit: DoubleOperators = [
-  '==',
-  '!=',
-
-  '<=',
-  '>=',
-
-  '++',
-  '--',
-  '**',
-
-  '*=',
-  '/=',
-  '%=',
-
-  '^=',
-  '&=',
-  '|=',
-
-  '&&',
-  '||',
-  '??',
-];
-const tripleOperatorsInit: TripleOperators = [
-  '===',
-  '!==',
-
-  '**=',
-  '<<=',
-  '>>=',
-  '>>>',
-  '&&=',
-  '||=',
-  '??=',
-];
-
-/**
- * `Set` with all javascript one-symbol operators
- *
- * @example '='
- */
-export const singleOperators: TokenSet = new Set(singleOperatorsInit);
-
-/**
- * `Set` with all javascript two-symbol operators
- *
- * @example '++'
- */
-export const doubleOperators: TokenSet = new Set(doubleOperatorsInit);
-
-/**
- * `Set` with all javascript three-symbol operators
- *
- * @example '>>>'
- */
-export const tripleOperators: TokenSet = new Set(tripleOperatorsInit);
-
-/**
- * The javascript four-symbol operator.
- *
- * Always equals '>>>=' on December 18, 2025 ECMAScript Specification
- */
-export const quadrupleOperator: QuadrupleOperator = '>>>=';
-
-// identifier like token types
-/**
- * Object with Identifier like literals and their TokenType.
- *
- * Used to determine correct TokenType.
- *
- *
- *
- * @example
- * ```typescript
- * const unknownIdentifier = 'function';
- *
- * const tokenTypeOfUnknownIdentifier = identifierLikeMap[unknownIdentifier as IdentifierLike];
- *
- * // Output:
- *
- * 'Keyword';
- *
- * ```
- */
-export const identifierLikeMap: IdentifierLikeMap = {
-  NaN: 'NaNLiteral',
-  abstract: 'Keyword',
-  as: 'Instruction',
-  assert: 'Instruction',
-  asserts: 'Instruction',
-  async: 'Instruction',
-  await: 'Instruction',
-  break: 'Instruction',
-  catch: 'Instruction',
-  class: 'Keyword',
-  const: 'Keyword',
-  continue: 'Instruction',
-  debugger: 'Keyword',
-  declare: 'Keyword',
-  default: 'Instruction',
-  delete: 'Keyword',
-  do: 'Instruction',
-  enum: 'Keyword',
-  export: 'Instruction',
-  false: 'BooleanLiteral',
-  finally: 'Instruction',
-  for: 'Instruction',
-  function: 'Keyword',
-  implements: 'Keyword',
-  import: 'Instruction',
-  in: 'Keyword',
-  instanceof: 'Keyword',
-  interface: 'Keyword',
-  is: 'Instruction',
-  keyof: 'Keyword',
-  let: 'Keyword',
-  new: 'Keyword',
-  package: 'Instruction',
-  this: 'Keyword',
-  throw: 'Instruction',
-  true: 'BooleanLiteral',
-  try: 'Instruction',
-  type: 'Keyword',
-  typeof: 'Keyword',
-  var: 'Keyword',
-  void: 'Keyword',
-  while: 'Instruction',
-  with: 'Instruction',
-  yield: 'Instruction',
-};
-
-// regular expresions (RegExp)
-
-/**
- * RegExp that is used to match javascript identifier start symbol
- *
- */
-
-export const IDENTIFIER_START_REGEXP: RegExp = /^[a-zA-Zа-яА-Я_$]$/;
-
-/**
- * RegExp that is used to match javascript identifier symbols after the first symbol
- */
-
-export const IDENTIFIER_REGEXP: RegExp = /^[a-zA-Zа-яА-Я_$0-9]$/;
-
-/**
- *
- * RegExp that is used to match any number
- */
-export const NUMBER_REGEXP: RegExp = /^[0-9]$/;
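A minimal sketch of how this map was queried, mirroring the JSDoc example above and the lookup in the removed tokenize.ts; the helper name `classify` is hypothetical and the relative import paths assume the 0.2.0 source layout:

import { identifierLikeMap } from './constants';
import type { IdentifierLike, TokenType } from './types';

// Hypothetical helper: identifiers that are not keywords, instructions,
// or boolean/NaN literals fall back to the plain 'Identifier' token type.
const classify = (identifier: string): TokenType =>
  identifierLikeMap[identifier as IdentifierLike] ?? 'Identifier';

classify('function'); // 'Keyword'
classify('await');    // 'Instruction'
classify('foo');      // 'Identifier'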

package/src/tokenizer/tokenize.ts
DELETED
@@ -1,304 +0,0 @@
-import {
-  IDENTIFIER_START_REGEXP,
-  IDENTIFIER_REGEXP,
-  NUMBER_REGEXP,
-  singleOperators,
-  doubleOperators,
-  tripleOperators,
-  quadrupleOperator,
-  identifierLikeMap,
-} from './constants';
-
-import type { Token, IdentifierLike } from './types';
-
-/**
- *
- * *Tokenizer* or *Lexer* function.
- *
- *
- * Divides `source` to tokens.
- *
- * @param {string} source - javascript or typescript source code to tokenize.
- *
- * @returns {Token[]} array with tokens from `source`.
- */
-export const tokenize = (source: string): Token[] => {
-  const tokens: Token[] = [];
-
-  const sourceLength = source.length;
-
-  let pos = 0;
-  main: while (pos < sourceLength) {
-    if (source[pos] === ' ' || source[pos] === '\t') {
-      const startPos = pos;
-
-      pos++;
-
-      while (
-        pos < sourceLength &&
-        (source[pos] === ' ' || source[pos] === '\t')
-      ) {
-        pos++;
-      }
-
-      tokens[tokens.length] = {
-        type: 'WhiteSpace',
-
-        value: source.slice(startPos, pos),
-
-        start: startPos,
-
-        end: pos,
-      };
-
-      continue main;
-    }
-
-    if (source[pos] === '\n' || source[pos] === '\r') {
-      const startPos = pos;
-
-      if (source[pos] === '\r') {
-        pos++;
-      }
-
-      pos++;
-
-      tokens[tokens.length] = {
-        type: 'LineDivision',
-
-        value: '\n',
-
-        start: startPos,
-        end: pos,
-      };
-
-      continue main;
-    }
-
-    // literals
-    if (IDENTIFIER_START_REGEXP.test(source[pos])) {
-      const startPos = pos;
-      pos++;
-
-      while (pos < sourceLength && IDENTIFIER_REGEXP.test(source[pos])) {
-        pos++;
-      }
-
-      const identifier = source.slice(startPos, pos);
-
-      tokens[tokens.length] = {
-        type:
-          identifierLikeMap[identifier as IdentifierLike] ??
-          'Identifier',
-        value: identifier,
-        start: startPos,
-        end: pos,
-      };
-
-      continue main;
-    }
-
-    if (source[pos] === "'" || source[pos] === '"' || source[pos] === '`') {
-      const startPos = pos;
-      const startQuote = source[pos];
-
-      pos++;
-
-      while (pos < sourceLength && source[pos] !== startQuote) {
-        pos++;
-      }
-
-      pos++;
-
-      tokens[tokens.length] = {
-        type: 'StringLiteral',
-        value: source.slice(startPos, pos),
-        start: startPos,
-        end: pos,
-      };
-
-      continue main;
-    }
-    if (NUMBER_REGEXP.test(source[pos])) {
-      const startPos = pos;
-
-      while (pos < sourceLength && NUMBER_REGEXP.test(source[pos])) {
-        pos++;
-      }
-
-      tokens[tokens.length] = {
-        type: 'NumberLiteral',
-        value: source.slice(startPos, pos),
-        start: startPos,
-        end: pos,
-      };
-
-      continue main;
-    }
-
-    // comments
-    if (source[pos] === '/') {
-      const startPos = pos;
-
-      pos++;
-
-      if (source[pos] === '/') {
-        pos++;
-
-        while (
-          pos < sourceLength &&
-          source[pos] !== '\r' &&
-          source[pos] !== '\n'
-        ) {
-          pos++;
-        }
-
-        if (source[pos] === '\r') {
-          pos += 2;
-        }
-
-        tokens[tokens.length] = {
-          type: 'Comment',
-
-          value: source.slice(startPos, pos),
-          start: startPos,
-          end: pos,
-        };
-
-        tokens[tokens.length] = {
-          type: 'LineDivision',
-
-          value: '\n',
-
-          start: startPos,
-          end: pos,
-        };
-
-        continue main;
-      }
-
-      if (source[pos] === '*') {
-        pos++;
-
-        let lastCommentStart = startPos;
-
-        while (
-          pos < sourceLength &&
-          !(source[pos] === '*' && source[pos + 1] === '/')
-        ) {
-          if (source[pos] === '\n' || source[pos] === '\r') {
-            tokens[tokens.length] = {
-              type: 'Comment',
-
-              value: source.slice(lastCommentStart, pos),
-
-              start: startPos,
-              end: pos,
-            };
-            tokens[tokens.length] = {
-              type: 'LineDivision',
-
-              value: '\n',
-
-              start: pos,
-              end: pos + 1,
-            };
-
-            if (source[pos] === '\r') {
-              pos++;
-            }
-
-            pos++;
-            lastCommentStart = pos;
-          }
-
-          pos++;
-        }
-
-        pos += 2;
-
-        tokens[tokens.length] = {
-          type: 'Comment',
-          value: source.slice(lastCommentStart, pos),
-          start: startPos,
-          end: pos,
-        };
-
-        continue main;
-      }
-    }
-
-    // operators
-    if (
-      source[pos] +
-        source[pos + 1] +
-        source[pos + 2] +
-        source[pos + 3] ===
-      quadrupleOperator
-    ) {
-      const startPos = pos;
-
-      pos += 4;
-
-      tokens[tokens.length] = {
-        type: 'Operator',
-        value: source.slice(startPos, pos),
-        start: startPos,
-        end: pos,
-      };
-
-      continue main;
-    }
-
-    if (
-      tripleOperators.has(source[pos] + source[pos + 1] + source[pos + 2])
-    ) {
-      const startPos = pos;
-
-      pos += 3;
-
-      tokens[tokens.length] = {
-        type: 'Operator',
-        value: source.slice(startPos, pos),
-        start: startPos,
-        end: pos,
-      };
-
-      continue main;
-    }
-
-    if (doubleOperators.has(source[pos] + source[pos + 1])) {
-      const startPos = pos;
-
-      pos += 2;
-
-      tokens[tokens.length] = {
-        type: 'Operator',
-        value: source.slice(startPos, pos),
-        start: startPos,
-        end: pos,
-      };
-
-      continue main;
-    }
-
-    if (singleOperators.has(source[pos])) {
-      const startPos = pos;
-
-      pos++;
-
-      tokens[tokens.length] = {
-        type: 'Operator',
-        value: source.slice(startPos, pos),
-        start: startPos,
-        end: pos,
-      };
-
-      continue main;
-    }
-
-    pos++;
-  }
-
-  return tokens;
-};
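A usage sketch for the removed tokenizer, showing the token shape (type, value, start, end) it produced; the relative import path assumes the 0.2.0 source layout and is not part of the published diff:

import { tokenize } from './tokenize';

const tokens = tokenize('const n = 1;');

// Each token carries its type, raw text, and half-open [start, end) offsets,
// since every branch above records end: pos after slicing source.
for (const token of tokens) {
  console.log(token.type, JSON.stringify(token.value), token.start, token.end);
}
// First few tokens: Keyword "const", WhiteSpace " ", Identifier "n", ...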

package/src/tokenizer/types.ts
DELETED
@@ -1,197 +0,0 @@
-type LiteralTokenType =
-  | 'NumberLiteral'
-  | 'StringLiteral'
-  | 'BooleanLiteral'
-  | 'NaNLiteral';
-
-/**
- * Variety of `Token` types
- */
-export type TokenType =
-  | 'Identifier'
-  | 'Keyword'
-  | 'Operator'
-  | 'WhiteSpace'
-  | 'Comment'
-  | 'LineDivision'
-  | 'Instruction'
-  | LiteralTokenType;
-
-/**
- *
- */
-export type Token = {
-  type: TokenType;
-  value: string;
-
-  /**
-   * Start position of token value in source code
-   */
-  start: number;
-
-  /**
-   * The end of token value in source code
-   */
-  end: number;
-};
-
-export type SingleOperators = [
-  '=',
-  '+',
-  '-',
-  '*',
-  '/',
-  '%',
-  '~',
-  '^',
-  '.',
-  ':',
-  '|',
-  '&',
-  '?',
-  '!',
-  '<',
-  '>',
-
-  '{',
-  '}',
-  '[',
-  ']',
-  '(',
-  ')',
-
-  ';',
-  ','
-];
-export type SingleOperator = SingleOperators[number];
-
-export type DoubleOperators = [
-  '==',
-
-  '!=',
-  '<=',
-  '>=',
-  '++',
-  '--',
-  '**',
-  '*=',
-  '/=',
-  '%=',
-  '^=',
-  '&=',
-  '|=',
-
-  '&&',
-  '||',
-  '??'
-];
-export type DoubleOperator = DoubleOperators[number];
-
-export type TripleOperators = [
-  '===',
-  '!==',
-  '**=',
-
-  '<<=',
-  '>>=',
-  '>>>',
-
-  '&&=',
-  '||=',
-  '??='
-];
-export type TripleOperator = TripleOperators[number];
-
-export type QuadrupleOperator = '>>>=';
-
-export type JSKeywords = [
-  'var',
-  'let',
-
-  'const',
-
-  'typeof',
-
-  'class',
-  'in',
-
-  'new',
-  'instanceof',
-
-  'function',
-  'void',
-  'delete',
-
-  'this',
-
-  'debugger'
-];
-export type JSKeyword = JSKeywords[number];
-export type TSKeywords = [
-  'keyof',
-  'abstract',
-  'interface',
-  'enum',
-  'type',
-  'implements',
-  'declare'
-];
-export type TSKeyword = TSKeywords[number];
-
-export type Keywords = [...JSKeywords, ...TSKeywords];
-export type Keyword = Keywords[number];
-
-type JSInstruction =
-  | 'for'
-  | 'do'
-  | 'while'
-  | 'continue'
-  | 'break'
-  | 'import'
-  | 'export'
-  | 'package'
-  | 'try'
-  | 'catch'
-  | 'finally'
-  | 'async'
-  | 'await'
-  | 'yield'
-  | 'with'
-  | 'assert'
-  | 'default'
-  | 'throw';
-
-type TSInstruction = 'as' | 'asserts' | 'is';
-export type Instruction = JSInstruction | TSInstruction;
-
-type Literal = 'true' | 'false' | 'NaN';
-
-/**
- * Token Types that are like `Identifier` TokenType
- */
-export type IdentifierLike = Keyword | Instruction | Literal;
-
-/**
- * Record with Token Types that are like `Identifier` TokenType.
- * Used to determine correct TokenType
- */
-export type IdentifierLikeMap = {
-  [K in IdentifierLike]: Extract<
-    TokenType,
-    'Keyword' | 'Instruction' | 'BooleanLiteral' | 'NaNLiteral'
-  >;
-};
-
-/**
- *
- * Type that contains `TokenType` values to be checked in tokenizer.
- *
- *
- * @example
- * ```typescript
- * const singleOperatorsInit: SingleOperators = ['=', '+', '-' ...];
- *
- * const singleOperators: TokenSet = new Set(singleOperatorsInit);
- * ```
- */
-export type TokenSet = Set<string>;
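A short sketch of the tuple-indexing pattern these aliases rely on, and a possible reading of why TokenSet is the wide Set<string> rather than a set of the operator unions (the tokenizer above probes it with arbitrary source slices); the names assign and probe are illustrative only:

import type { SingleOperator, TokenSet } from './types';

// Indexing a tuple type with `number` yields the union of its element types,
// so SingleOperator is '=' | '+' | '-' | ... | ','.
const assign: SingleOperator = '=';

// TokenSet stays Set<string> so raw source slices can be tested directly;
// a Set<SingleOperator> would only accept SingleOperator arguments in .has().
const probe: TokenSet = new Set<string>([assign, '+']);
probe.has('>>>='); // allowed: any string may be tested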

package/tsconfig.json
DELETED
@@ -1,28 +0,0 @@
-{
-  "compilerOptions": {
-    "lib": ["ESNext"],
-    "target": "ESNext",
-    "module": "esnext",
-    "moduleDetection": "force",
-
-    "moduleResolution": "bundler",
-
-    "verbatimModuleSyntax": true,
-
-    "noEmit": true,
-
-    "strict": true,
-    "skipLibCheck": true,
-    "noFallthroughCasesInSwitch": true,
-    "noUncheckedIndexedAccess": false,
-    "noPropertyAccessFromIndexSignature": true,
-    "noImplicitOverride": true,
-
-    "noUnusedLocals": true,
-    "noUnusedParameters": true,
-
-    "outDir": "dist"
-  },
-
-  "exclude": ["node_modules", "dist"]
-}
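The removed config type-checked only ("noEmit": true, with bundling handled by the also-removed rollup.config.js) and enabled "verbatimModuleSyntax". Under that flag, imports used purely for types must carry the `type` modifier, which is why the deleted sources above write `import type { ... } from './types'`. A minimal sketch of the rule, with paths assuming the 0.2.0 layout:

// Rejected by tsc when "verbatimModuleSyntax" is enabled, because Token is a type:
// import { Token } from './src/tokenizer/types';

// Accepted: the type-only import is marked explicitly and erased on emit.
import type { Token } from './src/tokenizer/types';
import { tokenize } from './src/tokenizer/tokenize';

const tokens: Token[] = tokenize('let x = 1;');
console.log(tokens.length);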