meriyah 4.5.0 → 5.0.0
This diff compares the published contents of two package versions as they appear in their respective public registries. It is provided for informational purposes only.
- package/CHANGELOG.md +340 -449
- package/README.md +22 -28
- package/dist/meriyah.amd.js +8916 -8806
- package/dist/meriyah.amd.min.js +1 -1
- package/dist/meriyah.cjs +8916 -8806
- package/dist/meriyah.cjs.js +8916 -8806
- package/dist/meriyah.cjs.min.js +1 -1
- package/dist/meriyah.esm.js +8916 -8806
- package/dist/meriyah.esm.min.js +1 -1
- package/dist/meriyah.esm.min.mjs +1 -1
- package/dist/meriyah.esm.mjs +8916 -8806
- package/dist/meriyah.iife.js +8916 -8806
- package/dist/meriyah.iife.min.js +1 -1
- package/dist/meriyah.min.cjs +1 -1
- package/dist/meriyah.system.js +8916 -8806
- package/dist/meriyah.system.min.js +1 -1
- package/dist/meriyah.umd.cjs +8916 -8806
- package/dist/meriyah.umd.es5.js +8925 -8830
- package/dist/meriyah.umd.es5.min.js +1 -1
- package/dist/meriyah.umd.js +8916 -8806
- package/dist/meriyah.umd.min.cjs +1 -1
- package/dist/meriyah.umd.min.js +1 -1
- package/dist/src/chars.d.ts +135 -135
- package/dist/src/common.d.ts +201 -200
- package/dist/src/common.d.ts.map +1 -1
- package/dist/src/errors.d.ts +188 -187
- package/dist/src/errors.d.ts.map +1 -1
- package/dist/src/estree.d.ts +515 -507
- package/dist/src/estree.d.ts.map +1 -1
- package/dist/src/lexer/charClassifier.d.ts +24 -24
- package/dist/src/lexer/charClassifier.d.ts.map +1 -1
- package/dist/src/lexer/comments.d.ts +14 -14
- package/dist/src/lexer/common.d.ts +26 -26
- package/dist/src/lexer/decodeHTML.d.ts +1 -1
- package/dist/src/lexer/decodeHTML.d.ts.map +1 -1
- package/dist/src/lexer/identifier.d.ts +8 -8
- package/dist/src/lexer/index.d.ts +9 -9
- package/dist/src/lexer/jsx.d.ts +6 -6
- package/dist/src/lexer/jsx.d.ts.map +1 -1
- package/dist/src/lexer/numeric.d.ts +5 -5
- package/dist/src/lexer/regexp.d.ts +3 -3
- package/dist/src/lexer/scan.d.ts +6 -6
- package/dist/src/lexer/scan.d.ts.map +1 -1
- package/dist/src/lexer/string.d.ts +12 -12
- package/dist/src/lexer/template.d.ts +4 -4
- package/dist/src/meriyah.d.ts +7 -7
- package/dist/src/meriyah.d.ts.map +1 -1
- package/dist/src/parser.d.ts +119 -118
- package/dist/src/parser.d.ts.map +1 -1
- package/dist/src/token.d.ts +167 -167
- package/dist/src/unicode.d.ts +5 -5
- package/package.json +32 -27
- package/src/common.ts +42 -49
- package/src/errors.ts +3 -1
- package/src/estree.ts +11 -1
- package/src/lexer/comments.ts +1 -1
- package/src/lexer/decodeHTML.ts +3 -5
- package/src/lexer/identifier.ts +4 -4
- package/src/lexer/jsx.ts +13 -12
- package/src/lexer/numeric.ts +2 -2
- package/src/lexer/regexp.ts +3 -3
- package/src/lexer/scan.ts +8 -6
- package/src/lexer/string.ts +2 -2
- package/src/meriyah.ts +1 -2
- package/src/parser.ts +681 -533
package/src/common.ts
CHANGED

@@ -28,15 +28,15 @@ export const enum Context {
   InIteration = 1 << 17,
   SuperProperty = 1 << 18,
   SuperCall = 1 << 19,
-  InYieldContext = 1 <<
-  InAwaitContext = 1 <<
-  InArgumentList = 1 <<
-  InConstructor = 1 <<
-  InMethod = 1 <<
-  AllowNewTarget = 1 <<
-  DisallowIn = 1 <<
-  AllowEscapedKeyword = 1 <<
-  OptionsUniqueKeyInPattern = 1 <<
+  InYieldContext = 1 << 20,
+  InAwaitContext = 1 << 21,
+  InArgumentList = 1 << 22,
+  InConstructor = 1 << 23,
+  InMethod = 1 << 24,
+  AllowNewTarget = 1 << 25,
+  DisallowIn = 1 << 26,
+  AllowEscapedKeyword = 1 << 27,
+  OptionsUniqueKeyInPattern = 1 << 28
 }

 /**
@@ -91,7 +91,7 @@ export const enum Origin {
   Declaration = 1 << 3,
   Arrow = 1 << 4,
   ForStatement = 1 << 5,
-  Export = 1 << 6
+  Export = 1 << 6
 }

 /**
@@ -131,7 +131,7 @@ export const enum Flags {
   SimpleParameterList = 1 << 7,
   HasStrictReserved = 1 << 8,
   StrictEvalArguments = 1 << 9,
-  DisallowCall
+  DisallowCall = 1 << 10,
   HasOptionalChaining = 1 << 11
 }

@@ -163,13 +163,16 @@ export const enum ScopeKind {
   FunctionRoot = 1 << 8,
   FunctionParams = 1 << 9,
   ArrowParams = 1 << 10,
-  CatchIdentifier = 1 << 11
+  CatchIdentifier = 1 << 11
 }

 /**
  * The type of the `onComment` option.
  */
-export type OnComment =
+export type OnComment =
+  | void
+  | Comment[]
+  | ((type: string, value: string, start: number, end: number, loc: SourceLocation) => any);

 /**
  * The type of the `onInsertedSemicolon` option.
@@ -215,7 +218,8 @@ export interface ParserState {
   colPos: number;
   linePos: number;
   end: number;
-
+  getToken(): Token;
+  setToken(token: Token): Token;
   onComment: any;
   onInsertedSemicolon: any;
   onToken: any;
@@ -243,12 +247,8 @@ export interface ParserState {
  */

 export function matchOrInsertSemicolon(parser: ParserState, context: Context): void {
-
-
-    (parser.flags & Flags.NewLine) === 0 &&
-    (parser.token & Token.IsAutoSemicolon) !== Token.IsAutoSemicolon
-  ) {
-    report(parser, Errors.UnexpectedToken, KeywordDescTable[parser.token & Token.Type]);
+  if ((parser.flags & Flags.NewLine) === 0 && (parser.getToken() & Token.IsAutoSemicolon) !== Token.IsAutoSemicolon) {
+    report(parser, Errors.UnexpectedToken, KeywordDescTable[parser.getToken() & Token.Type]);
   }

   if (!consumeOpt(parser, context, Token.Semicolon)) {
@@ -259,7 +259,7 @@ export function matchOrInsertSemicolon(parser: ParserState, context: Context): void {

 export function isValidStrictMode(parser: ParserState, index: number, tokenPos: number, tokenValue: string): 0 | 1 {
   if (index - tokenPos < 13 && tokenValue === 'use strict') {
-    if ((parser.
+    if ((parser.getToken() & Token.IsAutoSemicolon) === Token.IsAutoSemicolon || parser.flags & Flags.NewLine) {
       return 1;
     }
   }
@@ -275,7 +275,7 @@ export function isValidStrictMode(parser: ParserState, index: number, tokenPos:
  * @param token The type of token to consume
  */
 export function optionalBit(parser: ParserState, context: Context, t: Token): 0 | 1 {
-  if (parser.
+  if (parser.getToken() !== t) return 0;
   nextToken(parser, context);
   return 1;
 }
@@ -289,7 +289,7 @@ export function optionalBit(parser: ParserState, context: Context, t: Token): 0
  * @param token The type of token to consume
  */
 export function consumeOpt(parser: ParserState, context: Context, t: Token): boolean {
-  if (parser.
+  if (parser.getToken() !== t) return false;
   nextToken(parser, context);
   return true;
 }
@@ -303,7 +303,7 @@ export function consumeOpt(parser: ParserState, context: Context, t: Token): boolean {
  * @param t The type of token to consume
  */
 export function consume(parser: ParserState, context: Context, t: Token): void {
-  if (parser.
+  if (parser.getToken() !== t) report(parser, Errors.ExpectedToken, KeywordDescTable[t & Token.Type]);
   nextToken(parser, context);
 }

@@ -316,21 +316,23 @@ export function consume(parser: ParserState, context: Context, t: Token): void {
  */
 export function reinterpretToPattern(state: ParserState, node: any): void {
   switch (node.type) {
-    case 'ArrayExpression':
+    case 'ArrayExpression': {
       node.type = 'ArrayPattern';
-      const elements = node
+      const { elements } = node;
       for (let i = 0, n = elements.length; i < n; ++i) {
         const element = elements[i];
         if (element) reinterpretToPattern(state, element);
       }
       return;
-
+    }
+    case 'ObjectExpression': {
       node.type = 'ObjectPattern';
-      const properties = node
+      const { properties } = node;
       for (let i = 0, n = properties.length; i < n; ++i) {
         reinterpretToPattern(state, properties[i]);
       }
       return;
+    }
     case 'AssignmentExpression':
       node.type = 'AssignmentPattern';
       if (node.operator !== '=') report(state, Errors.InvalidDestructuringTarget);
@@ -343,7 +345,7 @@ export function reinterpretToPattern(state: ParserState, node: any): void {
     case 'SpreadElement':
       node.type = 'RestElement';
       reinterpretToPattern(state, node.argument);
-
+    // No default
   }
 }

@@ -363,9 +365,7 @@ export function validateBindingIdentifier(
   t: Token,
   skipEvalArgCheck: 0 | 1
 ): void {
-
   if (context & Context.Strict) {
-
     if ((t & Token.FutureReserved) === Token.FutureReserved) {
       report(parser, Errors.UnexpectedStrictReserved);
     }
@@ -395,14 +395,8 @@ export function validateBindingIdentifier(
   }
 }

-export function validateFunctionName(
-  parser: ParserState,
-  context: Context,
-  t: Token
-): void {
-
+export function validateFunctionName(parser: ParserState, context: Context, t: Token): void {
   if (context & Context.Strict) {
-
     if ((t & Token.FutureReserved) === Token.FutureReserved) {
       report(parser, Errors.UnexpectedStrictReserved);
     }
@@ -413,11 +407,11 @@

     if (t === Token.EscapedFutureReserved) {
       report(parser, Errors.InvalidEscapedKeyword);
-
+    }

     if (t === Token.EscapedReserved) {
       report(parser, Errors.InvalidEscapedKeyword);
-
+    }
   }

   if ((t & Token.Reserved) === Token.Reserved) {
@@ -569,7 +563,7 @@ export function createArrowHeadParsingScope(parser: ParserState, context: Context

 /**
  * Record duplicate binding errors that may occur in a arrow head or function parameters
- *
+ *
  * @param parser Parser state
  * @param type Errors type
  */
@@ -664,6 +658,7 @@ export function addBlockName(
     value & BindingKind.FunctionLexical &&
     origin & Origin.BlockStatement
   ) {
+    // No op
   } else {
     report(parser, Errors.DuplicateBinding, name);
   }
@@ -671,7 +666,8 @@ export function addBlockName(

   if (
     scope.type & ScopeKind.FunctionBody &&
-    (
+    (scope as any).parent['#' + name] &&
+    ((scope as any).parent['#' + name] & BindingKind.Empty) === 0
   ) {
     report(parser, Errors.DuplicateBinding, name);
   }
@@ -718,6 +714,7 @@ export function addVarName(
     ((kind & BindingKind.FunctionStatement && value & BindingKind.LexicalOrFunction) ||
       (value & BindingKind.FunctionStatement && kind & BindingKind.LexicalOrFunction))
   ) {
+    // No op
   } else {
     report(parser, Errors.DuplicateBinding, name);
   }
@@ -776,7 +773,7 @@ export function addBindingToExports(parser: ParserState, name: string): void {
 }

 export function pushComment(context: Context, array: any[]): any {
-  return function(type: string, value: string, start: number, end: number, loc: SourceLocation) {
+  return function (type: string, value: string, start: number, end: number, loc: SourceLocation) {
     const comment: any = {
       type,
       value
@@ -795,7 +792,7 @@ export function pushComment(context: Context, array: any[]): any {
 }

 export function pushToken(context: Context, array: any[]): any {
-  return function(token: string, start: number, end: number, loc: SourceLocation) {
+  return function (token: string, start: number, end: number, loc: SourceLocation) {
     const tokens: any = {
       token
     };
@@ -827,11 +824,7 @@ export function isValidIdentifier(context: Context, t: Token): boolean {
   );
 }

-export function classifyIdentifier(
-  parser: ParserState,
-  context: Context,
-  t: Token
-): any {
+export function classifyIdentifier(parser: ParserState, context: Context, t: Token): any {
   if ((t & Token.IsEvalOrArguments) === Token.IsEvalOrArguments) {
     if (context & Context.Strict) report(parser, Errors.StrictEvalArguments);
     parser.flags |= Flags.StrictEvalArguments;

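Two of the changes above are visible from meriyah's public options: `OnComment` now spells out that the option can be a collector array or a callback, and the parser's current token is read and written through `getToken()`/`setToken()` instead of a plain `token` field. A minimal sketch of the `onComment` option in both forms (the parsed snippet is arbitrary):

```ts
import { parseScript } from 'meriyah';

// Callback form: the parameters mirror the OnComment type above.
const seen: { type: string; value: string }[] = [];
parseScript('/* banner */ const x = 1; // trailing', {
  onComment: (type, value) => {
    // e.g. { type: 'MultiLine', value: ' banner ' }, { type: 'SingleLine', value: ' trailing' }
    seen.push({ type, value });
  }
});

// Collector form: meriyah pushes comment objects into the array (see pushComment above).
const comments: any[] = [];
parseScript('// hello', { onComment: comments });
```
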
package/src/errors.ts
CHANGED

@@ -138,6 +138,7 @@ export const enum Errors {
   InvalidNestedStatement,
   UnknownLabel,
   InvalidImportTail,
+  InvalidJSONImportBinding,
   ImportNotOneArg,
   InvalidImportNew,
   InvalidSpreadInImport,
@@ -316,6 +317,7 @@ export const errorMessages: {
   [Errors.InvalidNestedStatement]: 'continue statement must be nested within an iteration statement',
   [Errors.UnknownLabel]: "Undefined label '%0'",
   [Errors.InvalidImportTail]: 'Trailing comma is disallowed inside import(...) arguments',
+  [Errors.InvalidJSONImportBinding]: 'Invalid binding in JSON import',
   [Errors.ImportNotOneArg]: 'import() requires exactly one argument',
   [Errors.InvalidImportNew]: 'Cannot use new with import(...)',
   [Errors.InvalidSpreadInImport]: '... is not allowed in import()',
@@ -356,7 +358,7 @@ export class ParseError extends SyntaxError {
   public line: number;
   public column: number;
   public description: string;
-
+
   constructor(startindex: number, line: number, column: number, type: Errors, ...params: string[]) {
     const message =
       '[' + line + ':' + column + ']: ' + errorMessages[type].replace(/%(\d+)/g, (_: string, i: number) => params[i]);

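The new `InvalidJSONImportBinding` diagnostic accompanies the import-attributes support added elsewhere in this release. A hedged sketch of input that would presumably trigger it, assuming the check follows the JSON-modules proposal (a JSON module exposes only a default export, so named bindings cannot be satisfied); whether the `next` flag is required for this syntax is an assumption:

```ts
import { parseModule } from 'meriyah';

// Expected to parse: a default binding for a JSON import.
parseModule(`import config from './config.json' with { type: 'json' };`, { next: true });

// Expected to be rejected with "Invalid binding in JSON import":
try {
  parseModule(`import { port } from './config.json' with { type: 'json' };`, { next: true });
} catch (e) {
  console.log((e as Error).message);
}
```
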
package/src/estree.ts
CHANGED

@@ -82,6 +82,7 @@ export type Node =
   | Import
   | ImportDeclaration
   | ImportDefaultSpecifier
+  | ImportAttribute
   | ImportNamespaceSpecifier
   | ImportSpecifier
   | JSXNamespacedName
@@ -338,6 +339,7 @@ export interface BreakStatement extends _Node {
 export interface ImportExpression extends _Node {
   type: 'ImportExpression';
   source: Expression;
+  options?: Expression | null;
 }

 export interface ChainExpression extends _Node {
@@ -371,7 +373,7 @@ export interface PropertyDefinition extends _Node {
   type: 'PropertyDefinition';
   key: PrivateIdentifier | Expression;
   value: any;
-  decorators?: Decorator[]
+  decorators?: Decorator[];
   computed: boolean;
   static: boolean;
 }
@@ -424,6 +426,7 @@ export interface ExportAllDeclaration extends _Node {
   type: 'ExportAllDeclaration';
   source: Literal;
   exported: Identifier | null;
+  attributes?: ImportAttribute[];
 }

 export interface ExportDefaultDeclaration extends _Node {
@@ -500,6 +503,13 @@ export interface ImportDeclaration extends _Node {
   type: 'ImportDeclaration';
   source: Literal;
   specifiers: ImportClause[];
+  attributes?: ImportAttribute[];
+}
+
+export interface ImportAttribute extends _Node {
+  type: 'ImportAttribute';
+  key: Identifier | Literal;
+  value: Literal;
 }

 export interface ImportDefaultSpecifier extends _Node {

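Taken together, the new `ImportAttribute` node, the `attributes` arrays, and the `options` field on `ImportExpression` describe the AST produced for import-attributes syntax. A sketch of the shape implied by the interfaces above (whether the `next` option is required for this syntax is an assumption here):

```ts
import { parseModule } from 'meriyah';

const ast = parseModule(`import data from './data.json' with { type: 'json' };`, { next: true });

// Expected shape of ast.body[0], per the interfaces above:
// {
//   type: 'ImportDeclaration',
//   specifiers: [{ type: 'ImportDefaultSpecifier', local: { type: 'Identifier', name: 'data' } }],
//   source: { type: 'Literal', value: './data.json' },
//   attributes: [{
//     type: 'ImportAttribute',
//     key: { type: 'Identifier', name: 'type' },
//     value: { type: 'Literal', value: 'json' }
//   }]
// }
console.log(JSON.stringify(ast.body[0], null, 2));
```
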
package/src/lexer/comments.ts
CHANGED

@@ -22,7 +22,7 @@ export const CommentTypes = ['SingleLine', 'MultiLine', 'HTMLOpen', 'HTMLClose',
 export function skipHashBang(parser: ParserState): void {
   // HashbangComment ::
   //   #! SingleLineCommentChars_opt
-  const source = parser
+  const { source } = parser;
   if (parser.currentChar === Chars.Hash && source.charCodeAt(parser.index + 1) === Chars.Exclamation) {
     advanceChar(parser);
     advanceChar(parser);

package/src/lexer/decodeHTML.ts
CHANGED

@@ -2163,11 +2163,9 @@ export function decodeHTMLStrict(text: string): string {
   return text.replace(/&(?:[a-zA-Z]+|#[xX][\da-fA-F]+|#\d+);/g, (key) => {
     if (key.charAt(1) === '#') {
       const secondChar = key.charAt(2);
-
-
-
-        : parseInt(key.slice(2), 10);
-      return decodeCodePoint(codePoint);
+      const codePoint =
+        secondChar === 'X' || secondChar === 'x' ? parseInt(key.slice(3), 16) : parseInt(key.slice(2), 10);
+      return decodeCodePoint(codePoint);
     }
     return entities[key.slice(1, -1)] || key;
   });

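The rewritten branch computes the code point of a numeric character reference: hex references (`&#x…;`) are parsed in base 16 and decimal references (`&#…;`) in base 10. A standalone sketch of that logic, using `String.fromCodePoint` as a stand-in for meriyah's internal `decodeCodePoint` helper:

```ts
// Hypothetical helper mirroring the numeric-entity branch shown above.
function numericEntityToString(key: string): string {
  const secondChar = key.charAt(2);
  const codePoint =
    secondChar === 'X' || secondChar === 'x' ? parseInt(key.slice(3), 16) : parseInt(key.slice(2), 10);
  return String.fromCodePoint(codePoint);
}

numericEntityToString('&#65;'); // 'A'
numericEntityToString('&#x1F600;'); // '😀'
```
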
package/src/lexer/identifier.ts
CHANGED

@@ -12,7 +12,7 @@ import { report, reportScannerError, Errors } from '../errors';
  * @param context Context masks
  */
 export function scanIdentifier(parser: ParserState, context: Context, isValidAsKeyword: 0 | 1): Token {
-  while (isIdPart[advanceChar(parser)])
+  while (isIdPart[advanceChar(parser)]);
   parser.tokenValue = parser.source.slice(parser.tokenPos, parser.index);

   return parser.currentChar !== Chars.Backslash && parser.currentChar <= 0x7e
@@ -69,7 +69,7 @@ export function scanIdentifierSlowCase(
     parser.tokenValue += parser.source.slice(start, parser.index);
   }

-  const length = parser.tokenValue
+  const { length } = parser.tokenValue;

   if (isValidAsKeyword && length >= 2 && length <= 11) {
     const token: Token | undefined = descKeywordTable[parser.tokenValue];
@@ -110,8 +110,8 @@ export function scanIdentifierSlowCase(
     return context & Context.AllowEscapedKeyword
       ? Token.AnyIdentifier
       : context & Context.InYieldContext
-
-
+        ? Token.EscapedReserved
+        : token;
   }

   // async is not reserved; it can be used as a variable name

package/src/lexer/jsx.ts
CHANGED

@@ -16,11 +16,12 @@ export function scanJSXAttributeValue(parser: ParserState, context: Context): Token {
   parser.startPos = parser.tokenPos = parser.index;
   parser.startColumn = parser.colPos = parser.column;
   parser.startLine = parser.linePos = parser.line;
-  parser.
+  parser.setToken(
     CharTypes[parser.currentChar] & CharFlags.StringLiteral
       ? scanJSXString(parser, context)
-      : scanSingleToken(parser, context, LexerState.None)
-
+      : scanSingleToken(parser, context, LexerState.None)
+  );
+  return parser.getToken();
 }

 /**
@@ -55,7 +56,7 @@ export function scanJSXToken(parser: ParserState, context: Context): Token {
   parser.startColumn = parser.colPos = parser.column;
   parser.startLine = parser.linePos = parser.line;

-  if (parser.index >= parser.end) return
+  if (parser.index >= parser.end) return parser.setToken(Token.EOF);

   const token = TokenLookup[parser.source.charCodeAt(parser.index)];

@@ -65,9 +66,9 @@ export function scanJSXToken(parser: ParserState, context: Context): Token {
       advanceChar(parser);
       if (parser.currentChar === Chars.Slash) {
         advanceChar(parser);
-        parser.
+        parser.setToken(Token.JSXClose);
       } else {
-        parser.
+        parser.setToken(Token.LessThan);
       }

       break;
@@ -75,7 +76,7 @@ export function scanJSXToken(parser: ParserState, context: Context): Token {
     // '{'
     case Token.LeftBrace: {
       advanceChar(parser);
-      parser.
+      parser.setToken(Token.LeftBrace);
       break;
     }
     default: {
@@ -100,11 +101,11 @@ export function scanJSXToken(parser: ParserState, context: Context): Token {
       const raw = parser.source.slice(parser.tokenPos, parser.index);
       if (context & Context.OptionsRaw) parser.tokenRaw = raw;
       parser.tokenValue = decodeHTMLStrict(raw);
-      parser.
+      parser.setToken(Token.JSXText);
     }
   }

-  return parser.
+  return parser.getToken();
 }

 /**
@@ -113,7 +114,7 @@ export function scanJSXToken(parser: ParserState, context: Context): Token {
  * @param parser The parser instance
  */
 export function scanJSXIdentifier(parser: ParserState): Token {
-  if ((parser.
+  if ((parser.getToken() & Token.IsIdentifier) === Token.IsIdentifier) {
     const { index } = parser;
     let char = parser.currentChar;
     while (CharTypes[char] & (CharFlags.Hyphen | CharFlags.IdentifierPart)) {
@@ -121,6 +122,6 @@ export function scanJSXIdentifier(parser: ParserState): Token {
     }
     parser.tokenValue += parser.source.slice(index, parser.index);
   }
-  parser.
-  return parser.
+  parser.setToken(Token.Identifier);
+  return parser.getToken();
 }

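From the public API, these JSX lexer changes are exercised through the `jsx` option; JSXText values pass through `decodeHTMLStrict`, while the `raw` option keeps the original source text alongside. A minimal sketch, assuming meriyah's documented `jsx` and `raw` option flags:

```ts
import { parseScript } from 'meriyah';

// '&amp;' in JSX text is decoded to '&' in the node value; `raw` keeps the source form.
const ast = parseScript('<span>&amp; more</span>;', { jsx: true, raw: true });
console.log(JSON.stringify(ast, null, 2));
```
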
package/src/lexer/numeric.ts
CHANGED

@@ -212,8 +212,8 @@ export function scanNumber(parser: ParserState, context: Context, kind: NumberKind
     kind & (NumberKind.ImplicitOctal | NumberKind.Binary | NumberKind.Hex | NumberKind.Octal)
       ? value
       : kind & NumberKind.NonOctalDecimal
-
-
+        ? parseFloat(parser.source.substring(parser.tokenPos, parser.index))
+        : +value;

   if (context & Context.OptionsRaw) parser.tokenRaw = parser.source.slice(parser.tokenPos, parser.index);

package/src/lexer/regexp.ts
CHANGED

@@ -47,7 +47,7 @@ export function scanRegularExpression(parser: ParserState, context: Context): Token {
       case Chars.LineSeparator:
       case Chars.ParagraphSeparator:
         report(parser, Errors.UnterminatedRegExp);
-
+      // No default
     }
   }

@@ -143,13 +143,13 @@
 function validate(parser: ParserState, pattern: string, flags: string): RegExp | null | Token {
   try {
     return new RegExp(pattern, flags);
-  } catch
+  } catch {
     try {
       // Some JavaScript engine has not supported flag "d".
       new RegExp(pattern, flags.replace('d', ''));
       // Use null as tokenValue according to ESTree spec
       return null;
-    } catch
+    } catch {
       report(parser, Errors.UnterminatedRegExp);
     }
   }

package/src/lexer/scan.ts
CHANGED

@@ -176,8 +176,8 @@ export function nextToken(parser: ParserState, context: Context): void {
   parser.startPos = parser.index;
   parser.startColumn = parser.column;
   parser.startLine = parser.line;
-  parser.
-  if (parser.onToken && parser.
+  parser.setToken(scanSingleToken(parser, context, LexerState.None));
+  if (parser.onToken && parser.getToken() !== Token.EOF) {
     const loc = {
       start: {
         line: parser.linePos,
@@ -188,14 +188,14 @@ export function nextToken(parser: ParserState, context: Context): void {
         column: parser.column
       }
     };
-    parser.onToken(convertTokenType(parser.
+    parser.onToken(convertTokenType(parser.getToken()), parser.tokenPos, parser.index, loc);
   }
 }

 export function scanSingleToken(parser: ParserState, context: Context, state: LexerState): Token {
   const isStartOfLine = parser.index === 0;

-  const source = parser
+  const { source } = parser;

   // These three are only for HTMLClose comment
   let startPos = parser.index;
@@ -271,7 +271,7 @@ export function scanSingleToken(parser: ParserState, context: Context, state: LexerState): Token {
         break;

       // `<`, `<=`, `<<`, `<<=`, `</`, `<!--`
-      case Token.LessThan:
+      case Token.LessThan: {
         let ch = advanceChar(parser);
         if (parser.index < parser.end) {
           if (ch === Chars.LessThan) {
@@ -325,6 +325,7 @@ export function scanSingleToken(parser: ParserState, context: Context, state: LexerState): Token {
           }
         }
         return Token.LessThan;
+      }

       // `=`, `==`, `===`, `=>`
       case Token.Assign: {
@@ -491,7 +492,7 @@ export function scanSingleToken(parser: ParserState, context: Context, state: LexerState): Token {
       }

       // `.`, `...`, `.123` (numeric literal)
-      case Token.Period:
+      case Token.Period: {
        const next = advanceChar(parser);
        if (next >= Chars.Zero && next <= Chars.Nine)
          return scanNumber(parser, context, NumberKind.Float | NumberKind.Decimal);
@@ -504,6 +505,7 @@ export function scanSingleToken(parser: ParserState, context: Context, state: LexerState): Token {
         }
       }
       return Token.Period;
+      }

       // `|`, `||`, `|=`, `||=`
       case Token.BitwiseOr: {

CHANGED
|
@@ -214,7 +214,7 @@ export function parseEscape(parser: ParserState, context: Context, first: number
|
|
|
214
214
|
case Chars.Eight:
|
|
215
215
|
case Chars.Nine:
|
|
216
216
|
if ((context & Context.OptionsWebCompat) === 0) return Escape.EightOrNine;
|
|
217
|
-
|
|
217
|
+
// fallthrough
|
|
218
218
|
default:
|
|
219
219
|
return first;
|
|
220
220
|
}
|
|
@@ -237,6 +237,6 @@ export function handleStringError(state: ParserState, code: Escape, isTemplate:
|
|
|
237
237
|
case Escape.OutOfRange:
|
|
238
238
|
report(state, Errors.UnicodeOverflow);
|
|
239
239
|
|
|
240
|
-
default
|
|
240
|
+
// No default
|
|
241
241
|
}
|
|
242
242
|
}
|
package/src/meriyah.ts
CHANGED

@@ -2,8 +2,7 @@ import { Context } from './common';
 import { parseSource, Options } from './parser';
 import * as ESTree from './estree';
 // Current version
-import
-const version: string = meta.version;
+import { version } from '../package.json';

 /**
  * Parse a script, optionally with various options.

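The version string is now imported straight from package.json instead of being read off an imported meta object. Assuming the module's named `version` export is otherwise unchanged, consumers still read it the same way:

```ts
import { version, parseScript } from 'meriyah';

console.log(version); // e.g. '5.0.0'
console.log(parseScript('const answer = 42;').type); // 'Program'
```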
|