terser 3.7.6 → 3.8.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of terser might be problematic.
- package/README.md +55 -15
- package/bin/uglifyjs +19 -7
- package/dist/.gitkeep +0 -0
- package/dist/browser.bundle.js +11960 -0
- package/dist/browser.bundle.js.map +1 -0
- package/lib/ast.js +46 -47
- package/lib/compress.js +309 -254
- package/lib/minify.js +1 -1
- package/lib/mozilla-ast.js +31 -31
- package/lib/output.js +217 -213
- package/lib/parse.js +128 -108
- package/lib/propmangle.js +4 -4
- package/lib/scope.js +19 -19
- package/lib/sourcemap.js +3 -3
- package/lib/transform.js +37 -37
- package/lib/utils.js +29 -29
- package/package.json +35 -6
- package/tools/colorless-console.js +3 -2
- package/tools/exports.js +3 -3
package/lib/parse.js
CHANGED
@@ -44,10 +44,10 @@
 
 "use strict";
 
-var KEYWORDS =
-var KEYWORDS_ATOM =
-var RESERVED_WORDS =
-var KEYWORDS_BEFORE_EXPRESSION =
+var KEYWORDS = "break case catch class const continue debugger default delete do else export extends finally for function if in instanceof let new return switch throw try typeof var void while with";
+var KEYWORDS_ATOM = "false null true";
+var RESERVED_WORDS = "enum implements import interface package private protected public static super this " + KEYWORDS_ATOM + " " + KEYWORDS;
+var KEYWORDS_BEFORE_EXPRESSION = "return new delete throw else case yield await";
 
 KEYWORDS = makePredicate(KEYWORDS);
 RESERVED_WORDS = makePredicate(RESERVED_WORDS);
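
For context, the keyword strings above are immediately turned into lookup predicates by makePredicate (defined in package/lib/utils.js), and the tokenizer consumes them through calls such as KEYWORDS(word) and RESERVED_WORDS(name) later in this diff. A minimal, self-contained sketch of that usage follows; the makePredicate body here is a stand-in, not terser's actual implementation.

// Stand-in predicate builder: terser's real makePredicate lives in lib/utils.js.
function makePredicate(words) {
    var set = new Set(words.split(" "));
    return function (word) { return set.has(word); };
}

var KEYWORDS = makePredicate("break case catch class const continue debugger default delete do else export extends finally for function if in instanceof let new return switch throw try typeof var void while with");

console.log(KEYWORDS("while")); // true  -> read_word() emits a "keyword" token
console.log(KEYWORDS("await")); // false -> "await" stays a "name" token (contextual keyword)
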
@@ -195,19 +195,19 @@ function is_surrogate_pair_tail(code) {
 
 function is_digit(code) {
 return code >= 48 && code <= 57;
-}
+}
 
 function is_identifier(name) {
 if (typeof name !== "string" || RESERVED_WORDS(name))
 return false;
 
 return true;
-}
+}
 
 function is_identifier_start(ch) {
 var code = ch.charCodeAt(0);
 return UNICODE.ID_Start.test(ch) || code == 36 || code == 95;
-}
+}
 
 function is_identifier_char(ch) {
 var code = ch.charCodeAt(0);
@@ -217,11 +217,11 @@ function is_identifier_char(ch) {
 || code == 8204 // \u200c: zero-width non-joiner <ZWNJ>
 || code == 8205 // \u200d: zero-width joiner <ZWJ> (in my ECMA-262 PDF, this is also 200c)
 ;
-}
+}
 
-function is_identifier_string(str){
+function is_identifier_string(str) {
 return /^[a-z_$][a-z0-9_$]*$/i.test(str);
-}
+}
 
 function parse_js_number(num) {
 if (RE_HEX_NUMBER.test(num)) {
@@ -238,7 +238,7 @@ function parse_js_number(num) {
 var val = parseFloat(num);
 if (val == num) return val;
 }
-}
+}
 
 function JS_Parse_Error(message, filename, line, col, pos) {
 this.message = message;
@@ -246,7 +246,7 @@ function JS_Parse_Error(message, filename, line, col, pos) {
 this.line = line;
 this.col = col;
 this.pos = pos;
-}
+}
 JS_Parse_Error.prototype = Object.create(Error.prototype);
 JS_Parse_Error.prototype.constructor = JS_Parse_Error;
 JS_Parse_Error.prototype.name = "SyntaxError";
@@ -254,11 +254,11 @@ configure_error_stack(JS_Parse_Error);
 
 function js_error(message, filename, line, col, pos) {
 throw new JS_Parse_Error(message, filename, line, col, pos);
-}
+}
 
 function is_token(token, type, val) {
 return token.type == type && (val == null || token.value == val);
-}
+}
 
 var EX_EOF = {};
 
@@ -282,7 +282,7 @@ function tokenizer($TEXT, filename, html5_comments, shebang) {
 directive_stack : []
 };
 
-function peek() { return get_full_char(S.text, S.pos); }
+function peek() { return get_full_char(S.text, S.pos); }
 
 function next(signal_eof, in_string) {
 var ch = get_full_char(S.text, S.pos++);
@@ -305,15 +305,15 @@ function tokenizer($TEXT, filename, html5_comments, shebang) {
 ++S.col;
 }
 return ch;
-}
+}
 
 function forward(i) {
 while (i-- > 0) next();
-}
+}
 
 function looking_at(str) {
 return S.text.substr(S.pos, str.length) == str;
-}
+}
 
 function find_eol() {
 var text = S.text;
@@ -323,19 +323,19 @@ function tokenizer($TEXT, filename, html5_comments, shebang) {
 return i;
 }
 return -1;
-}
+}
 
 function find(what, signal_eof) {
 var pos = S.text.indexOf(what, S.pos);
 if (signal_eof && pos == -1) throw EX_EOF;
 return pos;
-}
+}
 
 function start_token() {
 S.tokline = S.line;
 S.tokcol = S.col;
 S.tokpos = S.pos;
-}
+}
 
 var prev_was_dot = false;
 function token(type, value, is_comment) {
@@ -369,27 +369,27 @@ function tokenizer($TEXT, filename, html5_comments, shebang) {
 }
 S.newline_before = false;
 return new AST_Token(ret);
-}
+}
 
 function skip_whitespace() {
 while (WHITESPACE_CHARS(peek()))
 next();
-}
+}
 
 function read_while(pred) {
 var ret = "", ch, i = 0;
 while ((ch = peek()) && pred(ch, i++))
 ret += next();
 return ret;
-}
+}
 
 function parse_error(err) {
 js_error(err, filename, S.tokline, S.tokcol, S.tokpos);
-}
+}
 
 function read_num(prefix) {
 var has_e = false, after_e = false, has_x = false, has_dot = prefix == ".";
-var num = read_while(function(ch, i){
+var num = read_while(function(ch, i) {
 var code = ch.charCodeAt(0);
 switch (code) {
 case 98: case 66: // bB
@@ -418,7 +418,7 @@ function tokenizer($TEXT, filename, html5_comments, shebang) {
 } else {
 parse_error("Invalid syntax: " + num);
 }
-}
+}
 
 function read_escaped_char(in_string) {
 var ch = next(true, in_string);
@@ -456,7 +456,7 @@ function tokenizer($TEXT, filename, html5_comments, shebang) {
 if (ch >= "0" && ch <= "7")
 return read_octal_escape_sequence(ch);
 return ch;
-}
+}
 
 function read_octal_escape_sequence(ch) {
 // Read
@@ -483,9 +483,9 @@ function tokenizer($TEXT, filename, html5_comments, shebang) {
 num = (num << 4) | digit;
 }
 return num;
-}
+}
 
-var read_string = with_eof_error("Unterminated string constant", function(quote_char){
+var read_string = with_eof_error("Unterminated string constant", function(quote_char) {
 var quote = next(), ret = "";
 for (;;) {
 var ch = next(true, true);
@@ -499,7 +499,7 @@ function tokenizer($TEXT, filename, html5_comments, shebang) {
 return tok;
 });
 
-var read_template_characters = with_eof_error("Unterminated template", function(begin){
+var read_template_characters = with_eof_error("Unterminated template", function(begin) {
 if (begin) {
 S.template_braces.push(S.brace_counter);
 }
@@ -550,12 +550,12 @@ function tokenizer($TEXT, filename, html5_comments, shebang) {
 S.comments_before.push(token(type, ret, true));
 S.regex_allowed = regex_allowed;
 return next_token;
-}
+}
 
-var skip_multiline_comment = with_eof_error("Unterminated multiline comment", function(){
+var skip_multiline_comment = with_eof_error("Unterminated multiline comment", function() {
 var regex_allowed = S.regex_allowed;
 var i = find("*/", true);
-var text = S.text.substring(S.pos, i).replace(/\r\n|\r|\u2028|\u2029/g,
+var text = S.text.substring(S.pos, i).replace(/\r\n|\r|\u2028|\u2029/g, "\n");
 // update stream position
 forward(get_full_char_length(text) /* text length doesn't count \r\n as 2 char while S.pos - i does */ + 2);
 S.comments_before.push(token("comment2", text, true));
@@ -573,7 +573,7 @@ function tokenizer($TEXT, filename, html5_comments, shebang) {
 parse_error("Expecting UnicodeEscapeSequence -- uXXXX or u{XXXX}");
 }
 return read_escaped_char();
-}
+};
 
 // Read first character (ID_Start)
 if ((name = peek()) === "\\") {
@@ -581,7 +581,7 @@ function tokenizer($TEXT, filename, html5_comments, shebang) {
 if (!is_identifier_start(name)) {
 parse_error("First identifier char is an invalid identifier char");
 }
-} else if (is_identifier_start(name)){
+} else if (is_identifier_start(name)) {
 next();
 } else {
 return "";
@@ -648,9 +648,9 @@ function tokenizer($TEXT, filename, html5_comments, shebang) {
 } else {
 return op;
 }
-}
+}
 return token("operator", grow(prefix || next()));
-}
+}
 
 function handle_slash() {
 next();
@@ -663,7 +663,7 @@ function tokenizer($TEXT, filename, html5_comments, shebang) {
 return skip_multiline_comment();
 }
 return S.regex_allowed ? read_regexp("") : read_operator("/");
-}
+}
 
 function handle_eq_sign() {
 next();
@@ -673,7 +673,7 @@ function tokenizer($TEXT, filename, html5_comments, shebang) {
 } else {
 return read_operator("=");
 }
-}
+}
 
 function handle_dot() {
 next();
@@ -687,7 +687,7 @@ function tokenizer($TEXT, filename, html5_comments, shebang) {
 }
 
 return token("punc", ".");
-}
+}
 
 function read_word() {
 var word = read_name();
@@ -696,7 +696,7 @@ function tokenizer($TEXT, filename, html5_comments, shebang) {
 : !KEYWORDS(word) ? token("name", word)
 : OPERATORS(word) ? token("operator", word)
 : token("keyword", word);
-}
+}
 
 function with_eof_error(eof_error, cont) {
 return function(x) {
@@ -707,7 +707,7 @@ function tokenizer($TEXT, filename, html5_comments, shebang) {
 else throw ex;
 }
 };
-}
+}
 
 function next_token(force_regexp) {
 if (force_regexp != null)
@@ -762,7 +762,7 @@ function tokenizer($TEXT, filename, html5_comments, shebang) {
 break;
 }
 parse_error("Unexpected character '" + ch + "'");
-}
+}
 
 next_token.next = next;
 next_token.peek = peek;
@@ -780,11 +780,11 @@ function tokenizer($TEXT, filename, html5_comments, shebang) {
 } else {
 S.directives[directive]++;
 }
-}
+};
 
 next_token.push_directives_stack = function() {
 S.directive_stack.push([]);
-}
+};
 
 next_token.pop_directives_stack = function() {
 var directives = S.directive_stack[S.directive_stack.length - 1];
@@ -794,15 +794,15 @@ function tokenizer($TEXT, filename, html5_comments, shebang) {
 }
 
 S.directive_stack.pop();
-}
+};
 
 next_token.has_directive = function(directive) {
 return S.directives[directive] > 0;
-}
+};
 
 return next_token;
 
-}
+}
 
 /* -----[ Parser (constants) ]----- */
 
@@ -822,7 +822,7 @@ var UNARY_POSTFIX = makePredicate([ "--", "++" ]);
 
 var ASSIGNMENT = makePredicate([ "=", "+=", "-=", "/=", "*=", "**=", "%=", ">>=", "<<=", ">>>=", "|=", "^=", "&=" ]);
 
-var PRECEDENCE = (function(a, ret){
+var PRECEDENCE = (function(a, ret) {
 for (var i = 0; i < a.length; ++i) {
 var b = a[i];
 for (var j = 0; j < b.length; ++j) {
@@ -885,9 +885,9 @@ function parse($TEXT, options) {
 
 function is(type, value) {
 return is_token(S.token, type, value);
-}
+}
 
-function peek() { return S.peeked || (S.peeked = S.input()); }
+function peek() { return S.peeked || (S.peeked = S.input()); }
 
 function next() {
 S.prev = S.token;
@@ -901,11 +901,11 @@ function parse($TEXT, options) {
 S.token.type == "string" || is("punc", ";")
 );
 return S.token;
-}
+}
 
 function prev() {
 return S.prev;
-}
+}
 
 function croak(msg, line, col, pos) {
 var ctx = S.input.context();
@@ -914,26 +914,26 @@ function parse($TEXT, options) {
 line != null ? line : ctx.tokline,
 col != null ? col : ctx.tokcol,
 pos != null ? pos : ctx.tokpos);
-}
+}
 
 function token_error(token, msg) {
 croak(msg, token.line, token.col);
-}
+}
 
 function unexpected(token) {
 if (token == null)
 token = S.token;
 token_error(token, "Unexpected token: " + token.type + " (" + token.value + ")");
-}
+}
 
 function expect_token(type, val) {
 if (is(type, val)) {
 return next();
 }
 token_error(S.token, "Unexpected token " + S.token.type + " «" + S.token.value + "»" + ", expected " + type + " «" + val + "»");
-}
+}
 
-function expect(punc) { return expect_token("punc", punc); }
+function expect(punc) { return expect_token("punc", punc); }
 
 function has_newline_before(token) {
 return token.nlb || !all(token.comments_before, function(comment) {
@@ -944,7 +944,7 @@ function parse($TEXT, options) {
 function can_insert_semicolon() {
 return !options.strict
 && (is("eof") || is("punc", "}") || has_newline_before(S.token));
-}
+}
 
 function is_in_generator() {
 return S.in_generator === S.in_function;
@@ -957,14 +957,14 @@ function parse($TEXT, options) {
 function semicolon(optional) {
 if (is("punc", ";")) next();
 else if (!optional && !can_insert_semicolon()) unexpected();
-}
+}
 
 function parenthesised() {
 expect("(");
 var exp = expression(true);
 expect(")");
 return exp;
-}
+}
 
 function embed_tokens(parser) {
 return function() {
@@ -975,14 +975,14 @@ function parse($TEXT, options) {
 expr.end = end;
 return expr;
 };
-}
+}
 
 function handle_regexp() {
 if (is("operator", "/") || is("operator", "/=")) {
 S.peeked = null;
 S.token = S.input(S.token.value.substr(1)); // force regexp
 }
-}
+}
 
 var statement = embed_tokens(function(is_export_default, is_for_body, is_if_body) {
 handle_regexp();
@@ -1186,7 +1186,7 @@ function parse($TEXT, options) {
 if (label.name === "await" && is_in_async()) {
 token_error(S.prev, "await cannot be used as label inside async function");
 }
-if (find_if(function(l){ return l.name == label.name }, S.labels)) {
+if (find_if(function(l) { return l.name == label.name; }, S.labels)) {
 // ECMA-262, 12.12: An ECMAScript program is considered
 // syntactically incorrect if it contains a
 // LabelledStatement that is enclosed by a
@@ -1201,7 +1201,7 @@ function parse($TEXT, options) {
 // check for `continue` that refers to this label.
 // those should be reported as syntax errors.
 // https://github.com/mishoo/UglifyJS2/issues/287
-label.references.forEach(function(ref){
+label.references.forEach(function(ref) {
 if (ref instanceof AST_Continue) {
 ref = ref.label.start;
 croak("Continue label `" + label.name + "` refers to non-IterationStatement.",
@@ -1210,11 +1210,11 @@ function parse($TEXT, options) {
 });
 }
 return new AST_LabeledStatement({ body: stat, label: label });
-}
+}
 
 function simple_statement(tmp) {
 return new AST_SimpleStatement({ body: (tmp = expression(true), semicolon(), tmp) });
-}
+}
 
 function break_cont(type) {
 var label = null, ldef;
@@ -1222,7 +1222,7 @@ function parse($TEXT, options) {
 label = as_symbol(AST_LabelRef, true);
 }
 if (label != null) {
-ldef = find_if(function(l){ return l.name == label.name }, S.labels);
+ldef = find_if(function(l) { return l.name == label.name; }, S.labels);
 if (!ldef)
 croak("Undefined label " + label.name);
 label.thedef = ldef;
@@ -1233,9 +1233,19 @@ function parse($TEXT, options) {
 var stat = new type({ label: label });
 if (ldef) ldef.references.push(stat);
 return stat;
-}
+}
 
 function for_() {
+var for_await_error = "`for await` invalid in this context";
+var await_tok = S.token;
+if (await_tok.type == "name" && await_tok.value == "await") {
+if (!is_in_async()) {
+token_error(await_tok, for_await_error);
+}
+next();
+} else {
+await_tok = false;
+}
 expect("(");
 var init = null;
 if (!is("punc", ";")) {
@@ -1246,23 +1256,28 @@ function parse($TEXT, options) {
 expression(true, true);
 var is_in = is("operator", "in");
 var is_of = is("name", "of");
+if (await_tok && !is_of) {
+token_error(await_tok, for_await_error);
+}
 if (is_in || is_of) {
 if (init instanceof AST_Definitions) {
 if (init.definitions.length > 1)
-
+token_error(init.start, "Only one variable declaration allowed in for..in loop");
 } else if (!(is_assignable(init) || (init = to_destructuring(init)) instanceof AST_Destructuring)) {
-
+token_error(init.start, "Invalid left-hand side in for..in loop");
 }
 next();
 if (is_in) {
 return for_in(init);
 } else {
-return for_of(init);
+return for_of(init, !!await_tok);
 }
 }
+} else if (await_tok) {
+token_error(await_tok, for_await_error);
 }
 return regular_for(init);
-}
+}
 
 function regular_for(init) {
 expect(";");
@@ -1276,19 +1291,20 @@ function parse($TEXT, options) {
 step : step,
 body : in_loop(function() { return statement(false, true); })
 });
-}
+}
 
-function for_of(init) {
+function for_of(init, is_await) {
 var lhs = init instanceof AST_Definitions ? init.definitions[0].name : null;
 var obj = expression(true);
 expect(")");
 return new AST_ForOf({
+await : is_await,
 init : init,
 name : lhs,
 object : obj,
 body : in_loop(function() { return statement(false, true); })
 });
-}
+}
 
 function for_in(init) {
 var obj = expression(true);
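
The three hunks above are the substantive change in this file: for_() now recognizes an optional await token, validates it, and threads it through for_of() into AST_ForOf as the await flag. A short illustration of the syntax this enables follows (sample input for the parser, not terser code); the rejection comments restate the token_error checks added above.

// Accepted only inside an async function, and only in the `of` form.
async function sum(asyncIterable) {
    var total = 0;
    for await (var n of asyncIterable) {   // parsed as AST_ForOf with await: true
        total += n;
    }
    return total;
}

async function* numbers() { yield 1; yield 2; yield 3; }

sum(numbers()).then(console.log);                        // 6
// for await (var n of numbers()) {}                     // outside an async function: token_error
// async function bad() { for await (var k in {}) {} }   // `in` instead of `of`: token_error
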
@@ -1298,7 +1314,7 @@ function parse($TEXT, options) {
 object : obj,
 body : in_loop(function() { return statement(false, true); })
 });
-}
+}
 
 var arrow_function = function(start, argnames, is_async) {
 if (has_newline_before(S.token)) {
@@ -1404,7 +1420,7 @@ function parse($TEXT, options) {
 strict_mode = true;
 },
 is_strict: function() {
-return default_assignment !== false || spread !== false || strict_mode
+return default_assignment !== false || spread !== false || strict_mode;
 },
 check_strict: function() {
 if (tracker.is_strict() && duplicate !== false) {
@@ -1783,7 +1799,7 @@ function parse($TEXT, options) {
 body : body,
 alternative : belse
 });
-}
+}
 
 function block_() {
 expect("{");
@@ -1794,7 +1810,7 @@ function parse($TEXT, options) {
 }
 next();
 return a;
-}
+}
 
 function switch_body_() {
 expect("{");
@@ -1829,7 +1845,7 @@ function parse($TEXT, options) {
 if (branch) branch.end = prev();
 next();
 return a;
-}
+}
 
 function try_() {
 var body = block_(), bcatch = null, bfinally = null;
@@ -1862,7 +1878,7 @@ function parse($TEXT, options) {
 bcatch : bcatch,
 bfinally : bfinally
 });
-}
+}
 
 function vardefs(no_in, kind) {
 var a = [];
@@ -1897,7 +1913,7 @@ function parse($TEXT, options) {
 next();
 }
 return a;
-}
+}
 
 var var_ = function(no_in) {
 return new AST_Var({
@@ -1969,7 +1985,7 @@ function parse($TEXT, options) {
 });
 break;
 case "regexp":
-ret = new AST_RegExp({ start: tok, end: tok, value: tok.value
+ret = new AST_RegExp({ start: tok, end: tok, value: tok.value });
 break;
 case "atom":
 switch (tok.value) {
@@ -1987,7 +2003,7 @@ function parse($TEXT, options) {
 }
 next();
 return ret;
-}
+}
 
 function to_fun_args(ex, _, __, default_seen_above) {
 var insert_default = function(ex, default_value) {
@@ -2001,7 +2017,7 @@ function parse($TEXT, options) {
 });
 }
 return ex;
-}
+};
 if (ex instanceof AST_Object) {
 return insert_default(new AST_Destructuring({
 start: ex.start,
@@ -2048,7 +2064,11 @@ function parse($TEXT, options) {
 return new_(allow_calls);
 }
 var start = S.token;
-var
+var peeked;
+var async = is("name", "async")
+&& (peeked = peek()).value != "["
+&& peeked.type != "arrow"
+&& as_atom_node();
 if (is("punc")) {
 switch (S.token.value) {
 case "(":
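
The hunk above makes the `async` lookahead explicit: an `async` name token is only captured via as_atom_node() when the following token is neither "[" nor an arrow, since `async` is not a reserved word and can appear as an ordinary identifier. Illustrative inputs (plain JavaScript, not terser code):

// `async` only introduces an async function or arrow when the following
// token says so; otherwise it is just a name.
var async = [1, 2, 3];

console.log(async[0]);       // `async` then "[": ordinary identifier reference, prints 1
var id = async => async;     // `async` then "=>": arrow whose parameter is named async
console.log(id(7));          // 7
async function later() {}    // here `async` does start an async function
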
@@ -2177,7 +2197,7 @@ function parse($TEXT, options) {
 }
 next();
 return a;
-}
+}
 
 var array_ = embed_tokens(function() {
 expect("[");
@@ -2255,7 +2275,7 @@ function parse($TEXT, options) {
 }));
 }
 next();
-return new AST_Object({ properties: a })
+return new AST_Object({ properties: a });
 });
 
 function class_(KindOfClass) {
@@ -2313,7 +2333,7 @@ function parse($TEXT, options) {
 unexpected();
 }
 return name;
-}
+};
 var is_async = false;
 var is_static = false;
 var is_generator = false;
@@ -2400,7 +2420,7 @@ function parse($TEXT, options) {
 expect_token("name", "from");
 }
 var mod_str = S.token;
-if (mod_str.type !==
+if (mod_str.type !== "string") {
 unexpected();
 }
 next();
@@ -2456,7 +2476,7 @@ function parse($TEXT, options) {
 foreign_name: foreign_name,
 name: name,
 end: prev(),
-})
+});
 }
 
 function map_nameAsterisk(is_import, name) {
@@ -2467,13 +2487,13 @@ function parse($TEXT, options) {
 var end = prev();
 
 name = name || new type({
-name:
+name: "*",
 start: start,
 end: end,
 });
 
 foreign_name = new foreign_type({
-name:
+name: "*",
 start: start,
 end: end,
 });
@@ -2483,7 +2503,7 @@ function parse($TEXT, options) {
 foreign_name: foreign_name,
 name: name,
 end: end,
-})
+});
 }
 
 function map_names(is_import) {
@@ -2523,7 +2543,7 @@ function parse($TEXT, options) {
 next();
 
 var mod_str = S.token;
-if (mod_str.type !==
+if (mod_str.type !== "string") {
 unexpected();
 }
 next();
@@ -2615,14 +2635,14 @@ function parse($TEXT, options) {
 default:
 unexpected(tmp);
 }
-}
+}
 
 function as_name() {
 var tmp = S.token;
 if (tmp.type != "name") unexpected();
 next();
 return tmp.value;
-}
+}
 
 function _make_symbol(type) {
 var name = S.token.value;
@@ -2633,7 +2653,7 @@ function parse($TEXT, options) {
 start : S.token,
 end : S.token
 });
-}
+}
 
 function _verify_symbol(sym) {
 var name = sym.name;
@@ -2659,7 +2679,7 @@ function parse($TEXT, options) {
 _verify_symbol(sym);
 next();
 return sym;
-}
+}
 
 function mark_pure(call) {
 var start = call.start;
@@ -2747,7 +2767,7 @@ function parse($TEXT, options) {
 next();
 return _await_expression();
 } else if (S.input.has_directive("use strict")) {
-token_error(S.token, "Unexpected await identifier inside strict mode")
+token_error(S.token, "Unexpected await identifier inside strict mode");
 }
 }
 if (is("operator") && UNARY_PREFIX(start.value)) {
@@ -2783,7 +2803,7 @@ function parse($TEXT, options) {
 break;
 }
 return new ctor({ operator: op, expression: expr });
-}
+}
 
 var expr_op = function(left, min_prec, no_in) {
 var op = is("operator") ? S.token.value : null;
@@ -2810,7 +2830,7 @@ function parse($TEXT, options) {
 
 function expr_ops(no_in) {
 return expr_op(maybe_unary(true, true), 0, no_in);
-}
+}
 
 var maybe_conditional = function(no_in) {
 var start = S.token;
@@ -2832,7 +2852,7 @@ function parse($TEXT, options) {
 
 function is_assignable(expr) {
 return expr instanceof AST_PropAccess || expr instanceof AST_SymbolRef;
-}
+}
 
 function to_destructuring(node) {
 if (node instanceof AST_Object) {
@@ -2930,13 +2950,13 @@ function parse($TEXT, options) {
 var ret = cont();
 --S.in_loop;
 return ret;
-}
+}
 
 if (options.expression) {
 return expression(true);
 }
 
-return (function(){
+return (function() {
 var start = S.token;
 var body = [];
 S.input.push_directives_stack();
@@ -2955,4 +2975,4 @@ function parse($TEXT, options) {
 return toplevel;
 })();
 
-}
+}