terser 5.3.6 → 5.5.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +20 -0
- package/README.md +9 -2
- package/dist/bundle.min.js +289 -129
- package/lib/ast.js +55 -6
- package/lib/compress/index.js +140 -34
- package/lib/mozilla-ast.js +24 -39
- package/lib/output.js +5 -2
- package/lib/parse.js +67 -49
- package/package.json +11 -12
- package/dist/bundle.min.js.map +0 -1
package/lib/parse.js
CHANGED
@@ -157,6 +157,9 @@ import {
     _PURE
 } from "./ast.js";
 
+var LATEST_RAW = ""; // Only used for numbers and template strings
+var LATEST_TEMPLATE_END = true;
+
 var KEYWORDS = "break case catch class const continue debugger default delete do else export extends finally for function if in instanceof let new return switch throw try typeof var void while with";
 var KEYWORDS_ATOM = "false null true";
 var RESERVED_WORDS = "enum implements import interface package private protected public static super this " + KEYWORDS_ATOM + " " + KEYWORDS;
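The two module-level variables added here act as a side channel between the tokenizer and the parser: the tokenizer records the raw source text of the most recent number or template segment (and whether a template has just ended), and the parser reads that state right after consuming the matching token, instead of attaching a raw property to every token. A minimal sketch of the pattern, with made-up names rather than terser internals:

    // Illustration only, not terser code: the lexer stashes the last raw text
    // in module state, and the parser consumes it right after the token.
    let LATEST_RAW = "";

    function lex_number(src) {
        LATEST_RAW = src;                        // exactly what the source said
        return { type: "num", value: Number(src) };
    }

    const tok = lex_number("0xFF");
    console.log(tok.value, LATEST_RAW);          // 255 "0xFF"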
@@ -212,6 +215,9 @@ var OPERATORS = makePredicate([
     "=",
     "+=",
     "-=",
+    "||=",
+    "&&=",
+    "??=",
     "/=",
     "*=",
     "**=",
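Together with the ASSIGNMENT and LOGICAL_ASSIGNMENT predicates further down, these three entries teach the tokenizer the ES2021 logical assignment operators. A quick check through the public API (a hedged sketch assuming terser's documented async minify entry point):

    import { minify } from "terser";

    // Logical assignment operators should now survive a parse/print round trip.
    const { code } = await minify("x ||= getDefault(); y &&= y.prop; z ??= 0;", {
        compress: false,
        mangle: false,
    });
    console.log(code);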
@@ -472,29 +478,23 @@ function tokenizer($TEXT, filename, html5_comments, shebang) {
         } else if (!is_comment) {
             prev_was_dot = false;
         }
-        var ret = {
-            type    : type,
-            value   : value,
-            line    : S.tokline,
-            col     : S.tokcol,
-            pos     : S.tokpos,
-            endline : S.line,
-            endcol  : S.col,
-            endpos  : S.pos,
-            nlb     : S.newline_before,
-            file    : filename
-        };
-        if (/^(?:num|string|regexp)$/i.test(type)) {
-            ret.raw = $TEXT.substring(ret.pos, ret.endpos);
-        }
+        const line = S.tokline;
+        const col = S.tokcol;
+        const pos = S.tokpos;
+        const nlb = S.newline_before;
+        const file = filename;
+        let comments_before = [];
+        let comments_after = [];
+
         if (!is_comment) {
-            ret.comments_before = S.comments_before;
-            ret.comments_after = S.comments_before = [];
+            comments_before = S.comments_before;
+            comments_after = S.comments_before = [];
         }
         S.newline_before = false;
-        ret = new AST_Token(ret);
-        if (!is_comment) previous_token = ret;
-        return ret;
+        const tok = new AST_Token(type, value, line, col, pos, nlb, comments_before, comments_after, file);
+
+        if (!is_comment) previous_token = tok;
+        return tok;
     }
 
     function skip_whitespace() {
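The token factory no longer assembles an ad-hoc object literal and wraps it afterwards; it calls the AST_Token constructor with a fixed positional argument list. A stand-in class to illustrate the shape implied by that call (the property names mirror the argument order above; the real class lives in lib/ast.js):

    // Stand-in for illustration only. Every token gets the same property
    // layout, created in the same order.
    class Token {
        constructor(type, value, line, col, pos, nlb, comments_before, comments_after, file) {
            this.type = type;
            this.value = value;
            this.line = line;
            this.col = col;
            this.pos = pos;
            this.nlb = nlb;
            this.comments_before = comments_before;
            this.comments_after = comments_after;
            this.file = file;
        }
    }

    const tok = new Token("num", 255, 1, 14, 14, false, [], [], "demo.js");
    console.log(tok.type, tok.value); // "num" 255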
@@ -546,6 +546,9 @@ function tokenizer($TEXT, filename, html5_comments, shebang) {
             return RE_NUM_LITERAL.test(ch);
         });
         if (prefix) num = prefix + num;
+
+        LATEST_RAW = num;
+
         if (RE_OCT_NUMBER.test(num) && next_token.has_directive("use strict")) {
             parse_error("Legacy octal literals are not allowed in strict mode");
         }
@@ -653,15 +656,17 @@ function tokenizer($TEXT, filename, html5_comments, shebang) {
     }
 
     var read_string = with_eof_error("Unterminated string constant", function() {
-        var quote = next(), ret = "";
+        const start_pos = S.pos;
+        var quote = next(), ret = [];
         for (;;) {
             var ch = next(true, true);
             if (ch == "\\") ch = read_escaped_char(true, true);
             else if (ch == "\r" || ch == "\n") parse_error("Unterminated string constant");
             else if (ch == quote) break;
-            ret += ch;
+            ret.push(ch);
         }
-        var tok = token("string", ret);
+        var tok = token("string", ret.join(""));
+        LATEST_RAW = S.text.slice(start_pos, S.pos);
         tok.quote = quote;
         return tok;
     });
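read_string now also records the raw slice of the source (quotes and escapes included) in LATEST_RAW, while the token value keeps only the cooked characters. A standalone illustration of the difference (not terser code):

    // The raw text of a string literal and its cooked value diverge as soon
    // as escape sequences are involved.
    const raw = String.raw`"a\u0041\n"`;    // what the source file contains (11 chars)
    const cooked = JSON.parse(raw);         // "aA\n" after escape processing
    console.log(raw);                       // "a\u0041\n"
    console.log(JSON.stringify(cooked));    // "aA\n"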
@@ -680,7 +685,8 @@ function tokenizer($TEXT, filename, html5_comments, shebang) {
             next(true, true);
             S.brace_counter++;
             tok = token(begin ? "template_head" : "template_substitution", content);
-            tok.raw = raw;
+            LATEST_RAW = raw;
+            LATEST_TEMPLATE_END = false;
             return tok;
         }
 
@@ -696,8 +702,8 @@ function tokenizer($TEXT, filename, html5_comments, shebang) {
         }
         S.template_braces.pop();
         tok = token(begin ? "template_head" : "template_substitution", content);
-        tok.raw = raw;
-        tok.template_end = true;
+        LATEST_RAW = raw;
+        LATEST_TEMPLATE_END = true;
         return tok;
     });
 
@@ -730,7 +736,7 @@ function tokenizer($TEXT, filename, html5_comments, shebang) {
     });
 
     var read_name = with_eof_error("Unterminated identifier name", function() {
-        var name = "", ch, escaped = false;
+        var name = [], ch, escaped = false;
         var read_escaped_identifier_char = function() {
            escaped = true;
            next();
@@ -741,17 +747,19 @@ function tokenizer($TEXT, filename, html5_comments, shebang) {
         };
 
         // Read first character (ID_Start)
-        if ((name = peek()) === "\\") {
-            name = read_escaped_identifier_char();
-            if (!is_identifier_start(name)) {
+        if ((ch = peek()) === "\\") {
+            ch = read_escaped_identifier_char();
+            if (!is_identifier_start(ch)) {
                 parse_error("First identifier char is an invalid identifier char");
             }
-        } else if (is_identifier_start(name)) {
+        } else if (is_identifier_start(ch)) {
             next();
         } else {
             return "";
         }
 
+        name.push(ch);
+
         // Read ID_Continue
         while ((ch = peek()) != null) {
             if ((ch = peek()) === "\\") {
@@ -765,12 +773,13 @@ function tokenizer($TEXT, filename, html5_comments, shebang) {
                 }
                 next();
             }
-            name += ch;
+            name.push(ch);
         }
-        if (RESERVED_WORDS.has(name) && escaped) {
+        const name_str = name.join("");
+        if (RESERVED_WORDS.has(name_str) && escaped) {
             parse_error("Escaped characters are not allowed in keywords");
         }
-        return name;
+        return name_str;
     });
 
     var read_regexp = with_eof_error("Unterminated regular expression", function(source) {
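read_name now joins the collected characters and checks the result against RESERVED_WORDS together with the escaped flag, so an identifier that merely spells a keyword through Unicode escapes is still rejected. A hedged end-to-end check (terser 5's minify is expected to reject its promise on parse errors):

    import { minify } from "terser";

    // "\u0069f" decodes to "if"; because the name was written with an escape,
    // the parser reports "Escaped characters are not allowed in keywords".
    try {
        await minify("var \\u0069f = 1;");
    } catch (err) {
        console.log(err.message);
    }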
@@ -794,7 +803,7 @@ function tokenizer($TEXT, filename, html5_comments, shebang) {
             source += ch;
         }
         const flags = read_name();
-        return token("regexp", { source, flags });
+        return token("regexp", "/" + source + "/" + flags);
     });
 
     function read_operator(prefix) {
@@ -987,7 +996,9 @@ var UNARY_PREFIX = makePredicate([
 
 var UNARY_POSTFIX = makePredicate([ "--", "++" ]);
 
-var ASSIGNMENT = makePredicate([ "=", "+=", "-=", "/=", "*=", "**=", "%=", ">>=", "<<=", ">>>=", "|=", "^=", "&=" ]);
+var ASSIGNMENT = makePredicate([ "=", "+=", "-=", "??=", "&&=", "||=", "/=", "*=", "**=", "%=", ">>=", "<<=", ">>>=", "|=", "^=", "&=" ]);
+
+var LOGICAL_ASSIGNMENT = makePredicate([ "??=", "&&=", "||=" ]);
 
 var PRECEDENCE = (function(a, ret) {
     for (var i = 0; i < a.length; ++i) {
@@ -1025,7 +1036,7 @@ function parse($TEXT, options) {
     // Useful because comments_before property of call with parens outside
     // contains both comments inside and outside these parens. Used to find the
     // right #__PURE__ comments for an expression
-    const outer_comments_before_counts = new Map();
+    const outer_comments_before_counts = new WeakMap();
 
     options = defaults(options, {
         bare_returns : false,
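The comment-counting table becomes a WeakMap, presumably so that entries keyed by tokens or nodes do not keep those objects alive any longer than necessary. A generic illustration of the difference:

    // Illustration only: a WeakMap holds its keys weakly, so an entry keyed by
    // a node object does not keep that object alive once nothing else does.
    const counts = new WeakMap();
    let node = { type: "call" };
    counts.set(node, 2);
    console.log(counts.get(node)); // 2
    node = null; // the entry is now eligible for garbage collection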
@@ -1159,7 +1170,7 @@ function parse($TEXT, options) {
           case "string":
             if (S.in_directives) {
                 var token = peek();
-                if (!S.token.raw.includes("\\")
+                if (!LATEST_RAW.includes("\\")
                     && (is_token(token, "punc", ";")
                         || is_token(token, "punc", "}")
                         || has_newline_before(token)
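The directive check now consults LATEST_RAW rather than a raw field on the token. Looking at the raw text matters because a directive must not contain escape sequences even when the cooked value equals "use strict". A standalone illustration:

    // '"use \u0073trict"' cooks to "use strict", but its raw text contains a
    // backslash, so it must not be treated as a directive.
    const raw = '"use \\u0073trict"';
    const value = JSON.parse(raw);
    console.log(value === "use strict"); // true
    console.log(raw.includes("\\"));     // true, so the parser skips it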
@@ -2142,7 +2153,12 @@ function parse($TEXT, options) {
             ret = _make_symbol(AST_SymbolRef);
             break;
           case "num":
-            ret = new AST_Number({ start: tok, end: tok, value: tok.value });
+            ret = new AST_Number({
+                start: tok,
+                end: tok,
+                value: tok.value,
+                raw: LATEST_RAW
+            });
             break;
           case "big_int":
             ret = new AST_BigInt({ start: tok, end: tok, value: tok.value });
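With the raw text stored on AST_Number, the output stage can reproduce number literals exactly as written. A hedged sketch, assuming the format.keep_numbers option that terser documents for this purpose:

    import { minify } from "terser";

    // Without keep_numbers the printer may shorten the literal (for example to
    // 1e6); with it, the recorded raw text should be emitted verbatim.
    const { code } = await minify("const n = 1000000;", {
        compress: false,
        mangle: false,
        format: { keep_numbers: true },
    });
    console.log(code); // expected: const n=1000000;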
@@ -2156,7 +2172,9 @@ function parse($TEXT, options) {
             });
             break;
           case "regexp":
-            ret = new AST_RegExp({ start: tok, end: tok, value: tok.value });
+            const [_, source, flags] = tok.value.match(/^\/(.*)\/(\w*)$/);
+
+            ret = new AST_RegExp({ start: tok, end: tok, value: { source, flags } });
             break;
           case "atom":
             switch (tok.value) {
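The regexp token now carries the whole literal as a single string, and as_atom_node splits it back into source and flags with the match shown above. The same expression in isolation:

    // Splitting "/<source>/<flags>" apart, exactly as in the hunk above.
    const [, source, flags] = "/ab+c/gi".match(/^\/(.*)\/(\w*)$/);
    console.log(source, flags); // "ab+c" "gi"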
@@ -2313,7 +2331,7 @@ function parse($TEXT, options) {
             return subscripts(cls, allow_calls);
         }
         if (is("template_head")) {
-            return subscripts(template_string(false), allow_calls);
+            return subscripts(template_string(), allow_calls);
         }
         if (ATOMIC_START_TOKEN.has(S.token.type)) {
             return subscripts(as_atom_node(), allow_calls);
@@ -2326,22 +2344,19 @@ function parse($TEXT, options) {
 
         segments.push(new AST_TemplateSegment({
             start: S.token,
-            raw: S.token.raw,
+            raw: LATEST_RAW,
             value: S.token.value,
             end: S.token
         }));
-        while (!S.token.template_end) {
+
+        while (!LATEST_TEMPLATE_END) {
             next();
             handle_regexp();
             segments.push(expression(true));
 
-            if (!is_token("template_substitution")) {
-                unexpected();
-            }
-
             segments.push(new AST_TemplateSegment({
                 start: S.token,
-                raw: S.token.raw,
+                raw: LATEST_RAW,
                 value: S.token.value,
                 end: S.token
             }));
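Template segments likewise take their raw text from LATEST_RAW. Keeping the raw spelling is observable from user code, since tag functions receive it through the .raw array:

    // A tag function sees both the cooked and the raw spelling of a segment.
    function tag(strings) {
        return [strings[0], strings.raw[0]];
    }
    const [cooked, raw] = tag`a\nb`;
    console.log(JSON.stringify(cooked)); // "a\nb"  (three characters, real newline)
    console.log(raw);                    // a\nb    (four characters, backslash + n)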
@@ -2435,6 +2450,7 @@ function parse($TEXT, options) {
                     left: value,
                     operator: "=",
                     right: expression(false),
+                    logical: false,
                     end: prev()
                 });
             }
@@ -3017,7 +3033,7 @@ function parse($TEXT, options) {
             return subscripts(new AST_PrefixedTemplateString({
                 start: start,
                 prefix: expr,
-                template_string: template_string(true),
+                template_string: template_string(),
                 end: prev()
             }), allow_calls);
         }
@@ -3203,11 +3219,13 @@ function parse($TEXT, options) {
         if (is("operator") && ASSIGNMENT.has(val)) {
             if (is_assignable(left) || (left = to_destructuring(left)) instanceof AST_Destructuring) {
                 next();
+
                 return new AST_Assign({
                     start    : start,
                     left     : left,
                     operator : val,
                     right    : maybe_assign(no_in),
+                    logical  : LOGICAL_ASSIGNMENT.has(val),
                     end      : prev()
                 });
             }
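maybe_assign now tags AST_Assign nodes built from ??=, &&= and ||= with logical: true. The distinction matters downstream because a logical assignment can skip the assignment entirely, so it is not interchangeable with the expanded x = x op y form. A small semantic illustration:

    // With ||=, the setter is never invoked when the current value is truthy;
    // rewriting it as `obj.x = obj.x || 2` would call the setter and throw.
    const obj = {
        get x() { return 1; },
        set x(v) { throw new Error("setter ran"); },
    };
    obj.x ||= 2;
    console.log(obj.x); // 1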
package/package.json
CHANGED
@@ -4,9 +4,9 @@
   "homepage": "https://terser.org",
   "author": "Mihai Bazon <mihai.bazon@gmail.com> (http://lisperator.net/)",
   "license": "BSD-2-Clause",
-  "version": "5.3.6",
+  "version": "5.5.0",
   "engines": {
-    "node": "
+    "node": ">=10"
   },
   "maintainers": [
     "Fábio Santos <fabiosantosart@gmail.com>"
@@ -16,16 +16,15 @@
   "type": "module",
   "module": "./main.js",
   "exports": {
-    ".":
-
-
-
-
-    "
-
-    "./package.json"
-
-    }
+    ".": [
+      {
+        "import": "./main.js",
+        "require": "./dist/bundle.min.js"
+      },
+      "./dist/bundle.min.js"
+    ],
+    "./package": "./package.json",
+    "./package.json": "./package.json"
   },
   "types": "tools/terser.d.ts",
   "bin": {