webpack 5.39.1 → 5.42.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +13 -13
- package/bin/webpack.js +0 -0
- package/lib/Compiler.js +14 -1
- package/lib/ConditionalInitFragment.js +15 -12
- package/lib/DependencyTemplate.js +3 -2
- package/lib/ExternalModule.js +213 -33
- package/lib/ExternalModuleFactoryPlugin.js +2 -1
- package/lib/InitFragment.js +10 -7
- package/lib/MainTemplate.js +1 -1
- package/lib/ModuleTemplate.js +0 -9
- package/lib/NormalModuleFactory.js +12 -2
- package/lib/RuntimeTemplate.js +8 -0
- package/lib/Template.js +3 -2
- package/lib/TemplatedPathPlugin.js +24 -26
- package/lib/Watching.js +2 -1
- package/lib/WebpackOptionsApply.js +12 -8
- package/lib/async-modules/AwaitDependenciesInitFragment.js +4 -1
- package/lib/cache/IdleFileCachePlugin.js +60 -13
- package/lib/cache/PackFileCacheStrategy.js +27 -16
- package/lib/cli.js +1 -1
- package/lib/config/defaults.js +54 -12
- package/lib/config/normalization.js +2 -0
- package/lib/dependencies/HarmonyExportInitFragment.js +4 -1
- package/lib/dependencies/WorkerPlugin.js +25 -10
- package/lib/electron/ElectronTargetPlugin.js +3 -3
- package/lib/esm/ModuleChunkFormatPlugin.js +97 -0
- package/lib/esm/ModuleChunkLoadingPlugin.js +63 -0
- package/lib/esm/ModuleChunkLoadingRuntimeModule.js +208 -0
- package/lib/hmr/lazyCompilationBackend.js +17 -1
- package/lib/javascript/EnableChunkLoadingPlugin.js +5 -3
- package/lib/javascript/JavascriptModulesPlugin.js +80 -17
- package/lib/javascript/JavascriptParser.js +12 -4
- package/lib/node/NodeTargetPlugin.js +2 -1
- package/lib/node/ReadFileCompileAsyncWasmPlugin.js +44 -22
- package/lib/optimize/InnerGraphPlugin.js +33 -2
- package/lib/optimize/ModuleConcatenationPlugin.js +1 -1
- package/lib/runtime/AsyncModuleRuntimeModule.js +8 -4
- package/lib/serialization/BinaryMiddleware.js +50 -35
- package/lib/serialization/FileMiddleware.js +112 -12
- package/lib/serialization/PlainObjectSerializer.js +17 -8
- package/lib/serialization/Serializer.js +2 -2
- package/lib/serialization/SerializerMiddleware.js +7 -4
- package/lib/util/LazySet.js +26 -17
- package/lib/wasm/EnableWasmLoadingPlugin.js +10 -1
- package/lib/wasm-sync/WasmChunkLoadingRuntimeModule.js +2 -2
- package/lib/wasm-sync/WebAssemblyModulesPlugin.js +1 -1
- package/package.json +18 -18
- package/schemas/WebpackOptions.check.js +1 -1
- package/schemas/WebpackOptions.json +22 -8
- package/schemas/plugins/container/ContainerReferencePlugin.check.js +1 -1
- package/schemas/plugins/container/ContainerReferencePlugin.json +2 -1
- package/schemas/plugins/container/ExternalsType.check.js +1 -1
- package/schemas/plugins/container/ModuleFederationPlugin.check.js +1 -1
- package/schemas/plugins/container/ModuleFederationPlugin.json +2 -1
- package/types.d.ts +120 -162
package/lib/serialization/FileMiddleware.js
CHANGED
@@ -5,6 +5,14 @@
 "use strict";
 
 const { constants } = require("buffer");
+const { pipeline } = require("stream");
+const {
+	createBrotliCompress,
+	createBrotliDecompress,
+	createGzip,
+	createGunzip,
+	constants: zConstants
+} = require("zlib");
 const createHash = require("../util/createHash");
 const { dirname, join, mkdirp } = require("../util/fs");
 const memoize = require("../util/memoize");
@@ -37,6 +45,9 @@ const hashForName = buffers => {
 	return /** @type {string} */ (hash.digest("hex"));
 };
 
+const COMPRESSION_CHUNK_SIZE = 100 * 1024 * 1024;
+const DECOMPRESSION_CHUNK_SIZE = 100 * 1024 * 1024;
+
 const writeUInt64LE = Buffer.prototype.writeBigUInt64LE
 	? (buf, value, offset) => {
 			buf.writeBigUInt64LE(BigInt(value), offset);
@@ -69,7 +80,7 @@ const readUInt64LE = Buffer.prototype.readBigUInt64LE
 * @param {FileMiddleware} middleware this
 * @param {BufferSerializableType[] | Promise<BufferSerializableType[]>} data data to be serialized
 * @param {string | boolean} name file base name
-* @param {function(string | false, Buffer[]): Promise} writeFile writes a file
+* @param {function(string | false, Buffer[]): Promise<void>} writeFile writes a file
 * @returns {Promise<SerializeResult>} resulting file pointer and promise
 */
 const serialize = async (middleware, data, name, writeFile) => {
@@ -280,8 +291,16 @@ const deserialize = async (middleware, name, readFile) => {
 	}
 	const sectionCount = readUInt32LE();
 	const lengths = [];
+	let lastLengthPositive = false;
 	for (let i = 0; i < sectionCount; i++) {
-
+		const value = readInt32LE();
+		const valuePositive = value >= 0;
+		if (lastLengthPositive && valuePositive) {
+			lengths[lengths.length - 1] += value;
+		} else {
+			lengths.push(value);
+			lastLengthPositive = valuePositive;
+		}
 	}
 	const result = [];
 	for (let length of lengths) {
@@ -307,13 +326,24 @@ const deserialize = async (middleware, name, readFile) => {
 		} else if (contentPosition !== 0) {
 			if (length <= contentItemLength - contentPosition) {
 				result.push(
-
+					Buffer.from(
+						contentItem.buffer,
+						contentItem.byteOffset + contentPosition,
+						length
+					)
 				);
 				contentPosition += length;
 				length = 0;
 			} else {
-
-
+				const l = contentItemLength - contentPosition;
+				result.push(
+					Buffer.from(
+						contentItem.buffer,
+						contentItem.byteOffset + contentPosition,
+						l
+					)
+				);
+				length -= l;
 				contentPosition = contentItemLength;
 			}
 		} else {
@@ -322,7 +352,9 @@ const deserialize = async (middleware, name, readFile) => {
 				length -= contentItemLength;
 				contentPosition = contentItemLength;
 			} else {
-				result.push(
+				result.push(
+					Buffer.from(contentItem.buffer, contentItem.byteOffset, length)
+				);
 				contentPosition += length;
 				length = 0;
 			}
@@ -334,7 +366,9 @@ const deserialize = async (middleware, name, readFile) => {
 				length -= contentItemLength;
 				contentPosition = contentItemLength;
 			} else {
-				result.push(
+				result.push(
+					Buffer.from(contentItem.buffer, contentItem.byteOffset, length)
+				);
 				contentPosition += length;
 				length = 0;
 			}
@@ -376,11 +410,37 @@ class FileMiddleware extends SerializerMiddleware {
 				? join(this.fs, filename, `../${name}${extension}`)
 				: filename;
 			await new Promise((resolve, reject) => {
-
+				let stream = this.fs.createWriteStream(file + "_");
+				let compression;
+				if (file.endsWith(".gz")) {
+					compression = createGzip({
+						chunkSize: COMPRESSION_CHUNK_SIZE,
+						level: zConstants.Z_BEST_SPEED
+					});
+				} else if (file.endsWith(".br")) {
+					compression = createBrotliCompress({
+						chunkSize: COMPRESSION_CHUNK_SIZE,
+						params: {
+							[zConstants.BROTLI_PARAM_MODE]: zConstants.BROTLI_MODE_TEXT,
+							[zConstants.BROTLI_PARAM_QUALITY]: 2,
+							[zConstants.BROTLI_PARAM_DISABLE_LITERAL_CONTEXT_MODELING]: true,
+							[zConstants.BROTLI_PARAM_SIZE_HINT]: content.reduce(
+								(size, b) => size + b.length,
+								0
+							)
+						}
+					});
+				}
+				if (compression) {
+					pipeline(compression, stream, reject);
+					stream = compression;
+					stream.on("finish", () => resolve());
+				} else {
+					stream.on("error", err => reject(err));
+					stream.on("finish", () => resolve());
+				}
 				for (const b of content) stream.write(b);
 				stream.end();
-				stream.on("error", err => reject(err));
-				stream.on("finish", () => resolve());
 			});
 			if (name) allWrittenFiles.add(file);
 		};
@@ -447,6 +507,34 @@ class FileMiddleware extends SerializerMiddleware {
 			let currentBuffer;
 			let currentBufferUsed;
 			const buf = [];
+			let decompression;
+			if (file.endsWith(".gz")) {
+				decompression = createGunzip({
+					chunkSize: DECOMPRESSION_CHUNK_SIZE
+				});
+			} else if (file.endsWith(".br")) {
+				decompression = createBrotliDecompress({
+					chunkSize: DECOMPRESSION_CHUNK_SIZE
+				});
+			}
+			if (decompression) {
+				let newResolve, newReject;
+				resolve(
+					Promise.all([
+						new Promise((rs, rj) => {
+							newResolve = rs;
+							newReject = rj;
+						}),
+						new Promise((resolve, reject) => {
+							decompression.on("data", chunk => buf.push(chunk));
+							decompression.on("end", () => resolve());
+							decompression.on("error", err => reject(err));
+						})
+					]).then(() => buf)
+				);
+				resolve = newResolve;
+				reject = newReject;
+			}
 			this.fs.open(file, "r", (err, fd) => {
 				if (err) {
 					reject(err);
@@ -455,13 +543,18 @@ class FileMiddleware extends SerializerMiddleware {
 				const read = () => {
 					if (currentBuffer === undefined) {
 						currentBuffer = Buffer.allocUnsafeSlow(
-							Math.min(
+							Math.min(
+								constants.MAX_LENGTH,
+								remaining,
+								decompression ? DECOMPRESSION_CHUNK_SIZE : Infinity
+							)
 						);
 						currentBufferUsed = 0;
 					}
 					let readBuffer = currentBuffer;
 					let readOffset = currentBufferUsed;
 					let readLength = currentBuffer.length - currentBufferUsed;
+					// values passed to fs.read must be valid int32 values
 					if (readOffset > 0x7fffffff) {
 						readBuffer = currentBuffer.slice(readOffset);
 						readOffset = 0;
@@ -485,9 +578,16 @@ class FileMiddleware extends SerializerMiddleware {
 						currentBufferUsed += bytesRead;
 						remaining -= bytesRead;
 						if (currentBufferUsed === currentBuffer.length) {
-
+							if (decompression) {
+								decompression.write(currentBuffer);
+							} else {
+								buf.push(currentBuffer);
+							}
 							currentBuffer = undefined;
 							if (remaining === 0) {
+								if (decompression) {
+									decompression.end();
+								}
 								this.fs.close(fd, err => {
 									if (err) {
 										reject(err);
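The hunks above teach the pack-cache file middleware to route writes through gzip or brotli streams whenever the target file name ends in ".gz" or ".br", and to decompress transparently on read. Together with the config/defaults.js and WebpackOptions.json changes listed at the top, this appears to be surfaced as a compression setting on the filesystem cache. A minimal sketch, assuming the user-facing option is cache.compression (the option name itself is not shown in this diff):

// webpack.config.js — hedged sketch; `compression` is assumed to be the
// switch for the gzip/brotli code paths added above.
const path = require("path");
module.exports = {
	mode: "production",
	cache: {
		type: "filesystem", // pack files written via FileMiddleware
		cacheDirectory: path.resolve(__dirname, ".cache/webpack"),
		compression: "gzip" // presumably makes the pack files end in ".gz"
	}
};

The Z_BEST_SPEED level and BROTLI_PARAM_QUALITY: 2 chosen in the diff indicate the intended trade-off: light, fast compression so cache writes stay cheap.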
package/lib/serialization/PlainObjectSerializer.js
CHANGED
@@ -7,19 +7,21 @@
 const cache = new WeakMap();
 
 class ObjectStructure {
-	constructor(
-		this.keys =
-		this.children =
+	constructor() {
+		this.keys = undefined;
+		this.children = undefined;
 	}
 
-	getKeys() {
+	getKeys(keys) {
+		if (this.keys === undefined) this.keys = keys;
 		return this.keys;
 	}
 
 	key(key) {
+		if (this.children === undefined) this.children = new Map();
 		const child = this.children.get(key);
 		if (child !== undefined) return child;
-		const newChild = new ObjectStructure(
+		const newChild = new ObjectStructure();
 		this.children.set(key, newChild);
 		return newChild;
 	}
@@ -28,20 +30,27 @@ class ObjectStructure {
 const getCachedKeys = (keys, cacheAssoc) => {
 	let root = cache.get(cacheAssoc);
 	if (root === undefined) {
-		root = new ObjectStructure(
+		root = new ObjectStructure();
 		cache.set(cacheAssoc, root);
 	}
 	let current = root;
 	for (const key of keys) {
 		current = current.key(key);
 	}
-	return current.getKeys();
+	return current.getKeys(keys);
 };
 
 class PlainObjectSerializer {
 	serialize(obj, { write }) {
 		const keys = Object.keys(obj);
-		if (keys.length >
+		if (keys.length > 128) {
+			// Objects with so many keys are unlikely to share structure
+			// with other objects
+			write(keys);
+			for (const key of keys) {
+				write(obj[key]);
+			}
+		} else if (keys.length > 1) {
 			write(getCachedKeys(keys, write));
 			for (const key of keys) {
 				write(obj[key]);
package/lib/serialization/Serializer.js
CHANGED
@@ -15,7 +15,7 @@ class Serializer {
 		const ctx = { ...context, ...this.context };
 		let current = obj;
 		for (const middleware of this.serializeMiddlewares) {
-			if (current
+			if (current && typeof current.then === "function") {
 				current = current.then(
 					data => data && middleware.serialize(data, context)
 				);
@@ -35,7 +35,7 @@ class Serializer {
 		/** @type {any} */
 		let current = value;
 		for (const middleware of this.deserializeMiddlewares) {
-			if (current
+			if (current && typeof current.then === "function") {
 				current = current.then(data => middleware.deserialize(data, context));
 			} else {
 				current = middleware.deserialize(current, ctx);
package/lib/serialization/SerializerMiddleware.js
CHANGED
@@ -100,9 +100,10 @@ class SerializerMiddleware {
 	static serializeLazy(lazy, serialize) {
 		const fn = memoize(() => {
 			const r = lazy();
-			if (r
-
-
+			if (r && typeof r.then === "function") {
+				return r.then(data => data && serialize(data));
+			}
+			return serialize(r);
 		});
 		fn[LAZY_TARGET] = lazy[LAZY_TARGET];
 		/** @type {any} */ (fn).options = /** @type {any} */ (lazy).options;
@@ -118,7 +119,9 @@ class SerializerMiddleware {
 	static deserializeLazy(lazy, deserialize) {
 		const fn = memoize(() => {
 			const r = lazy();
-			if (r
+			if (r && typeof r.then === "function") {
+				return r.then(data => deserialize(data));
+			}
 			return deserialize(r);
 		});
 		fn[LAZY_TARGET] = lazy[LAZY_TARGET];
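Across Serializer and SerializerMiddleware, the earlier promise detection is replaced by a duck-typed "thenable" test, so lazily produced values that return promise-like objects (not only native Promise instances) flow through the asynchronous path. A minimal, standalone sketch of the check itself (not webpack API, just the pattern used above):

// Any value with a callable `then` is treated as asynchronous.
const isThenable = value => value && typeof value.then === "function";

isThenable(Promise.resolve(1));                 // true — native promise
isThenable({ then(resolve) { resolve(1); } });  // true — promise-like object
isThenable(42);                                 // false — handled synchronously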
package/lib/util/LazySet.js
CHANGED
@@ -24,21 +24,17 @@ const merge = (targetSet, toMerge) => {
 /**
 * @template T
 * @param {Set<Iterable<T>>} targetSet set where iterables should be added
-* @param {Array<
+* @param {Array<LazySet<T>>} toDeepMerge lazy sets to be flattened
 * @returns {void}
 */
 const flatten = (targetSet, toDeepMerge) => {
 	for (const set of toDeepMerge) {
-		if (set
-
-
-
-			targetSet.add(mergedSet);
-		}
-		flatten(targetSet, set._toDeepMerge);
+		if (set._set.size > 0) targetSet.add(set._set);
+		if (set._needMerge) {
+			for (const mergedSet of set._toMerge) {
+				targetSet.add(mergedSet);
 			}
-
-		targetSet.add(set);
+			flatten(targetSet, set._toDeepMerge);
 		}
 	}
 };
@@ -58,7 +54,7 @@ class LazySet {
 		this._set = new Set(iterable);
 		/** @type {Set<Iterable<T>>} */
 		this._toMerge = new Set();
-		/** @type {Array<
+		/** @type {Array<LazySet<T>>} */
 		this._toDeepMerge = [];
 		this._needMerge = false;
 		this._deopt = false;
@@ -76,6 +72,14 @@ class LazySet {
 		this._needMerge = false;
 	}
 
+	_isEmpty() {
+		return (
+			this._set.size === 0 &&
+			this._toMerge.size === 0 &&
+			this._toDeepMerge.length === 0
+		);
+	}
+
 	get size() {
 		if (this._needMerge) this._merge();
 		return this._set.size;
@@ -101,13 +105,18 @@ class LazySet {
 				_set.add(item);
 			}
 		} else {
-
-
-
-
-			this.
-
+			if (iterable instanceof LazySet) {
+				if (iterable._isEmpty()) return this;
+				this._toDeepMerge.push(iterable);
+				this._needMerge = true;
+				if (this._toDeepMerge.length > 100000) {
+					this._flatten();
+				}
+			} else {
+				this._toMerge.add(iterable);
+				this._needMerge = true;
 			}
+			if (this._toMerge.size > 100000) this._merge();
 		}
 		return this;
 	}
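With these changes, adding one LazySet to another only queues the source set in _toDeepMerge (empty sets are skipped entirely), and the actual merge happens when the set is read or when more than 100000 pending entries pile up. A small usage sketch of the internal utility — the require path is taken from the file path above, and the behaviour is as implied by the diff:

// Merging is deferred until the set is actually inspected.
const LazySet = require("webpack/lib/util/LazySet");

const fileDependencies = new LazySet(["a.js", "b.js"]);
const moreDependencies = new LazySet(["c.js"]);

fileDependencies.addAll(moreDependencies); // queued for a deep merge, nothing copied yet
console.log(fileDependencies.size);        // 3 — reading size triggers the merge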
package/lib/wasm/EnableWasmLoadingPlugin.js
CHANGED
@@ -84,11 +84,20 @@ class EnableWasmLoadingPlugin {
 			case "async-node": {
 				// TODO webpack 6 remove ReadFileCompileWasmPlugin
 				const ReadFileCompileWasmPlugin = require("../node/ReadFileCompileWasmPlugin");
+				// @ts-expect-error typescript bug for duplicate require
 				const ReadFileCompileAsyncWasmPlugin = require("../node/ReadFileCompileAsyncWasmPlugin");
 				new ReadFileCompileWasmPlugin({
 					mangleImports: compiler.options.optimization.mangleWasmImports
 				}).apply(compiler);
-				new ReadFileCompileAsyncWasmPlugin().apply(compiler);
+				new ReadFileCompileAsyncWasmPlugin({ type }).apply(compiler);
+				break;
+			}
+			case "async-node-module": {
+				// @ts-expect-error typescript bug for duplicate require
+				const ReadFileCompileAsyncWasmPlugin = require("../node/ReadFileCompileAsyncWasmPlugin");
+				new ReadFileCompileAsyncWasmPlugin({ type, import: true }).apply(
+					compiler
+				);
 				break;
 			}
 			case "universal":
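The new "async-node-module" branch wires async WebAssembly loading into the ESM output path added in this release by the lib/esm/* plugins listed at the top (ModuleChunkFormatPlugin, ModuleChunkLoadingPlugin, ModuleChunkLoadingRuntimeModule). A hedged configuration sketch for that output mode — the option names come from webpack 5's existing outputModule experiment, not from this diff:

// webpack.config.js — sketch; assumes ESM output is still gated behind
// experiments.outputModule and selected via the "module"/"import" values.
module.exports = {
	target: "node14",
	experiments: { outputModule: true },
	output: {
		module: true,           // emit ES module output
		chunkFormat: "module",  // presumably handled by the new ModuleChunkFormatPlugin
		chunkLoading: "import"  // presumably handled by the new ModuleChunkLoadingPlugin
	}
};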
package/lib/wasm-sync/WasmChunkLoadingRuntimeModule.js
CHANGED
@@ -291,7 +291,7 @@ class WasmChunkLoadingRuntimeModule extends RuntimeModule {
 			"var promise;",
 			this.supportsStreaming
 				? Template.asString([
-						"if(importObject
+						"if(importObject && typeof importObject.then === 'function' && typeof WebAssembly.compileStreaming === 'function') {",
 						Template.indent([
 							"promise = Promise.all([WebAssembly.compileStreaming(req), importObject]).then(function(items) {",
 							Template.indent([
@@ -309,7 +309,7 @@ class WasmChunkLoadingRuntimeModule extends RuntimeModule {
 						])
 					])
 				: Template.asString([
-						"if(importObject
+						"if(importObject && typeof importObject.then === 'function') {",
 						Template.indent([
 							"var bytesPromise = req.then(function(x) { return x.arrayBuffer(); });",
 							"promise = Promise.all([",
package/lib/wasm-sync/WebAssemblyModulesPlugin.js
CHANGED
@@ -16,7 +16,7 @@ const WebAssemblyInInitialChunkError = require("./WebAssemblyInInitialChunkError
 /** @typedef {import("../Compiler")} Compiler */
 /** @typedef {import("../Module")} Module */
 /** @typedef {import("../ModuleTemplate")} ModuleTemplate */
-/** @typedef {import("../
+/** @typedef {import("../javascript/JavascriptModulesPlugin").RenderContext} RenderContext */
 
 const getWebAssemblyGenerator = memoize(() =>
 	require("./WebAssemblyGenerator")
package/package.json
CHANGED
@@ -1,20 +1,20 @@
 {
 	"name": "webpack",
-	"version": "5.39.1",
+	"version": "5.42.0",
 	"author": "Tobias Koppers @sokra",
 	"description": "Packs CommonJs/AMD modules for the browser. Allows to split your codebase into multiple bundles, which can be loaded on demand. Support loaders to preprocess files, i.e. json, jsx, es7, css, less, ... and your custom stuff.",
 	"license": "MIT",
 	"dependencies": {
 		"@types/eslint-scope": "^3.7.0",
-		"@types/estree": "^0.0.
+		"@types/estree": "^0.0.48",
 		"@webassemblyjs/ast": "1.11.0",
 		"@webassemblyjs/wasm-edit": "1.11.0",
 		"@webassemblyjs/wasm-parser": "1.11.0",
-		"acorn": "^8.
+		"acorn": "^8.4.1",
 		"browserslist": "^4.14.5",
 		"chrome-trace-event": "^1.0.2",
 		"enhanced-resolve": "^5.8.0",
-		"es-module-lexer": "^0.
+		"es-module-lexer": "^0.6.0",
 		"eslint-scope": "5.1.1",
 		"events": "^3.2.0",
 		"glob-to-regexp": "^0.4.1",
@@ -25,7 +25,7 @@
 		"neo-async": "^2.6.2",
 		"schema-utils": "^3.0.0",
 		"tapable": "^2.1.1",
-		"terser-webpack-plugin": "^5.1.
+		"terser-webpack-plugin": "^5.1.3",
 		"watchpack": "^2.2.0",
 		"webpack-sources": "^2.3.0"
 	},
@@ -37,7 +37,7 @@
 	"devDependencies": {
 		"@babel/core": "^7.11.1",
 		"@babel/preset-react": "^7.10.4",
-		"@types/es-module-lexer": "^0.
+		"@types/es-module-lexer": "^0.4.1",
 		"@types/jest": "^26.0.15",
 		"@types/node": "^15.0.1",
 		"babel-loader": "^8.1.0",
@@ -64,7 +64,7 @@
 		"is-ci": "^3.0.0",
 		"istanbul": "^0.4.5",
 		"jest": "^26.6.3",
-		"jest-diff": "^
+		"jest-diff": "^27.0.2",
 		"jest-junit": "^12.0.0",
 		"json-loader": "^0.5.7",
 		"json5": "^2.1.3",
@@ -80,7 +80,7 @@
 		"nyc": "^15.1.0",
 		"open-cli": "^6.0.1",
 		"prettier": "^2.2.0",
-		"pretty-format": "^
+		"pretty-format": "^27.0.2",
 		"pug": "^3.0.0",
 		"pug-loader": "^2.4.0",
 		"raw-loader": "^4.0.1",
@@ -91,7 +91,7 @@
 		"simple-git": "^2.17.0",
 		"strip-ansi": "^6.0.0",
 		"style-loader": "^2.0.0",
-		"terser": "^5.
+		"terser": "^5.7.0",
 		"toml": "^3.0.0",
 		"tooling": "webpack/tooling#v1.19.0",
 		"ts-loader": "^8.0.2",
@@ -131,11 +131,11 @@
 	],
 	"scripts": {
 		"setup": "node ./setup/setup.js",
-		"test": "node --max-old-space-size=4096 --trace-deprecation node_modules/jest-cli/bin/jest",
+		"test": "node --max-old-space-size=4096 --experimental-vm-modules --trace-deprecation node_modules/jest-cli/bin/jest",
 		"test:update-snapshots": "yarn jest -u",
-		"test:integration": "node --max-old-space-size=4096 --trace-deprecation node_modules/jest-cli/bin/jest --testMatch \"<rootDir>/test/*.test.js\"",
-		"test:basic": "node --max-old-space-size=4096 --trace-deprecation node_modules/jest-cli/bin/jest --testMatch \"<rootDir>/te{st/TestCasesNormal,st/StatsTestCases,st/ConfigTestCases}.test.js\"",
-		"test:unit": "node --max-old-space-size=4096 --trace-deprecation node_modules/jest-cli/bin/jest --testMatch \"<rootDir>/test/*.unittest.js\"",
+		"test:integration": "node --max-old-space-size=4096 --experimental-vm-modules --trace-deprecation node_modules/jest-cli/bin/jest --testMatch \"<rootDir>/test/*.test.js\"",
+		"test:basic": "node --max-old-space-size=4096 --experimental-vm-modules --trace-deprecation node_modules/jest-cli/bin/jest --testMatch \"<rootDir>/te{st/TestCasesNormal,st/StatsTestCases,st/ConfigTestCases}.test.js\"",
+		"test:unit": "node --max-old-space-size=4096 --experimental-vm-modules --trace-deprecation node_modules/jest-cli/bin/jest --testMatch \"<rootDir>/test/*.unittest.js\"",
 		"travis:integration": "yarn cover:integration --ci $JEST",
 		"travis:basic": "yarn cover:basic --ci $JEST",
 		"travis:lintunit": "yarn lint && yarn cover:unit --ci $JEST",
@@ -162,13 +162,13 @@
 		"pretty-lint": "yarn pretty-lint-base --check",
 		"yarn-lint": "yarn-deduplicate --fail --list -s highest yarn.lock",
 		"yarn-lint-fix": "yarn-deduplicate -s highest yarn.lock",
-		"benchmark": "node --max-old-space-size=4096 --trace-deprecation node_modules/jest-cli/bin/jest --testMatch \"<rootDir>/test/*.benchmark.js\" --runInBand",
+		"benchmark": "node --max-old-space-size=4096 --experimental-vm-modules --trace-deprecation node_modules/jest-cli/bin/jest --testMatch \"<rootDir>/test/*.benchmark.js\" --runInBand",
 		"cover": "yarn cover:all && yarn cover:report",
 		"cover:clean": "rimraf .nyc_output coverage",
-		"cover:all": "node --max-old-space-size=4096 node_modules/jest-cli/bin/jest --coverage",
-		"cover:basic": "node --max-old-space-size=4096 node_modules/jest-cli/bin/jest --testMatch \"<rootDir>/te{st/TestCasesNormal,st/StatsTestCases,st/ConfigTestCases}.test.js\" --coverage",
-		"cover:integration": "node --max-old-space-size=4096 node_modules/jest-cli/bin/jest --testMatch \"<rootDir>/test/*.test.js\" --coverage",
-		"cover:unit": "node --max-old-space-size=4096 node_modules/jest-cli/bin/jest --testMatch \"<rootDir>/test/*.unittest.js\" --coverage",
+		"cover:all": "node --max-old-space-size=4096 --experimental-vm-modules node_modules/jest-cli/bin/jest --coverage",
+		"cover:basic": "node --max-old-space-size=4096 --experimental-vm-modules node_modules/jest-cli/bin/jest --testMatch \"<rootDir>/te{st/TestCasesNormal,st/StatsTestCases,st/ConfigTestCases}.test.js\" --coverage",
+		"cover:integration": "node --max-old-space-size=4096 --experimental-vm-modules node_modules/jest-cli/bin/jest --testMatch \"<rootDir>/test/*.test.js\" --coverage",
+		"cover:unit": "node --max-old-space-size=4096 --experimental-vm-modules node_modules/jest-cli/bin/jest --testMatch \"<rootDir>/test/*.unittest.js\" --coverage",
 		"cover:types": "node node_modules/tooling/type-coverage",
 		"cover:merge": "nyc merge .nyc_output coverage/coverage-nyc.json && rimraf .nyc_output",
 		"cover:report": "nyc report -t coverage"