webpack 5.0.0 → 5.1.3
This diff shows the content of publicly released package versions as published to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registries.
Potentially problematic release.
This version of webpack might be problematic.
- package/lib/Compilation.js +49 -13
- package/lib/Compiler.js +3 -0
- package/lib/Dependency.js +1 -1
- package/lib/EntryOptionPlugin.js +28 -17
- package/lib/ExportsInfo.js +62 -16
- package/lib/FlagDependencyUsagePlugin.js +38 -42
- package/lib/ModuleTemplate.js +1 -1
- package/lib/NormalModule.js +1 -1
- package/lib/buildChunkGraph.js +111 -25
- package/lib/dependencies/CommonJsImportsParserPlugin.js +2 -0
- package/lib/index.js +9 -0
- package/lib/optimize/ConcatenatedModule.js +17 -9
- package/lib/optimize/InnerGraph.js +10 -1
- package/lib/stats/StatsPrinter.js +2 -2
- package/lib/util/Hash.js +2 -2
- package/lib/util/LazyBucketSortedSet.js +8 -8
- package/lib/util/cleverMerge.js +1 -1
- package/lib/util/runtime.js +1 -1
- package/package.json +3 -3
- package/types.d.ts +54 -5
package/lib/Compilation.js
CHANGED
@@ -2660,17 +2660,44 @@ Or do you want to use the entrypoints '${name}' and '${runtime}' independently o
		 * this is needed as the "hasRuntime()" chunks are dependent on the
		 * hashes of the non-runtime chunks.
		 */
-
-
-
-		if (
-
-
+		const runtimeChunks = [];
+		const otherChunks = [];
+		for (const c of chunks) {
+			if (c.hasRuntime()) {
+				runtimeChunks.push({
+					chunk: c,
+					referencedChunks: new Set(
+						Array.from(c.getAllReferencedAsyncEntrypoints()).map(
+							e => e.chunks[e.chunks.length - 1]
+						)
+					)
+				});
+			} else {
+				otherChunks.push(c);
+			}
+		}
+		otherChunks.sort(byId);
+		runtimeChunks.sort((a, b) => {
+			const aDependOnB = a.referencedChunks.has(b.chunk);
+			const bDependOnA = b.referencedChunks.has(a.chunk);
+			if (aDependOnB && bDependOnA) {
+				const err = new WebpackError(
+					`Circular dependency between chunks with runtime (${
+						a.chunk.name || a.chunk.id
+					} and ${b.chunk.name || b.chunk.id}).
+This prevents using hashes of each other and should be avoided.`
+				);
+				err.chunk = a.chunk;
+				this.warnings.push(err);
+				return byId(a.chunk, b.chunk);
+			}
+			if (aDependOnB) return 1;
+			if (bDependOnA) return -1;
+			return byId(a.chunk, b.chunk);
		});
		this.logger.timeEnd("hashing: sort chunks");
		const fullHashChunks = new Set();
-
-			const chunk = chunks[i];
+		const processChunk = chunk => {
			// Last minute module hash generation for modules that depend on chunk hashes
			this.logger.time("hashing: hash runtime modules");
			for (const module of chunkGraph.getChunkModulesIterable(chunk)) {
@@ -2722,7 +2749,10 @@ Or do you want to use the entrypoints '${name}' and '${runtime}' independently o
				this.errors.push(new ChunkRenderError(chunk, "", err));
			}
			this.logger.timeAggregate("hashing: hash chunks");
-		}
+		};
+		otherChunks.forEach(processChunk);
+		for (const { chunk } of runtimeChunks) processChunk(chunk);
+
		this.logger.timeAggregateEnd("hashing: hash runtime modules");
		this.logger.timeAggregateEnd("hashing: hash chunks");
		this.logger.time("hashing: hash digest");
@@ -2965,6 +2995,12 @@ Or do you want to use the entrypoints '${name}' and '${runtime}' independently o
				}
			}
		}
+		// TODO If this becomes a performance problem
+		// store a reverse mapping from asset to chunk
+		for (const chunk of this.chunks) {
+			chunk.files.delete(file);
+			chunk.auxiliaryFiles.delete(file);
+		}
	}

	getAssets() {
@@ -3407,14 +3443,14 @@ Compilation.PROCESS_ASSETS_STAGE_SUMMARIZE = 1000;
Compilation.PROCESS_ASSETS_STAGE_DEV_TOOLING = 2000;

/**
- * Optimize the
+ * Optimize the hashes of the assets, e. g. by generating real hashes of the asset content.
 */
-Compilation.
+Compilation.PROCESS_ASSETS_STAGE_OPTIMIZE_HASH = 2500;

/**
- * Optimize the
+ * Optimize the transfer of existing assets, e. g. by preparing a compressed (gzip) file as separate asset.
 */
-Compilation.
+Compilation.PROCESS_ASSETS_STAGE_OPTIMIZE_TRANSFER = 3000;

/**
 * Analyse existing assets.
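Usage note (illustrative, not part of the diff): the two processAssets stages restored above can be targeted from a plugin through compilation.hooks.processAssets. A minimal sketch, assuming you want to emit a gzip sibling for every asset; the plugin name is hypothetical, the hook, stage constant, and sources API are the ones this release ships.

// GzipSketchPlugin.js — a minimal sketch, not webpack source
const zlib = require("zlib");
const { Compilation, sources } = require("webpack");

class GzipSketchPlugin {
	apply(compiler) {
		compiler.hooks.thisCompilation.tap("GzipSketchPlugin", compilation => {
			compilation.hooks.processAssets.tap(
				{
					name: "GzipSketchPlugin",
					// stage added in this release: prepare the transfer of existing assets
					stage: Compilation.PROCESS_ASSETS_STAGE_OPTIMIZE_TRANSFER
				},
				assets => {
					for (const name of Object.keys(assets)) {
						// emit a compressed sibling asset next to each original
						const compressed = zlib.gzipSync(assets[name].source());
						compilation.emitAsset(`${name}.gz`, new sources.RawSource(compressed));
					}
				}
			);
		});
	}
}

module.exports = GzipSketchPlugin;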
package/lib/Compiler.js
CHANGED
@@ -14,6 +14,7 @@ const {
	AsyncSeriesHook
} = require("tapable");
const { SizeOnlySource } = require("webpack-sources");
+const webpack = require("./");
const Cache = require("./Cache");
const CacheFacade = require("./CacheFacade");
const Compilation = require("./Compilation");
@@ -170,6 +171,8 @@ class Compiler {
			entryOption: new SyncBailHook(["context", "entry"])
		});

+		this.webpack = webpack;
+
		/** @type {string=} */
		this.name = undefined;
		/** @type {Compilation=} */
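Usage note (illustrative, not part of the diff): because every Compiler now carries this.webpack, a plugin can reach webpack's exports through the compiler that instantiated it instead of requiring the webpack package itself, which can help when several webpack copies are installed. A minimal sketch; the plugin name and the defined constant are hypothetical.

// BuildToolDefineSketch.js — a minimal sketch, not webpack source
class BuildToolDefineSketch {
	apply(compiler) {
		// use the exact webpack instance that created this compiler
		const { DefinePlugin } = compiler.webpack;
		new DefinePlugin({
			BUILD_TOOL_VERSION: JSON.stringify(compiler.webpack.version)
		}).apply(compiler);
	}
}

module.exports = BuildToolDefineSketch;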
package/lib/Dependency.js
CHANGED
package/lib/EntryOptionPlugin.js
CHANGED
@@ -6,6 +6,7 @@
"use strict";

/** @typedef {import("../declarations/WebpackOptions").EntryDescriptionNormalized} EntryDescription */
+/** @typedef {import("../declarations/WebpackOptions").EntryNormalized} Entry */
/** @typedef {import("./Compiler")} Compiler */
/** @typedef {import("./Entrypoint").EntryOptions} EntryOptions */

@@ -16,27 +17,37 @@ class EntryOptionPlugin {
	 */
	apply(compiler) {
		compiler.hooks.entryOption.tap("EntryOptionPlugin", (context, entry) => {
-
-				const DynamicEntryPlugin = require("./DynamicEntryPlugin");
-				new DynamicEntryPlugin(context, entry).apply(compiler);
-			} else {
-				const EntryPlugin = require("./EntryPlugin");
-				for (const name of Object.keys(entry)) {
-					const desc = entry[name];
-					const options = EntryOptionPlugin.entryDescriptionToOptions(
-						compiler,
-						name,
-						desc
-					);
-					for (const entry of desc.import) {
-						new EntryPlugin(context, entry, options).apply(compiler);
-					}
-				}
-			}
+			EntryOptionPlugin.applyEntryOption(compiler, context, entry);
			return true;
		});
	}

+	/**
+	 * @param {Compiler} compiler the compiler
+	 * @param {string} context context directory
+	 * @param {Entry} entry request
+	 * @returns {void}
+	 */
+	static applyEntryOption(compiler, context, entry) {
+		if (typeof entry === "function") {
+			const DynamicEntryPlugin = require("./DynamicEntryPlugin");
+			new DynamicEntryPlugin(context, entry).apply(compiler);
+		} else {
+			const EntryPlugin = require("./EntryPlugin");
+			for (const name of Object.keys(entry)) {
+				const desc = entry[name];
+				const options = EntryOptionPlugin.entryDescriptionToOptions(
+					compiler,
+					name,
+					desc
+				);
+				for (const entry of desc.import) {
+					new EntryPlugin(context, entry, options).apply(compiler);
+				}
+			}
+		}
+	}
+
	/**
	 * @param {Compiler} compiler the compiler
	 * @param {string} name entry name
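Usage note (illustrative, not part of the diff): the entry handling is now exposed as the static EntryOptionPlugin.applyEntryOption(compiler, context, entry), and the class is added to the public exports in lib/index.js later in this diff. A minimal sketch of driving a child compiler with it; the plugin, entry name, and import path are hypothetical, and the entry is assumed to already be in the normalized { name: { import: [...] } } shape.

// ChildEntrySketchPlugin.js — a minimal sketch, not webpack source
class ChildEntrySketchPlugin {
	apply(compiler) {
		compiler.hooks.make.tapAsync("ChildEntrySketchPlugin", (compilation, callback) => {
			const { EntryOptionPlugin } = compiler.webpack;
			const childCompiler = compilation.createChildCompiler("child", {}, []);
			// reuse webpack's own entry handling instead of wiring EntryPlugin manually
			EntryOptionPlugin.applyEntryOption(childCompiler, compiler.context, {
				child: { import: ["./src/child-entry.js"] }
			});
			childCompiler.runAsChild(err => callback(err));
		});
	}
}

module.exports = ChildEntrySketchPlugin;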
package/lib/ExportsInfo.js
CHANGED
@@ -416,7 +416,7 @@ class ExportsInfo {

	/**
	 * @param {RuntimeSpec} runtime the runtime
-	 * @returns {boolean} true, when the module
+	 * @returns {boolean} true, when the module exports are used in any way
	 */
	isUsed(runtime) {
		if (this._redirectTo !== undefined) {
@@ -436,6 +436,17 @@ class ExportsInfo {
		return false;
	}

+	/**
+	 * @param {RuntimeSpec} runtime the runtime
+	 * @returns {boolean} true, when the module is used in any way
+	 */
+	isModuleUsed(runtime) {
+		if (this.isUsed(runtime)) return true;
+		if (this._sideEffectsOnlyInfo.getUsed(runtime) !== UsageState.Unused)
+			return true;
+		return false;
+	}
+
	/**
	 * @param {RuntimeSpec} runtime the runtime
	 * @returns {SortableSet<string> | boolean | null} set of used exports, or true (when namespace object is used), or false (when unused), or null (when unknown)
@@ -752,6 +763,8 @@ class ExportInfo {
		this.name = name;
		/** @private @type {string | null} */
		this._usedName = initFrom ? initFrom._usedName : null;
+		/** @private @type {UsageStateType} */
+		this._globalUsed = initFrom ? initFrom._globalUsed : undefined;
		/** @private @type {Map<string, RuntimeUsageStateType>} */
		this._usedInRuntime =
			initFrom && initFrom._usedInRuntime
@@ -907,7 +920,17 @@ class ExportInfo {
	 * @returns {boolean} true when something has changed
	 */
	setUsedConditionally(condition, newValue, runtime) {
-		if (
+		if (runtime === undefined) {
+			if (this._globalUsed === undefined) {
+				this._globalUsed = newValue;
+				return true;
+			} else {
+				if (this._globalUsed !== newValue && condition(this._globalUsed)) {
+					this._globalUsed = newValue;
+					return true;
+				}
+			}
+		} else if (this._usedInRuntime === undefined) {
			if (newValue !== UsageState.Unused && condition(UsageState.Unused)) {
				this._usedInRuntime = new Map();
				forEachRuntime(runtime, runtime =>
@@ -944,7 +967,12 @@ class ExportInfo {
	 * @returns {boolean} true when something has changed
	 */
	setUsed(newValue, runtime) {
-		if (
+		if (runtime === undefined) {
+			if (this._globalUsed !== newValue) {
+				this._globalUsed = newValue;
+				return true;
+			}
+		} else if (this._usedInRuntime === undefined) {
			if (newValue !== UsageState.Unused) {
				this._usedInRuntime = new Map();
				forEachRuntime(runtime, runtime =>
@@ -1023,6 +1051,7 @@ class ExportInfo {
	 */
	getUsed(runtime) {
		if (!this._hasUseInRuntimeInfo) return UsageState.NoInfo;
+		if (this._globalUsed !== undefined) return this._globalUsed;
		if (this._usedInRuntime === undefined) {
			return UsageState.Unused;
		} else if (typeof runtime === "string") {
@@ -1062,18 +1091,22 @@ class ExportInfo {
	 */
	getUsedName(fallbackName, runtime) {
		if (this._hasUseInRuntimeInfo) {
-			if (this.
-
-
-
-
-
-
-
-
-
-
+			if (this._globalUsed !== undefined) {
+				if (this._globalUsed === UsageState.Unused) return false;
+			} else {
+				if (this._usedInRuntime === undefined) return false;
+				if (typeof runtime === "string") {
+					if (!this._usedInRuntime.has(runtime)) {
+						return false;
+					}
+				} else if (runtime !== undefined) {
+					if (
+						Array.from(runtime).every(
+							runtime => !this._usedInRuntime.has(runtime)
+						)
+					) {
+						return false;
+					}
				}
			}
		}
@@ -1282,7 +1315,20 @@ class ExportInfo {
	}

	getUsedInfo() {
-		if (this.
+		if (this._globalUsed !== undefined) {
+			switch (this._globalUsed) {
+				case UsageState.Unused:
+					return "unused";
+				case UsageState.NoInfo:
+					return "no usage info";
+				case UsageState.Unknown:
+					return "maybe used (runtime-defined)";
+				case UsageState.Used:
+					return "used";
+				case UsageState.OnlyPropertiesUsed:
+					return "only properties used";
+			}
+		} else if (this._usedInRuntime !== undefined) {
			/** @type {Map<RuntimeUsageStateType, string[]>} */
			const map = new Map();
			for (const [runtime, used] of this._usedInRuntime) {
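Usage note (illustrative, not part of the diff): isUsed only reports whether any export is used, while the new isModuleUsed also returns true when the module is kept for its side effects (_sideEffectsOnlyInfo). A minimal sketch of querying it after usage info has been flagged; the plugin name is hypothetical, and undefined is passed as the runtime to ask across all runtimes.

// ReportUnusedModulesSketch.js — a minimal sketch, not webpack source
class ReportUnusedModulesSketch {
	apply(compiler) {
		compiler.hooks.compilation.tap("ReportUnusedModulesSketch", compilation => {
			compilation.hooks.afterOptimizeDependencies.tap(
				"ReportUnusedModulesSketch",
				modules => {
					for (const module of modules) {
						const exportsInfo = compilation.moduleGraph.getExportsInfo(module);
						// neither exports used nor kept for side effects
						if (!exportsInfo.isModuleUsed(undefined)) {
							console.log("module appears unused:", module.identifier());
						}
					}
				}
			);
		});
	}
}

module.exports = ReportUnusedModulesSketch;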
package/lib/FlagDependencyUsagePlugin.js
CHANGED
@@ -7,13 +7,10 @@

const Dependency = require("./Dependency");
const { UsageState } = require("./ExportsInfo");
+const ModuleGraphConnection = require("./ModuleGraphConnection");
const { STAGE_DEFAULT } = require("./OptimizationStages");
const TupleQueue = require("./util/TupleQueue");
-const {
-	getEntryRuntime,
-	mergeRuntime,
-	mergeRuntimeOwned
-} = require("./util/runtime");
+const { getEntryRuntime, mergeRuntimeOwned } = require("./util/runtime");

/** @typedef {import("./Chunk")} Chunk */
/** @typedef {import("./ChunkGroup")} ChunkGroup */
@@ -63,9 +60,15 @@ class FlagDependencyUsagePlugin {
		 * @param {Module} module module to process
		 * @param {(string[] | ReferencedExport)[]} usedExports list of used exports
		 * @param {RuntimeSpec} runtime part of which runtime
+		 * @param {boolean} forceSideEffects always apply side effects
		 * @returns {void}
		 */
-		const processReferencedModule = (
+		const processReferencedModule = (
+			module,
+			usedExports,
+			runtime,
+			forceSideEffects
+		) => {
			const exportsInfo = moduleGraph.getExportsInfo(module);
			if (usedExports.length > 0) {
				if (!module.buildMeta || !module.buildMeta.exportsType) {
@@ -143,10 +146,12 @@ class FlagDependencyUsagePlugin {
					// This module won't be evaluated in this case
					// TODO webpack 6 remove this check
					if (
+						!forceSideEffects &&
						module.factoryMeta !== undefined &&
						module.factoryMeta.sideEffectFree
-					)
+					) {
						return;
+					}
					if (exportsInfo.setUsedForSideEffectsOnly(runtime)) {
						queue.enqueue(module, runtime);
					}
@@ -166,7 +171,11 @@ class FlagDependencyUsagePlugin {
			const queue = [module];
			for (const block of queue) {
				for (const b of block.blocks) {
-					if (
+					if (
+						!this.global &&
+						b.groupOptions &&
+						b.groupOptions.entryOptions
+					) {
						processModule(b, b.groupOptions.entryOptions.runtime);
					} else {
						queue.push(b);
@@ -174,14 +183,16 @@ class FlagDependencyUsagePlugin {
				}
				for (const dep of block.dependencies) {
					const connection = moduleGraph.getConnection(dep);
-					if (
-						!connection ||
-						!connection.module ||
-						!connection.isTargetActive(runtime)
-					) {
+					if (!connection || !connection.module) {
						continue;
					}
+					const activeState = connection.getActiveState(runtime);
+					if (activeState === false) continue;
					const { module } = connection;
+					if (activeState === ModuleGraphConnection.TRANSITIVE_ONLY) {
+						processModule(module, runtime);
+						continue;
+					}
					const oldReferencedExports = map.get(module);
					if (oldReferencedExports === EXPORTS_OBJECT_REFERENCED) {
						continue;
@@ -242,12 +253,18 @@ class FlagDependencyUsagePlugin {

			for (const [module, referencedExports] of map) {
				if (Array.isArray(referencedExports)) {
-					processReferencedModule(
+					processReferencedModule(
+						module,
+						referencedExports,
+						runtime,
+						false
+					);
				} else {
					processReferencedModule(
						module,
						Array.from(referencedExports.values()),
-						runtime
+						runtime,
+						false
					);
				}
			}
@@ -270,8 +287,12 @@ class FlagDependencyUsagePlugin {
		const processEntryDependency = (dep, runtime) => {
			const module = moduleGraph.getModule(dep);
			if (module) {
-				processReferencedModule(
-
+				processReferencedModule(
+					module,
+					NO_EXPORTS_REFERENCED,
+					runtime,
+					true
+				);
			}
		};
		/** @type {RuntimeSpec} */
@@ -305,31 +326,6 @@ class FlagDependencyUsagePlugin {
				logger.timeEnd("trace exports usage in graph");
			}
		);
-		if (!this.global) {
-			compilation.hooks.afterChunks.tap("FlagDependencyUsagePlugin", () => {
-				/** @type {Map<Chunk, string>} */
-				const runtimeChunks = new Map();
-				for (const entrypoint of compilation.entrypoints.values()) {
-					runtimeChunks.set(
-						entrypoint.getRuntimeChunk(),
-						entrypoint.options.runtime
-					);
-				}
-				for (const entrypoint of compilation.asyncEntrypoints) {
-					runtimeChunks.set(
-						entrypoint.getRuntimeChunk(),
-						entrypoint.options.runtime
-					);
-				}
-
-				for (const [runtimeChunk, runtimeName] of runtimeChunks) {
-					const runtime = runtimeName || runtimeChunk.name;
-					for (const chunk of runtimeChunk.getAllReferencedChunks()) {
-						chunk.runtime = mergeRuntime(chunk.runtime, runtime);
-					}
-				}
-			});
-		}
			});
		}
	}
package/lib/ModuleTemplate.js
CHANGED
@@ -126,7 +126,7 @@ class ModuleTemplate {
				(options, fn) => {
					compilation.hooks.fullHash.tap(options, fn);
				},
-				"ModuleTemplate.hooks.
+				"ModuleTemplate.hooks.hash is deprecated (use Compilation.hooks.fullHash instead)",
				"DEP_MODULE_TEMPLATE_HASH"
			)
		}
package/lib/NormalModule.js
CHANGED
@@ -154,7 +154,7 @@ makeSerializable(
/**
 * @typedef {Object} NormalModuleCompilationHooks
 * @property {SyncHook<[object, NormalModule]>} loader
- * @property {SyncHook<[LoaderItem[],
+ * @property {SyncHook<[LoaderItem[], NormalModule, object]>} beforeLoaders
 * @property {HookMap<AsyncSeriesBailHook<[string, NormalModule], string | Buffer>>} readResourceForScheme
 */

package/lib/buildChunkGraph.js
CHANGED
@@ -8,6 +8,7 @@
const AsyncDependencyToInitialChunkError = require("./AsyncDependencyToInitialChunkError");
const { connectChunkGroupParentAndChild } = require("./GraphHelpers");
const ModuleGraphConnection = require("./ModuleGraphConnection");
+const { getEntryRuntime, mergeRuntime } = require("./util/runtime");

/** @typedef {import("./AsyncDependenciesBlock")} AsyncDependenciesBlock */
/** @typedef {import("./Chunk")} Chunk */
@@ -20,6 +21,7 @@ const ModuleGraphConnection = require("./ModuleGraphConnection");
/** @typedef {import("./ModuleGraph")} ModuleGraph */
/** @typedef {import("./ModuleGraphConnection").ConnectionState} ConnectionState */
/** @typedef {import("./logging/Logger").Logger} Logger */
+/** @typedef {import("./util/runtime").RuntimeSpec} RuntimeSpec */

/**
 * @typedef {Object} QueueItem
@@ -36,10 +38,12 @@ const ModuleGraphConnection = require("./ModuleGraphConnection");
/**
 * @typedef {Object} ChunkGroupInfo
 * @property {ChunkGroup} chunkGroup the chunk group
+ * @property {RuntimeSpec} runtime the runtimes
 * @property {ModuleSetPlus} minAvailableModules current minimal set of modules available at this point
 * @property {boolean} minAvailableModulesOwned true, if minAvailableModules is owned and can be modified
 * @property {ModuleSetPlus[]} availableModulesToBeMerged enqueued updates to the minimal set of available modules
 * @property {Set<Module>=} skippedItems modules that were skipped because module is already available in parent chunks (need to reconsider when minAvailableModules is shrinking)
+ * @property {Set<[Module, ModuleGraphConnection[]]>=} skippedModuleConnections referenced modules that where skipped because they were not active in this runtime
 * @property {ModuleSetPlus} resultingAvailableModules set of modules available including modules from this chunk group
 * @property {Set<ChunkGroupInfo>} children set of children chunk groups, that will be revisited when availableModules shrink
 * @property {Set<ChunkGroupInfo>} availableSources set of chunk groups that are the source for minAvailableModules
@@ -66,21 +70,41 @@ const bySetSize = (a, b) => {
	return b.size + b.plus.size - a.size - a.plus.size;
};

+/**
+ *
+ * @param {ModuleGraphConnection[]} connections list of connections
+ * @param {RuntimeSpec} runtime for which runtime
+ * @returns {ConnectionState} connection state
+ */
+const getActiveStateOfConnections = (connections, runtime) => {
+	let merged = connections[0].getActiveState(runtime);
+	if (merged === true) return true;
+	for (let i = 1; i < connections.length; i++) {
+		const c = connections[i];
+		merged = ModuleGraphConnection.addConnectionStates(
+			merged,
+			c.getActiveState(runtime)
+		);
+		if (merged === true) return true;
+	}
+	return merged;
+};
+
/**
 * Extracts block to modules mapping from all modules
 * @param {Compilation} compilation the compilation
- * @returns {Map<DependenciesBlock, Map<Module,
+ * @returns {Map<DependenciesBlock, Map<Module, ModuleGraphConnection[]>>} the mapping block to modules
 */
const extractBlockModulesMap = compilation => {
	const { moduleGraph } = compilation;

-	/** @type {Map<DependenciesBlock, Map<Module,
+	/** @type {Map<DependenciesBlock, Map<Module, ModuleGraphConnection[]>>} */
	const blockModulesMap = new Map();

	const blockQueue = new Set();

	for (const module of compilation.modules) {
-		/** @type {WeakMap<Dependency,
+		/** @type {WeakMap<Dependency, ModuleGraphConnection>} */
		let moduleMap;

		for (const connection of moduleGraph.getOutgoingConnections(module)) {
@@ -101,7 +125,7 @@ const extractBlockModulesMap = compilation => {
			if (moduleMap === undefined) {
				moduleMap = new WeakMap();
			}
-			moduleMap.set(connection.dependency,
+			moduleMap.set(connection.dependency, connection);
		}

		blockQueue.clear();
@@ -111,27 +135,19 @@ const extractBlockModulesMap = compilation => {

			if (moduleMap !== undefined && block.dependencies) {
				for (const dep of block.dependencies) {
-					const
-					if (
-						const
+					const connection = moduleMap.get(dep);
+					if (connection !== undefined) {
+						const { module } = connection;
						if (modules === undefined) {
							modules = new Map();
							blockModulesMap.set(block, modules);
						}
-
-						if (
-
-
-
-							old,
-							merged
-						));
-						if (merged === old) {
-							continue;
-						}
-					}
+						const old = modules.get(module);
+						if (old !== undefined) {
+							old.push(connection);
+						} else {
+							modules.set(module, [connection]);
						}
-						modules.set(module, merged);
					}
				}
			}
@@ -216,9 +232,15 @@ const visitModules = (
	// Fill queue with entrypoint modules
	// Create ChunkGroupInfo for entrypoints
	for (const [chunkGroup, modules] of inputEntrypointsAndModules) {
+		const runtime = getEntryRuntime(
+			compilation,
+			chunkGroup.name,
+			chunkGroup.options
+		);
		/** @type {ChunkGroupInfo} */
		const chunkGroupInfo = {
			chunkGroup,
+			runtime,
			minAvailableModules: undefined,
			minAvailableModulesOwned: false,
			availableModulesToBeMerged: [],
@@ -288,6 +310,8 @@ const visitModules = (

	logger.timeEnd("visitModules: prepare");

+	/** @type {[Module, ModuleGraphConnection[]][]} */
+	const skipConnectionBuffer = [];
	/** @type {Module[]} */
	const skipBuffer = [];
	/** @type {QueueItem[]} */
@@ -332,6 +356,7 @@ const visitModules = (
				entrypoint.index = nextChunkGroupIndex++;
				cgi = {
					chunkGroup: entrypoint,
+					runtime: entrypoint.options.runtime || entrypoint.name,
					minAvailableModules: EMPTY_SET,
					minAvailableModulesOwned: false,
					availableModulesToBeMerged: [],
@@ -377,6 +402,7 @@ const visitModules = (
				c.index = nextChunkGroupIndex++;
				cgi = {
					chunkGroup: c,
+					runtime: chunkGroupInfo.runtime,
					minAvailableModules: undefined,
					minAvailableModulesOwned: undefined,
					availableModulesToBeMerged: [],
@@ -455,17 +481,24 @@ const visitModules = (
		const blockModules = blockModulesMap.get(block);

		if (blockModules !== undefined) {
-			const { minAvailableModules } = chunkGroupInfo;
+			const { minAvailableModules, runtime } = chunkGroupInfo;
			// Buffer items because order need to be reversed to get indices correct
			// Traverse all referenced modules
-			for (const
+			for (const entry of blockModules) {
+				const [refModule, connections] = entry;
				if (chunkGraph.isModuleInChunk(refModule, chunk)) {
					// skip early if already connected
					continue;
				}
+				const activeState = getActiveStateOfConnections(connections, runtime);
+				if (activeState !== true) {
+					skipConnectionBuffer.push(entry);
+					if (activeState === false) continue;
+				}
				if (
-
-					minAvailableModules.
+					activeState === true &&
+					(minAvailableModules.has(refModule) ||
+						minAvailableModules.plus.has(refModule))
				) {
					// already in parent chunks, skip it for now
					skipBuffer.push(refModule);
@@ -483,6 +516,16 @@ const visitModules = (
				});
			}
			// Add buffered items in reverse order
+			if (skipConnectionBuffer.length > 0) {
+				let { skippedModuleConnections } = chunkGroupInfo;
+				if (skippedModuleConnections === undefined) {
+					chunkGroupInfo.skippedModuleConnections = skippedModuleConnections = new Set();
+				}
+				for (let i = skipConnectionBuffer.length - 1; i >= 0; i--) {
+					skippedModuleConnections.add(skipConnectionBuffer[i]);
+				}
+				skipConnectionBuffer.length = 0;
+			}
			if (skipBuffer.length > 0) {
				let { skippedItems } = chunkGroupInfo;
				if (skippedItems === undefined) {
@@ -522,7 +565,8 @@ const visitModules = (

		if (blockModules !== undefined) {
			// Traverse all referenced modules
-			for (const [refModule,
+			for (const [refModule, connections] of blockModules) {
+				const activeState = getActiveStateOfConnections(connections, undefined);
				// enqueue, then add and enter to be in the correct order
				// this is relevant with circular dependencies
				queueBuffer.push({
@@ -692,10 +736,18 @@ const visitModules = (
			chunkGroupInfo
		);

+		const runtime = chunkGroupInfo.runtime;
+
		// 3. Update chunk group info
		for (const target of targets) {
			target.availableModulesToBeMerged.push(resultingAvailableModules);
			chunkGroupsForMerging.add(target);
+			const oldRuntime = target.runtime;
+			const newRuntime = mergeRuntime(oldRuntime, runtime);
+			if (oldRuntime !== newRuntime) {
+				target.runtime = newRuntime;
+				outdatedChunkGroupInfo.add(target);
+			}
		}

		statConnectedChunkGroups += targets.size;
@@ -1000,6 +1052,35 @@ const visitModules = (
			}
		}

+		// 2. Reconsider skipped connections
+		if (info.skippedModuleConnections !== undefined) {
+			const { minAvailableModules, runtime } = info;
+			for (const entry of info.skippedModuleConnections) {
+				const [module, connections] = entry;
+				const activeState = getActiveStateOfConnections(connections, runtime);
+				if (activeState === false) continue;
+				if (activeState === true) {
+					info.skippedModuleConnections.delete(entry);
+				}
+				if (
+					activeState === true &&
+					(minAvailableModules.has(module) ||
+						minAvailableModules.plus.has(module))
+				) {
+					info.skippedItems.add(module);
+					continue;
+				}
+				queue.push({
+					action: activeState === true ? ADD_AND_ENTER_MODULE : PROCESS_BLOCK,
+					block: module,
+					module,
+					chunk: info.chunkGroup.chunks[0],
+					chunkGroup: info.chunkGroup,
+					chunkGroupInfo: info
+				});
+			}
+		}
+
		// 2. Reconsider children chunk groups
		if (info.children !== undefined) {
			statChildChunkGroupsReconnected += info.children.size;
@@ -1213,6 +1294,11 @@ const buildChunkGraph = (compilation, inputEntrypointsAndModules) => {
	);
	logger.timeEnd("connectChunkGroups");

+	for (const [chunkGroup, chunkGroupInfo] of chunkGroupInfoMap) {
+		for (const chunk of chunkGroup.chunks)
+			chunk.runtime = mergeRuntime(chunk.runtime, chunkGroupInfo.runtime);
+	}
+
	// Cleanup work

	logger.time("cleanup");
package/lib/dependencies/CommonJsImportsParserPlugin.js
CHANGED
@@ -238,6 +238,7 @@ class CommonJsImportsParserPlugin {
				members
			);
			dep.asiSafe = !parser.isAsiPosition(expr.range[0]);
+			dep.optional = !!parser.scope.inTry;
			dep.loc = expr.loc;
			parser.state.module.addDependency(dep);
			return true;
@@ -254,6 +255,7 @@ class CommonJsImportsParserPlugin {
			);
			dep.call = true;
			dep.asiSafe = !parser.isAsiPosition(expr.range[0]);
+			dep.optional = !!parser.scope.inTry;
			dep.loc = expr.callee.loc;
			parser.state.module.addDependency(dep);
			parser.walkExpressions(expr.arguments);
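Usage note (illustrative, not part of the diff): with dep.optional = !!parser.scope.inTry; set on member-access and member-call requires, a pattern like the one below is treated as an optional dependency when the module is missing: webpack reports it as a warning rather than a compilation error, and the require throws at runtime where the catch handles it. The module name below is hypothetical.

// optional-require.js — user code pattern affected by this change, not webpack source
let formatBytes;
try {
	// member access on a require inside try: now flagged optional by the parser
	formatBytes = require("pretty-bytes-like-lib").default;
} catch (e) {
	// fallback when the optional dependency is not installed
	formatBytes = n => `${n} B`;
}

module.exports = size => formatBytes(size);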
package/lib/index.js
CHANGED
@@ -137,6 +137,12 @@ module.exports = mergeExports(fn, {
	get DllReferencePlugin() {
		return require("./DllReferencePlugin");
	},
+	get DynamicEntryPlugin() {
+		return require("./DynamicEntryPlugin");
+	},
+	get EntryOptionPlugin() {
+		return require("./EntryOptionPlugin");
+	},
	get EntryPlugin() {
		return require("./EntryPlugin");
	},
@@ -480,6 +486,9 @@ module.exports = mergeExports(fn, {
		},
		get serialization() {
			return require("./util/serialization");
+		},
+		get cleverMerge() {
+			return require("./util/cleverMerge").cachedCleverMerge;
		}
	},

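Usage note (illustrative, not part of the diff): the new getters make DynamicEntryPlugin, EntryOptionPlugin and the cached cleverMerge helper reachable from the public export object. A minimal sketch; the merged option fragments are hypothetical, and cleverMerge is assumed to land under webpack.util, next to the serialization getter shown in the hunk above.

// exports-sketch.js — a minimal sketch, not webpack source
const webpack = require("webpack");

// classes that previously required deep imports from webpack/lib
const { DynamicEntryPlugin, EntryOptionPlugin } = webpack;
console.log(typeof DynamicEntryPlugin, typeof EntryOptionPlugin); // "function" "function"

// cached merge of two option fragments (result is reused for identical inputs)
const merged = webpack.util.cleverMerge(
	{ devtool: "source-map", optimization: { minimize: true } },
	{ optimization: { splitChunks: { chunks: "all" } } }
);
console.log(merged.optimization);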
package/lib/optimize/ConcatenatedModule.js
CHANGED
@@ -308,14 +308,6 @@ const getFinalBinding = (
		};
	}
	alreadyVisited.add(exportInfo);
-	if (exportInfo.getUsed(runtime) === UsageState.Unused) {
-		return {
-			info,
-			rawName: "/* unused export */ undefined",
-			ids: exportName.slice(1),
-			exportName
-		};
-	}
	switch (info.type) {
		case "concatenated": {
			const exportId = exportName[0];
@@ -335,6 +327,14 @@ const getFinalBinding = (
					exportName,
					runtime
				));
+				if (!usedName) {
+					return {
+						info,
+						rawName: "/* unused export */ undefined",
+						ids: exportName.slice(1),
+						exportName
+					};
+				}
				return {
					info,
					name: directExport,
@@ -404,6 +404,14 @@ const getFinalBinding = (
				exportName,
				runtime
			));
+			if (!used) {
+				return {
+					info,
+					rawName: "/* unused export */ undefined",
+					ids: exportName.slice(1),
+					exportName
+				};
+			}
			const comment = equals(used, exportName)
				? ""
				: Template.toNormalComment(`${exportName.join(".")}`);
@@ -1500,7 +1508,7 @@ ${defineGetters}`
		const orderedConcatenationList = this._createConcatenationList(
			this.rootModule,
			this._modules,
-
+			runtime,
			moduleGraph
		);
		return orderedConcatenationList.map((info, index) => {
package/lib/optimize/InnerGraph.js
CHANGED
@@ -116,6 +116,7 @@ exports.inferDependencyUsage = state => {
	}

	const { innerGraph, usageCallbackMap } = innerGraphState;
+	const processed = new Map();
	// flatten graph to terminal nodes (string, undefined or true)
	const nonTerminal = new Set(innerGraph.keys());
	while (nonTerminal.size > 0) {
@@ -124,7 +125,15 @@ exports.inferDependencyUsage = state => {
			let newSet = new Set();
			let isTerminal = true;
			const value = innerGraph.get(key);
+			let alreadyProcessed = processed.get(key);
+			if (alreadyProcessed === undefined) {
+				alreadyProcessed = new Set();
+				processed.set(key, alreadyProcessed);
+			}
			if (value !== true && value !== undefined) {
+				for (const item of value) {
+					alreadyProcessed.add(item);
+				}
				for (const item of value) {
					if (typeof item === "string") {
						newSet.add(item);
@@ -137,7 +146,7 @@ exports.inferDependencyUsage = state => {
						if (itemValue !== undefined) {
							for (const i of itemValue) {
								if (i === key) continue;
-								if (
+								if (alreadyProcessed.has(i)) continue;
								newSet.add(i);
								if (typeof i !== "string") {
									isTerminal = false;
package/lib/stats/StatsPrinter.js
CHANGED
@@ -46,9 +46,9 @@ class StatsPrinter {
	}

	/**
+	 * get all level hooks
	 * @private
	 * @template {Hook} T
-	 * get all level hooks
	 * @param {HookMap<T>} hookMap HookMap
	 * @param {string} type type
	 * @returns {T[]} hooks
@@ -79,10 +79,10 @@ class StatsPrinter {
	}

	/**
+	 * Run `fn` for each level
	 * @private
	 * @template T
	 * @template R
-	 * Run `fn` for each level
	 * @param {HookMap<SyncBailHook<T, R>>} hookMap HookMap
	 * @param {string} type type
	 * @param {(hook: SyncBailHook<T, R>) => R} fn function
package/lib/util/Hash.js
CHANGED
@@ -8,8 +8,8 @@
class Hash {
	/* istanbul ignore next */
	/**
-	 * @abstract
	 * Update hash {@link https://nodejs.org/api/crypto.html#crypto_hash_update_data_inputencoding}
+	 * @abstract
	 * @param {string|Buffer} data data
	 * @param {string=} inputEncoding data encoding
	 * @returns {this} updated hash
@@ -21,8 +21,8 @@ class Hash {

	/* istanbul ignore next */
	/**
-	 * @abstract
	 * Calculates the digest {@link https://nodejs.org/api/crypto.html#crypto_hash_digest_encoding}
+	 * @abstract
	 * @param {string=} encoding encoding of the return value
	 * @returns {string|Buffer} digest
	 */
@@ -8,16 +8,16 @@
|
|
8
8
|
const SortableSet = require("./SortableSet");
|
9
9
|
|
10
10
|
/**
|
11
|
+
* Multi layer bucket sorted set:
|
12
|
+
* Supports adding non-existing items (DO NOT ADD ITEM TWICE),
|
13
|
+
* Supports removing exiting items (DO NOT REMOVE ITEM NOT IN SET),
|
14
|
+
* Supports popping the first items according to defined order,
|
15
|
+
* Supports iterating all items without order,
|
16
|
+
* Supports updating an item in an efficient way,
|
17
|
+
* Supports size property, which is the number of items,
|
18
|
+
* Items are lazy partially sorted when needed
|
11
19
|
* @template T
|
12
20
|
* @template K
|
13
|
-
* Multi layer bucket sorted set
|
14
|
-
* Supports adding non-existing items (DO NOT ADD ITEM TWICE)
|
15
|
-
* Supports removing exiting items (DO NOT REMOVE ITEM NOT IN SET)
|
16
|
-
* Supports popping the first items according to defined order
|
17
|
-
* Supports iterating all items without order
|
18
|
-
* Supports updating an item in an efficient way
|
19
|
-
* Supports size property, which is the number of items
|
20
|
-
* Items are lazy partially sorted when needed
|
21
21
|
*/
|
22
22
|
class LazyBucketSortedSet {
|
23
23
|
/**
|
package/lib/util/cleverMerge.js
CHANGED
@@ -12,9 +12,9 @@ const setPropertyCache = new WeakMap();
const DELETE = Symbol("DELETE");

/**
+ * Merges two given objects and caches the result to avoid computation if same objects passed as arguments again.
 * @template T
 * @template O
- * Merges two given objects and caches the result to avoid computation if same objects passed as arguments again.
 * @example
 * // performs cleverMerge(first, second), stores the result in WeakMap and returns result
 * cachedCleverMerge({a: 1}, {a: 2})
package/lib/util/runtime.js
CHANGED
package/package.json
CHANGED
@@ -1,6 +1,6 @@
{
	"name": "webpack",
-	"version": "5.
+	"version": "5.1.3",
	"author": "Tobias Koppers @sokra",
	"description": "Packs CommonJs/AMD modules for the browser. Allows to split your codebase into multiple bundles, which can be loaded on demand. Support loaders to preprocess files, i.e. json, jsx, es7, css, less, ... and your custom stuff.",
	"license": "MIT",
@@ -26,7 +26,7 @@
		"pkg-dir": "^4.2.0",
		"schema-utils": "^3.0.0",
		"tapable": "^2.0.0",
-		"terser-webpack-plugin": "^
+		"terser-webpack-plugin": "^5.0.0",
		"watchpack": "^2.0.0",
		"webpack-sources": "^2.0.1"
	},
@@ -75,7 +75,7 @@
		"lodash": "^4.17.19",
		"lodash-es": "^4.17.15",
		"memfs": "^3.2.0",
-		"mini-css-extract-plugin": "^0.
+		"mini-css-extract-plugin": "^1.0.0",
		"mini-svg-data-uri": "^1.2.3",
		"open-cli": "^6.0.1",
		"prettier": "^2.0.5",
package/types.d.ts
CHANGED
@@ -1547,14 +1547,14 @@ declare class Compilation {
	static PROCESS_ASSETS_STAGE_DEV_TOOLING: number;

	/**
-	 * Optimize the
+	 * Optimize the hashes of the assets, e. g. by generating real hashes of the asset content.
	 */
-	static
+	static PROCESS_ASSETS_STAGE_OPTIMIZE_HASH: number;

	/**
-	 * Optimize the
+	 * Optimize the transfer of existing assets, e. g. by preparing a compressed (gzip) file as separate asset.
	 */
-	static
+	static PROCESS_ASSETS_STAGE_OPTIMIZE_TRANSFER: number;

	/**
	 * Analyse existing assets.
@@ -1618,6 +1618,7 @@ declare class Compiler {
		afterResolvers: SyncHook<[Compiler], void>;
		entryOption: SyncBailHook<[string, EntryNormalized], boolean>;
	}>;
+	webpack: typeof exports;
	name: string;
	parentCompilation: Compilation;
	root: Compiler;
@@ -2222,6 +2223,10 @@ declare class Dependency {
	readonly type: string;
	readonly category: string;
	getResourceIdentifier(): string;
+
+	/**
+	 * Returns the referenced module and export
+	 */
	getReference(moduleGraph: ModuleGraph): never;

	/**
@@ -2545,6 +2550,16 @@ type DllReferencePluginOptionsSourceType =
	| "umd2"
	| "jsonp"
	| "system";
+declare class DynamicEntryPlugin {
+	constructor(context: string, entry: () => Promise<EntryStaticNormalized>);
+	context: string;
+	entry: () => Promise<EntryStaticNormalized>;
+
+	/**
+	 * Apply the plugin
+	 */
+	apply(compiler: Compiler): void;
+}
declare interface Effect {
	type: string;
	value: any;
@@ -2700,6 +2715,28 @@ type EntryNormalized =
declare interface EntryObject {
	[index: string]: string | [string, ...string[]] | EntryDescription;
}
+declare class EntryOptionPlugin {
+	constructor();
+	apply(compiler: Compiler): void;
+	static applyEntryOption(
+		compiler: Compiler,
+		context: string,
+		entry: EntryNormalized
+	): void;
+	static entryDescriptionToOptions(
+		compiler: Compiler,
+		name: string,
+		desc: EntryDescriptionNormalized
+	): { name?: string } & Pick<
+		EntryDescriptionNormalized,
+		| "filename"
+		| "chunkLoading"
+		| "dependOn"
+		| "library"
+		| "runtime"
+		| "wasmLoading"
+	>;
+}
declare class EntryPlugin {
	/**
	 * An entry plugin which will handle
@@ -3039,6 +3076,7 @@ declare abstract class ExportsInfo {
	setAllKnownExportsUsed(runtime: string | SortableSet<string>): boolean;
	setUsedForSideEffectsOnly(runtime: string | SortableSet<string>): boolean;
	isUsed(runtime: string | SortableSet<string>): boolean;
+	isModuleUsed(runtime: string | SortableSet<string>): boolean;
	getUsedExports(
		runtime: string | SortableSet<string>
	): boolean | SortableSet<string>;
@@ -3566,7 +3604,15 @@ declare interface HandleModuleCreationOptions {
}
declare class Hash {
	constructor();
+
+	/**
+	 * Update hash {@link https://nodejs.org/api/crypto.html#crypto_hash_update_data_inputencoding}
+	 */
	update(data: string | Buffer, inputEncoding?: string): Hash;
+
+	/**
+	 * Calculates the digest {@link https://nodejs.org/api/crypto.html#crypto_hash_digest_encoding}
+	 */
	digest(encoding?: string): string | Buffer;
}
type HashFunction = string | typeof Hash;
@@ -5551,7 +5597,7 @@ declare class NormalModule extends Module {
}
declare interface NormalModuleCompilationHooks {
	loader: SyncHook<[any, NormalModule], void>;
-	beforeLoaders: SyncHook<[LoaderItem[],
+	beforeLoaders: SyncHook<[LoaderItem[], NormalModule, any], void>;
	readResourceForScheme: HookMap<
		AsyncSeriesBailHook<[string, NormalModule], string | Buffer>
	>;
@@ -10254,6 +10300,7 @@ declare namespace exports {
		export let createFileSerializer: (fs?: any) => Serializer;
		export { MEASURE_START_OPERATION, MEASURE_END_OPERATION };
	}
+	export const cleverMerge: <T, O>(first: T, second: O) => T & O;
}
export namespace sources {
	export {
@@ -10295,6 +10342,8 @@ declare namespace exports {
		Dependency,
		DllPlugin,
		DllReferencePlugin,
+		DynamicEntryPlugin,
+		EntryOptionPlugin,
		EntryPlugin,
		EnvironmentPlugin,
		EvalDevToolModulePlugin,