webpack 5.58.0 → 5.58.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release. This version of webpack might be problematic.
- package/lib/ChunkGraph.js +16 -30
- package/lib/Compilation.js +1 -1
- package/lib/NormalModuleFactory.js +57 -55
- package/lib/optimize/SplitChunksPlugin.js +235 -135
- package/lib/wasm-sync/WebAssemblyInInitialChunkError.js +1 -6
- package/package.json +1 -1
- package/types.d.ts +0 -12
- package/lib/ChunkCombination.js +0 -187
package/lib/ChunkGraph.js
CHANGED
@@ -6,7 +6,6 @@
 "use strict";
 
 const util = require("util");
-const ChunkCombination = require("./ChunkCombination");
 const Entrypoint = require("./Entrypoint");
 const ModuleGraphConnection = require("./ModuleGraphConnection");
 const { first } = require("./util/SetHelpers");
@@ -41,8 +40,6 @@ const {
 /** @type {ReadonlySet<string>} */
 const EMPTY_SET = new Set();
 
-const EMPTY_RUNTIME_SPEC_SET = new RuntimeSpecSet();
-
 const ZERO_BIG_INT = BigInt(0);
 
 const compareModuleIterables = compareIterables(compareModulesByIdentifier);
@@ -180,7 +177,8 @@ const isAvailableChunk = (a, b) => {
 
 class ChunkGraphModule {
 	constructor() {
-
+		/** @type {SortableSet<Chunk>} */
+		this.chunks = new SortableSet();
 		/** @type {Set<Chunk> | undefined} */
 		this.entryInChunks = undefined;
 		/** @type {Set<Chunk> | undefined} */
@@ -303,7 +301,7 @@ class ChunkGraph {
 	connectChunkAndModule(chunk, module) {
 		const cgm = this._getChunkGraphModule(module);
 		const cgc = this._getChunkGraphChunk(chunk);
-		cgm.
+		cgm.chunks.add(chunk);
 		cgc.modules.add(module);
 	}
 
@@ -316,7 +314,7 @@ class ChunkGraph {
 		const cgm = this._getChunkGraphModule(module);
 		const cgc = this._getChunkGraphChunk(chunk);
 		cgc.modules.delete(module);
-		cgm.
+		cgm.chunks.delete(chunk);
 	}
 
 	/**
@@ -327,7 +325,7 @@ class ChunkGraph {
 		const cgc = this._getChunkGraphChunk(chunk);
 		for (const module of cgc.modules) {
 			const cgm = this._getChunkGraphModule(module);
-			cgm.
+			cgm.chunks.delete(chunk);
 		}
 		cgc.modules.clear();
 		chunk.disconnectFromGroups();
@@ -394,13 +392,13 @@ class ChunkGraph {
 		const oldCgm = this._getChunkGraphModule(oldModule);
 		const newCgm = this._getChunkGraphModule(newModule);
 
-		for (const chunk of oldCgm.
+		for (const chunk of oldCgm.chunks) {
 			const cgc = this._getChunkGraphChunk(chunk);
 			cgc.modules.delete(oldModule);
 			cgc.modules.add(newModule);
-			newCgm.
+			newCgm.chunks.add(chunk);
 		}
-		oldCgm.
+		oldCgm.chunks.clear();
 
 		if (oldCgm.entryInChunks !== undefined) {
 			if (newCgm.entryInChunks === undefined) {
@@ -487,22 +485,13 @@ class ChunkGraph {
 		return cgm.entryInChunks !== undefined;
 	}
 
-	/**
-	 * @param {Module} module the module
-	 * @returns {ChunkCombination} chunk combination (do not modify)
-	 */
-	getModuleChunkCombination(module) {
-		const cgm = this._getChunkGraphModule(module);
-		return cgm.chunkCombination;
-	}
-
 	/**
 	 * @param {Module} module the module
 	 * @returns {Iterable<Chunk>} iterable of chunks (do not modify)
 	 */
 	getModuleChunksIterable(module) {
 		const cgm = this._getChunkGraphModule(module);
-		return cgm.
+		return cgm.chunks;
 	}
 
 	/**
@@ -512,9 +501,8 @@ class ChunkGraph {
 	 */
 	getOrderedModuleChunksIterable(module, sortFn) {
 		const cgm = this._getChunkGraphModule(module);
-
-		chunks
-		return chunks;
+		cgm.chunks.sortWith(sortFn);
+		return cgm.chunks;
 	}
 
 	/**
@@ -523,7 +511,7 @@ class ChunkGraph {
 	 */
 	getModuleChunks(module) {
 		const cgm = this._getChunkGraphModule(module);
-		return cgm.
+		return cgm.chunks.getFromCache(getArray);
 	}
 
 	/**
@@ -532,7 +520,7 @@ class ChunkGraph {
 	 */
 	getNumberOfModuleChunks(module) {
 		const cgm = this._getChunkGraphModule(module);
-		return cgm.
+		return cgm.chunks.size;
 	}
 
 	/**
@@ -541,10 +529,7 @@ class ChunkGraph {
 	 */
 	getModuleRuntimes(module) {
 		const cgm = this._getChunkGraphModule(module);
-
-		return cgm.chunkCombination._chunks.getFromUnorderedCache(
-			getModuleRuntimes
-		);
+		return cgm.chunks.getFromUnorderedCache(getModuleRuntimes);
 	}
 
 	/**
@@ -908,7 +893,8 @@ class ChunkGraph {
 		// Merge runtime
 		chunkA.runtime = mergeRuntime(chunkA.runtime, chunkB.runtime);
 
-
+		// getChunkModules is used here to create a clone, because disconnectChunkAndModule modifies
+		for (const module of this.getChunkModules(chunkB)) {
 			this.disconnectChunkAndModule(chunkB, module);
 			this.connectChunkAndModule(chunkA, module);
 		}
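Note: with this change each module's chunks are tracked directly in a SortableSet on ChunkGraphModule (this.chunks) rather than through the shared ChunkCombination structure, and getters such as getModuleChunks derive an array from that set through a cache (getFromCache / getFromUnorderedCache). A rough sketch of that cached-derivation pattern, simplified and not the actual SortableSet implementation:

  // Simplified stand-in for webpack's SortableSet: derived values (such as the
  // array form) are computed once and thrown away whenever the set mutates.
  class CachingSet extends Set {
    constructor() {
      super();
      this._cache = new Map(); // fn -> cached result
    }
    add(item) {
      this._cache.clear(); // any mutation invalidates cached results
      return super.add(item);
    }
    delete(item) {
      this._cache.clear();
      return super.delete(item);
    }
    getFromUnorderedCache(fn) {
      let value = this._cache.get(fn);
      if (value === undefined) {
        value = fn(this);
        this._cache.set(fn, value);
      }
      return value;
    }
  }
  const getArray = set => Array.from(set);
  const chunks = new CachingSet();
  chunks.add("chunk-a").add("chunk-b");
  chunks.getFromUnorderedCache(getArray); // computed once, reused until the set changes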
package/lib/Compilation.js
CHANGED
@@ -3633,7 +3633,7 @@ Or do you want to use the entrypoints '${name}' and '${runtime}' independently o
 					this.moduleGraph.removeConnection(dep);
 
 					if (this.chunkGraph) {
-						for (const chunk of this.chunkGraph.
+						for (const chunk of this.chunkGraph.getModuleChunks(
 							originalModule
 						)) {
 							this.patchChunksAfterReasonRemoval(originalModule, chunk);
package/lib/NormalModuleFactory.js
CHANGED
@@ -466,49 +466,68 @@ class NormalModuleFactory extends ModuleFactory {
 						: "") +
 					stringifyLoadersAndResource(loaders, resourceData.resource);
 
-				const resourceDataForRules = matchResourceData || resourceData;
-				const result = this.ruleSet.exec({
-					resource: resourceDataForRules.path,
-					realResource: resourceData.path,
-					resourceQuery: resourceDataForRules.query,
-					resourceFragment: resourceDataForRules.fragment,
-					scheme,
-					assertions,
-					mimetype: matchResourceData ? "" : resourceData.data.mimetype || "",
-					dependency: dependencyType,
-					descriptionData: matchResourceData
-						? undefined
-						: resourceData.data.descriptionFileData,
-					issuer: contextInfo.issuer,
-					compiler: contextInfo.compiler,
-					issuerLayer: contextInfo.issuerLayer || ""
-				});
 				const settings = {};
 				const useLoadersPost = [];
 				const useLoaders = [];
 				const useLoadersPre = [];
-
-
-
-
-
-
-
-
-
-
-
-
+
+				// handle .webpack[] suffix
+				let resource;
+				let match;
+				if (
+					matchResourceData &&
+					typeof (resource = matchResourceData.resource) === "string" &&
+					(match = /\.webpack\[([^\]]+)\]$/.exec(resource))
+				) {
+					settings.type = match[1];
+					matchResourceData.resource = matchResourceData.resource.slice(
+						0,
+						-settings.type.length - 10
+					);
+				} else {
+					settings.type = "javascript/auto";
+					const resourceDataForRules = matchResourceData || resourceData;
+					const result = this.ruleSet.exec({
+						resource: resourceDataForRules.path,
+						realResource: resourceData.path,
+						resourceQuery: resourceDataForRules.query,
+						resourceFragment: resourceDataForRules.fragment,
+						scheme,
+						assertions,
+						mimetype: matchResourceData
+							? ""
+							: resourceData.data.mimetype || "",
+						dependency: dependencyType,
+						descriptionData: matchResourceData
+							? undefined
+							: resourceData.data.descriptionFileData,
+						issuer: contextInfo.issuer,
+						compiler: contextInfo.compiler,
+						issuerLayer: contextInfo.issuerLayer || ""
+					});
+					for (const r of result) {
+						if (r.type === "use") {
+							if (!noAutoLoaders && !noPrePostAutoLoaders) {
+								useLoaders.push(r.value);
+							}
+						} else if (r.type === "use-post") {
+							if (!noPrePostAutoLoaders) {
+								useLoadersPost.push(r.value);
+							}
+						} else if (r.type === "use-pre") {
+							if (!noPreAutoLoaders && !noPrePostAutoLoaders) {
+								useLoadersPre.push(r.value);
+							}
+						} else if (
+							typeof r.value === "object" &&
+							r.value !== null &&
+							typeof settings[r.type] === "object" &&
+							settings[r.type] !== null
+						) {
+							settings[r.type] = cachedCleverMerge(settings[r.type], r.value);
+						} else {
+							settings[r.type] = r.value;
 						}
-					} else if (
-						typeof r.value === "object" &&
-						r.value !== null &&
-						typeof settings[r.type] === "object" &&
-						settings[r.type] !== null
-					) {
-						settings[r.type] = cachedCleverMerge(settings[r.type], r.value);
-					} else {
-						settings[r.type] = r.value;
 					}
 				}
 
@@ -528,23 +547,6 @@ class NormalModuleFactory extends ModuleFactory {
 				}
 				for (const loader of preLoaders) allLoaders.push(loader);
 				let type = settings.type;
-				if (!type) {
-					let resource;
-					let match;
-					if (
-						matchResourceData &&
-						typeof (resource = matchResourceData.resource) === "string" &&
-						(match = /\.webpack\[([^\]]+)\]$/.exec(resource))
-					) {
-						type = match[1];
-						matchResourceData.resource = matchResourceData.resource.slice(
-							0,
-							-type.length - 10
-						);
-					} else {
-						type = "javascript/auto";
-					}
-				}
 				const resolveOptions = settings.resolve;
 				const layer = settings.layer;
 				if (layer !== undefined && !layers) {
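Note: the .webpack[type] suffix handling that the second hunk removes from the post-rule-matching code now runs before the rule set (first hunk), so a matching suffix sets the module type directly and rule matching is skipped for that request. A small standalone illustration of what the regex and the slice do (the resource name here is made up):

  // ".webpack[" is 9 characters and the closing "]" is 1 more, hence the
  // slice of -type.length - 10 to cut the whole suffix off the resource.
  const resource = "./logo.svg.webpack[asset/inline]";
  const match = /\.webpack\[([^\]]+)\]$/.exec(resource);
  const type = match[1]; // "asset/inline"
  const stripped = resource.slice(0, -type.length - 10);
  // stripped === "./logo.svg"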
package/lib/optimize/SplitChunksPlugin.js
CHANGED
@@ -5,10 +5,11 @@
 
 "use strict";
 
-const
+const Chunk = require("../Chunk");
 const { STAGE_ADVANCED } = require("../OptimizationStages");
 const WebpackError = require("../WebpackError");
 const { requestToId } = require("../ids/IdHelpers");
+const { isSubset } = require("../util/SetHelpers");
 const SortableSet = require("../util/SortableSet");
 const {
 	compareModulesByIdentifier,
@@ -25,7 +26,6 @@ const MinMaxSizeWarning = require("./MinMaxSizeWarning");
 /** @typedef {import("../../declarations/WebpackOptions").OptimizationSplitChunksOptions} OptimizationSplitChunksOptions */
 /** @typedef {import("../../declarations/WebpackOptions").OptimizationSplitChunksSizes} OptimizationSplitChunksSizes */
 /** @typedef {import("../../declarations/WebpackOptions").Output} OutputOptions */
-/** @typedef {import("../Chunk")} Chunk */
 /** @typedef {import("../ChunkGraph")} ChunkGraph */
 /** @typedef {import("../ChunkGroup")} ChunkGroup */
 /** @typedef {import("../Compilation").AssetInfo} AssetInfo */
@@ -155,9 +155,9 @@ const MinMaxSizeWarning = require("./MinMaxSizeWarning");
  * @property {number} cacheGroupIndex
  * @property {string} name
  * @property {Record<string, number>} sizes
- * @property {
+ * @property {Set<Chunk>} chunks
  * @property {Set<Chunk>} reuseableChunks
- * @property {Set<
+ * @property {Set<bigint | Chunk>} chunksKeys
  */
 
 const defaultGetName = /** @type {GetName} */ (() => {});
@@ -204,6 +204,19 @@ const mapObject = (obj, fn) => {
 	return newObj;
 };
 
+/**
+ * @template T
+ * @param {Set<T>} a set
+ * @param {Set<T>} b other set
+ * @returns {boolean} true if at least one item of a is in b
+ */
+const isOverlap = (a, b) => {
+	for (const item of a) {
+		if (b.has(item)) return true;
+	}
+	return false;
+};
+
 const compareModuleIterables = compareIterables(compareModulesByIdentifier);
 
 /**
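Note: isOverlap is a small helper that reports whether two sets share at least one element; near the end of this file it is used to find chunksInfoMap entries whose chunks intersect the chunks just used for a new split chunk. A minimal usage sketch, with strings standing in for real Chunk objects:

  const a = new Set(["main", "vendors"]);
  const b = new Set(["vendors", "runtime"]);
  isOverlap(a, b); // true, both contain "vendors"
  isOverlap(a, new Set(["runtime"])); // false, no common member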
@@ -756,132 +769,207 @@ module.exports = class SplitChunksPlugin {
 				logger.time("prepare");
 				const chunkGraph = compilation.chunkGraph;
 				const moduleGraph = compilation.moduleGraph;
+				// Give each selected chunk an index (to create strings from chunks)
+				/** @type {Map<Chunk, bigint>} */
+				const chunkIndexMap = new Map();
+				const ZERO = BigInt("0");
+				const ONE = BigInt("1");
+				let index = ONE;
+				for (const chunk of chunks) {
+					chunkIndexMap.set(chunk, index);
+					index = index << ONE;
+				}
+				/**
+				 * @param {Iterable<Chunk>} chunks list of chunks
+				 * @returns {bigint | Chunk} key of the chunks
+				 */
+				const getKey = chunks => {
+					const iterator = chunks[Symbol.iterator]();
+					let result = iterator.next();
+					if (result.done) return ZERO;
+					const first = result.value;
+					result = iterator.next();
+					if (result.done) return first;
+					let key =
+						chunkIndexMap.get(first) | chunkIndexMap.get(result.value);
+					while (!(result = iterator.next()).done) {
+						key = key | chunkIndexMap.get(result.value);
+					}
+					return key;
+				};
+				const keyToString = key => {
+					if (typeof key === "bigint") return key.toString(16);
+					return chunkIndexMap.get(key).toString(16);
+				};
 
-				const
-				/** @type {Set<
-				const
+				const getChunkSetsInGraph = memoize(() => {
+					/** @type {Map<bigint, Set<Chunk>>} */
+					const chunkSetsInGraph = new Map();
+					/** @type {Set<Chunk>} */
+					const singleChunkSets = new Set();
 					for (const module of compilation.modules) {
-						const
-
-
+						const chunks = chunkGraph.getModuleChunksIterable(module);
+						const chunksKey = getKey(chunks);
+						if (typeof chunksKey === "bigint") {
+							if (!chunkSetsInGraph.has(chunksKey)) {
+								chunkSetsInGraph.set(chunksKey, new Set(chunks));
+							}
+						} else {
+							singleChunkSets.add(chunksKey);
+						}
 					}
-					return
+					return { chunkSetsInGraph, singleChunkSets };
 				});
 
 				/**
 				 * @param {Module} module the module
-				 * @returns {Iterable<
+				 * @returns {Iterable<Chunk[]>} groups of chunks with equal exports
 				 */
 				const groupChunksByExports = module => {
 					const exportsInfo = moduleGraph.getExportsInfo(module);
 					const groupedByUsedExports = new Map();
 					for (const chunk of chunkGraph.getModuleChunksIterable(module)) {
 						const key = exportsInfo.getUsageKey(chunk.runtime);
-						const
-
-
+						const list = groupedByUsedExports.get(key);
+						if (list !== undefined) {
+							list.push(chunk);
+						} else {
+							groupedByUsedExports.set(key, [chunk]);
+						}
 					}
 					return groupedByUsedExports.values();
 				};
 
-				/** @type {Map<Module, Iterable<
+				/** @type {Map<Module, Iterable<Chunk[]>>} */
 				const groupedByExportsMap = new Map();
 
-				const
-				/** @type {Set<
-				const
+				const getExportsChunkSetsInGraph = memoize(() => {
+					/** @type {Map<bigint, Set<Chunk>>} */
+					const chunkSetsInGraph = new Map();
+					/** @type {Set<Chunk>} */
+					const singleChunkSets = new Set();
 					for (const module of compilation.modules) {
 						const groupedChunks = Array.from(groupChunksByExports(module));
 						groupedByExportsMap.set(module, groupedChunks);
-						for (const
-
+						for (const chunks of groupedChunks) {
+							if (chunks.length === 1) {
+								singleChunkSets.add(chunks[0]);
+							} else {
+								const chunksKey = /** @type {bigint} */ (getKey(chunks));
+								if (!chunkSetsInGraph.has(chunksKey)) {
+									chunkSetsInGraph.set(chunksKey, new Set(chunks));
+								}
+							}
 						}
 					}
-					return
+					return { chunkSetsInGraph, singleChunkSets };
 				});
 
 				// group these set of chunks by count
 				// to allow to check less sets via isSubset
 				// (only smaller sets can be subset)
-				const
-				/** @type {Map<number,
-				const
-				for (const chunksSet of
+				const groupChunkSetsByCount = chunkSets => {
+					/** @type {Map<number, Array<Set<Chunk>>>} */
+					const chunkSetsByCount = new Map();
+					for (const chunksSet of chunkSets) {
 						const count = chunksSet.size;
-						let array =
+						let array = chunkSetsByCount.get(count);
 						if (array === undefined) {
 							array = [];
-
+							chunkSetsByCount.set(count, array);
 						}
 						array.push(chunksSet);
 					}
-					return
+					return chunkSetsByCount;
 				};
-				const
-
+				const getChunkSetsByCount = memoize(() =>
+					groupChunkSetsByCount(
+						getChunkSetsInGraph().chunkSetsInGraph.values()
+					)
 				);
-				const
-
+				const getExportsChunkSetsByCount = memoize(() =>
+					groupChunkSetsByCount(
+						getExportsChunkSetsInGraph().chunkSetsInGraph.values()
+					)
 				);
 
-
-
-
-
-
-
-				/** @type {Map<
+				// Create a list of possible combinations
+				const createGetCombinations = (
+					chunkSets,
+					singleChunkSets,
+					chunkSetsByCount
+				) => {
+					/** @type {Map<bigint | Chunk, (Set<Chunk> | Chunk)[]>} */
 					const combinationsCache = new Map();
 
-
-
-					 * @returns {ChunkCombination[]} combinations
-					 */
-					return chunkCombination => {
-						const cacheEntry = combinationsCache.get(chunkCombination);
+					return key => {
+						const cacheEntry = combinationsCache.get(key);
 						if (cacheEntry !== undefined) return cacheEntry;
-						if (
-						const result = [
-						combinationsCache.set(
+						if (key instanceof Chunk) {
+							const result = [key];
+							combinationsCache.set(key, result);
 							return result;
 						}
-
-
-
+						const chunksSet = chunkSets.get(key);
+						/** @type {(Set<Chunk> | Chunk)[]} */
+						const array = [chunksSet];
+						for (const [count, setArray] of chunkSetsByCount) {
 							// "equal" is not needed because they would have been merge in the first step
-							if (count <
+							if (count < chunksSet.size) {
 								for (const set of setArray) {
-									if (
+									if (isSubset(chunksSet, set)) {
 										array.push(set);
 									}
 								}
 							}
 						}
-
+						for (const chunk of singleChunkSets) {
+							if (chunksSet.has(chunk)) {
+								array.push(chunk);
+							}
+						}
+						combinationsCache.set(key, array);
 						return array;
 					};
 				};
 
 				const getCombinationsFactory = memoize(() => {
-
+					const { chunkSetsInGraph, singleChunkSets } = getChunkSetsInGraph();
+					return createGetCombinations(
+						chunkSetsInGraph,
+						singleChunkSets,
+						getChunkSetsByCount()
+					);
 				});
 				const getCombinations = key => getCombinationsFactory()(key);
 
 				const getExportsCombinationsFactory = memoize(() => {
-
+					const { chunkSetsInGraph, singleChunkSets } =
+						getExportsChunkSetsInGraph();
+					return createGetCombinations(
+						chunkSetsInGraph,
+						singleChunkSets,
+						getExportsChunkSetsByCount()
+					);
 				});
 				const getExportsCombinations = key =>
 					getExportsCombinationsFactory()(key);
 
-				/**
+				/**
+				 * @typedef {Object} SelectedChunksResult
+				 * @property {Chunk[]} chunks the list of chunks
+				 * @property {bigint | Chunk} key a key of the list
+				 */
+
+				/** @type {WeakMap<Set<Chunk> | Chunk, WeakMap<ChunkFilterFunction, SelectedChunksResult>>} */
 				const selectedChunksCacheByChunksSet = new WeakMap();
 
 				/**
-				 * get
+				 * get list and key by applying the filter function to the list
 				 * It is cached for performance reasons
-				 * @param {
+				 * @param {Set<Chunk> | Chunk} chunks list of chunks
 				 * @param {ChunkFilterFunction} chunkFilter filter function for chunks
-				 * @returns {
+				 * @returns {SelectedChunksResult} list and key
 				 */
 				const getSelectedChunks = (chunks, chunkFilter) => {
 					let entry = selectedChunksCacheByChunksSet.get(chunks);
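Note on the bigint keys introduced above: each chunk that enters the plugin gets its own power-of-two bit, and a set of chunks is identified by OR-ing the bits of its members, so equal sets yield equal keys no matter the iteration order. The real getKey also special-cases empty and single-chunk iterables (returning ZERO or the Chunk itself); the following standalone sketch, with strings standing in for Chunk objects, shows only the bitmask idea:

  const chunkIndexMap = new Map();
  let bit = 1n;
  for (const chunk of ["main", "vendors", "runtime"]) {
    chunkIndexMap.set(chunk, bit);
    bit <<= 1n; // the next chunk gets the next bit
  }
  const getKey = chunks => {
    let key = 0n;
    for (const chunk of chunks) key |= chunkIndexMap.get(chunk);
    return key;
  };
  getKey(["main", "runtime"]); // 5n (binary 101)
  getKey(["runtime", "main"]); // 5n, same set so same key
  getKey(["main", "runtime"]).toString(16); // "5", the form keyToString produces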
@@ -889,16 +977,22 @@ module.exports = class SplitChunksPlugin {
 						entry = new WeakMap();
 						selectedChunksCacheByChunksSet.set(chunks, entry);
 					}
-					/** @type {
+					/** @type {SelectedChunksResult} */
 					let entry2 = entry.get(chunkFilter);
 					if (entry2 === undefined) {
-						/** @type {
-
-
-						if (chunkFilter(
-
+						/** @type {Chunk[]} */
+						const selectedChunks = [];
+						if (chunks instanceof Chunk) {
+							if (chunkFilter(chunks)) selectedChunks.push(chunks);
+						} else {
+							for (const chunk of chunks) {
+								if (chunkFilter(chunk)) selectedChunks.push(chunk);
+							}
 						}
-						entry2 =
+						entry2 = {
+							chunks: selectedChunks,
+							key: getKey(selectedChunks)
+						};
 						entry.set(chunkFilter, entry2);
 					}
 					return entry2;
@@ -917,7 +1011,8 @@ module.exports = class SplitChunksPlugin {
 				/**
 				 * @param {CacheGroup} cacheGroup the current cache group
 				 * @param {number} cacheGroupIndex the index of the cache group of ordering
-				 * @param {
+				 * @param {Chunk[]} selectedChunks chunks selected for this module
+				 * @param {bigint | Chunk} selectedChunksKey a key of selectedChunks
 				 * @param {Module} module the current module
 				 * @returns {void}
 				 */
@@ -925,20 +1020,25 @@ module.exports = class SplitChunksPlugin {
 					cacheGroup,
 					cacheGroupIndex,
 					selectedChunks,
+					selectedChunksKey,
 					module
 				) => {
 					// Break if minimum number of chunks is not reached
-					if (selectedChunks.
+					if (selectedChunks.length < cacheGroup.minChunks) return;
 					// Determine name for split chunk
 					const name = cacheGroup.getName(
 						module,
-						selectedChunks
+						selectedChunks,
 						cacheGroup.key
 					);
 					// Check if the name is ok
 					const existingChunk = compilation.namedChunks.get(name);
 					if (existingChunk) {
-						const parentValidationKey = `${name}|${
+						const parentValidationKey = `${name}|${
+							typeof selectedChunksKey === "bigint"
+								? selectedChunksKey
+								: selectedChunksKey.debugId
+						}`;
 						const valid = alreadyValidatedParents.get(parentValidationKey);
 						if (valid === false) return;
 						if (valid === undefined) {
@@ -947,7 +1047,7 @@ module.exports = class SplitChunksPlugin {
 							let isInAllParents = true;
 							/** @type {Set<ChunkGroup>} */
 							const queue = new Set();
-							for (const chunk of selectedChunks
+							for (const chunk of selectedChunks) {
 								for (const group of chunk.groupsIterable) {
 									queue.add(group);
 								}
@@ -993,7 +1093,9 @@ module.exports = class SplitChunksPlugin {
 					// This automatically merges equal names
 					const key =
 						cacheGroup.key +
-						(name
+						(name
							? ` name:${name}`
							: ` chunks:${keyToString(selectedChunksKey)}`);
 					// Add module to maps
 					let info = chunksInfoMap.get(key);
 					if (info === undefined) {
@@ -1008,9 +1110,9 @@ module.exports = class SplitChunksPlugin {
 								cacheGroupIndex,
 								name,
 								sizes: {},
-								chunks:
+								chunks: new Set(),
 								reuseableChunks: new Set(),
-
+								chunksKeys: new Set()
 							})
 						);
 					}
@@ -1021,10 +1123,12 @@ module.exports = class SplitChunksPlugin {
 							info.sizes[type] = (info.sizes[type] || 0) + module.size(type);
 						}
 					}
-					const oldChunksKeysSize = info.
-					info.
-					if (oldChunksKeysSize !== info.
-
+					const oldChunksKeysSize = info.chunksKeys.size;
+					info.chunksKeys.add(selectedChunksKey);
+					if (oldChunksKeysSize !== info.chunksKeys.size) {
+						for (const chunk of selectedChunks) {
+							info.chunks.add(chunk);
+						}
 					}
 				};
 
@@ -1045,56 +1149,50 @@ module.exports = class SplitChunksPlugin {
 							continue;
 						}
 
-
-
+						// Prepare some values (usedExports = false)
+						const getCombs = memoize(() => {
+							const chunks = chunkGraph.getModuleChunksIterable(module);
+							const chunksKey = getKey(chunks);
+							return getCombinations(chunksKey);
+						});
+
+						// Prepare some values (usedExports = true)
+						const getCombsByUsedExports = memoize(() => {
+							// fill the groupedByExportsMap
+							getExportsChunkSetsInGraph();
+							/** @type {Set<Set<Chunk> | Chunk>} */
+							const set = new Set();
+							const groupedByUsedExports = groupedByExportsMap.get(module);
+							for (const chunks of groupedByUsedExports) {
+								const chunksKey = getKey(chunks);
+								for (const comb of getExportsCombinations(chunksKey))
+									set.add(comb);
+							}
+							return set;
+						});
 
 						let cacheGroupIndex = 0;
 						for (const cacheGroupSource of cacheGroups) {
 							const cacheGroup = this._getCacheGroup(cacheGroupSource);
 
-
-
-
-							/** @type {Iterable<ChunkCombination>} */
-							let combs;
-							if (cacheGroup.usedExports) {
-								// fill the groupedByExportsMap
-								getExportsChunkCombinationsInGraph();
-								/** @type {Set<ChunkCombination>} */
-								const set = new Set();
-								const groupedByUsedExports = groupedByExportsMap.get(module);
-								for (const chunkCombination of groupedByUsedExports) {
-									const preSelectedChunks = getSelectedChunks(
-										chunkCombination,
-										cacheGroup.chunksFilter
-									);
-									// Break if minimum number of chunks is not reached
-									if (preSelectedChunks.size < cacheGroup.minChunks) continue;
-
-									for (const comb of getExportsCombinations(preSelectedChunks))
-										set.add(comb);
-								}
-								combs = set;
-							} else {
-								const preSelectedChunks = getSelectedChunks(
-									chunkCombination,
-									cacheGroup.chunksFilter
-								);
-								// Break if minimum number of chunks is not reached
-								if (preSelectedChunks.size < cacheGroup.minChunks) continue;
-
-								combs = getCombinations(preSelectedChunks);
-							}
+							const combs = cacheGroup.usedExports
+								? getCombsByUsedExports()
+								: getCombs();
 							// For all combination of chunk selection
-							for (const
+							for (const chunkCombination of combs) {
 								// Break if minimum number of chunks is not reached
-								const count =
+								const count =
+									chunkCombination instanceof Chunk ? 1 : chunkCombination.size;
 								if (count < cacheGroup.minChunks) continue;
+								// Select chunks by configuration
+								const { chunks: selectedChunks, key: selectedChunksKey } =
+									getSelectedChunks(chunkCombination, cacheGroup.chunksFilter);
 
 								addModuleToChunksInfoMap(
 									cacheGroup,
 									cacheGroupIndex,
 									selectedChunks,
+									selectedChunksKey,
 									module
 								);
 							}
@@ -1186,12 +1284,12 @@ module.exports = class SplitChunksPlugin {
 					const chunkByName = compilation.namedChunks.get(chunkName);
 					if (chunkByName !== undefined) {
 						newChunk = chunkByName;
-						const
-
-
+						const oldSize = item.chunks.size;
+						item.chunks.delete(newChunk);
+						isExistingChunk = item.chunks.size !== oldSize;
 					}
 				} else if (item.cacheGroup.reuseExistingChunk) {
-					outer: for (const chunk of item.chunks
+					outer: for (const chunk of item.chunks) {
 						if (
 							chunkGraph.getNumberOfChunkModules(chunk) !==
 							item.modules.size
@@ -1225,7 +1323,7 @@ module.exports = class SplitChunksPlugin {
 					}
 				}
 				if (newChunk) {
-					item.chunks
+					item.chunks.delete(newChunk);
 					chunkName = undefined;
 					isExistingChunk = true;
 					isReusedWithAllModules = true;
@@ -1236,7 +1334,7 @@ module.exports = class SplitChunksPlugin {
 					item.cacheGroup._conditionalEnforce &&
 					checkMinSize(item.sizes, item.cacheGroup.enforceSizeThreshold);
 
-
+				const usedChunks = new Set(item.chunks);
 
 				// Check if maxRequests condition can be fulfilled
 				if (
@@ -1244,7 +1342,7 @@ module.exports = class SplitChunksPlugin {
 					(Number.isFinite(item.cacheGroup.maxInitialRequests) ||
 						Number.isFinite(item.cacheGroup.maxAsyncRequests))
 				) {
-					for (const chunk of usedChunks
+					for (const chunk of usedChunks) {
 						// respect max requests
 						const maxRequests = chunk.isOnlyInitial()
 							? item.cacheGroup.maxInitialRequests
@@ -1258,28 +1356,30 @@ module.exports = class SplitChunksPlugin {
 							isFinite(maxRequests) &&
 							getRequests(chunk) >= maxRequests
 						) {
-							usedChunks
+							usedChunks.delete(chunk);
 						}
 					}
 				}
 
-				outer: for (const chunk of usedChunks
+				outer: for (const chunk of usedChunks) {
 					for (const module of item.modules) {
 						if (chunkGraph.isModuleInChunk(module, chunk)) continue outer;
 					}
-					usedChunks
+					usedChunks.delete(chunk);
 				}
 
 				// Were some (invalid) chunks removed from usedChunks?
 				// => readd all modules to the queue, as things could have been changed
-				if (usedChunks
-					if (isExistingChunk) usedChunks
+				if (usedChunks.size < item.chunks.size) {
+					if (isExistingChunk) usedChunks.add(newChunk);
 					if (usedChunks.size >= item.cacheGroup.minChunks) {
+						const chunksArr = Array.from(usedChunks);
 						for (const module of item.modules) {
 							addModuleToChunksInfoMap(
 								item.cacheGroup,
 								item.cacheGroupIndex,
-
+								chunksArr,
+								getKey(usedChunks),
 								module
 							);
 						}
@@ -1293,7 +1393,7 @@ module.exports = class SplitChunksPlugin {
 					item.cacheGroup._validateRemainingSize &&
 					usedChunks.size === 1
 				) {
-					const [chunk] = usedChunks
+					const [chunk] = usedChunks;
 					let chunkSizes = Object.create(null);
 					for (const module of chunkGraph.getChunkModulesIterable(chunk)) {
 						if (!item.modules.has(module)) {
@@ -1327,7 +1427,7 @@ module.exports = class SplitChunksPlugin {
 					newChunk = compilation.addChunk(chunkName);
 				}
 				// Walk through all chunks
-				for (const chunk of usedChunks
+				for (const chunk of usedChunks) {
 					// Add graph connections for splitted chunk
 					chunk.split(newChunk);
 				}
@@ -1357,14 +1457,14 @@ module.exports = class SplitChunksPlugin {
 						// Add module to new chunk
 						chunkGraph.connectChunkAndModule(newChunk, module);
 						// Remove module from used chunks
-						for (const chunk of usedChunks
+						for (const chunk of usedChunks) {
 							chunkGraph.disconnectChunkAndModule(chunk, module);
 						}
 					}
 				} else {
 					// Remove all modules from used chunks
 					for (const module of item.modules) {
-						for (const chunk of usedChunks
+						for (const chunk of usedChunks) {
 							chunkGraph.disconnectChunkAndModule(chunk, module);
 						}
 					}
@@ -1406,7 +1506,7 @@ module.exports = class SplitChunksPlugin {
 
 				// remove all modules from other entries and update size
 				for (const [key, info] of chunksInfoMap) {
-					if (info.chunks
+					if (isOverlap(info.chunks, usedChunks)) {
 						// update modules and total size
 						// may remove it from the map when < minSize
 						let updated = false;
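Note on the chunksInfoMap keys used in this plugin: when a cache group has no fixed name, the entry key combines the cache-group key with the hex form of the selected-chunks bitmask, so the same selection of chunks always maps to the same entry. A hypothetical illustration (the cache-group key and bitmask value are invented for the example):

  const cacheGroupKey = "defaultVendors";
  const name = undefined; // no fixed chunk name configured
  const selectedChunksKey = 5n; // bitmask standing for two selected chunks
  const keyToString = k => (typeof k === "bigint" ? k.toString(16) : String(k));
  const key =
    cacheGroupKey +
    (name ? ` name:${name}` : ` chunks:${keyToString(selectedChunksKey)}`);
  // key === "defaultVendors chunks:5"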
package/lib/wasm-sync/WebAssemblyInInitialChunkError.js
CHANGED
@@ -5,7 +5,6 @@
 "use strict";
 
 const WebpackError = require("../WebpackError");
-const { someInIterable } = require("../util/IterableHelpers");
 
 /** @typedef {import("../ChunkGraph")} ChunkGraph */
 /** @typedef {import("../Module")} Module */
@@ -43,11 +42,7 @@ const getInitialModuleChains = (
 		for (const connection of moduleGraph.getIncomingConnections(head)) {
 			const newHead = connection.originModule;
 			if (newHead) {
-				if (
-					!someInIterable(chunkGraph.getModuleChunksIterable(newHead), c =>
-						c.canBeInitial()
-					)
-				)
+				if (!chunkGraph.getModuleChunks(newHead).some(c => c.canBeInitial()))
 					continue;
 				final = false;
 				if (alreadyReferencedModules.has(newHead)) continue;
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "webpack",
-  "version": "5.58.0",
+  "version": "5.58.1",
   "author": "Tobias Koppers @sokra",
   "description": "Packs CommonJs/AMD modules for the browser. Allows to split your codebase into multiple bundles, which can be loaded on demand. Support loaders to preprocess files, i.e. json, jsx, es7, css, less, ... and your custom stuff.",
   "license": "MIT",
package/types.d.ts
CHANGED
@@ -757,17 +757,6 @@ declare class Chunk {
 		filterFn?: (c: Chunk, chunkGraph: ChunkGraph) => boolean
 	): Record<string | number, Record<string, (string | number)[]>>;
 }
-declare abstract class ChunkCombination {
-	debugId: number;
-	size: number;
-	readonly chunksIterable: Iterable<Chunk>;
-	with(chunk: Chunk): ChunkCombination;
-	without(chunk: Chunk): ChunkCombination;
-	withAll(other?: any): any;
-	hasSharedChunks(other?: any): boolean;
-	isSubset(other: ChunkCombination): boolean;
-	getChunks(): Chunk[];
-}
 declare class ChunkGraph {
 	constructor(moduleGraph: ModuleGraph, hashFunction?: string | typeof Hash);
 	moduleGraph: ModuleGraph;
@@ -785,7 +774,6 @@ declare class ChunkGraph {
 	isModuleInChunk(module: Module, chunk: Chunk): boolean;
 	isModuleInChunkGroup(module: Module, chunkGroup: ChunkGroup): boolean;
 	isEntryModule(module: Module): boolean;
-	getModuleChunkCombination(module: Module): ChunkCombination;
 	getModuleChunksIterable(module: Module): Iterable<Chunk>;
 	getOrderedModuleChunksIterable(
 		module: Module,
package/lib/ChunkCombination.js
DELETED
@@ -1,187 +0,0 @@
-/*
-	MIT License http://www.opensource.org/licenses/mit-license.php
-	Author Tobias Koppers @sokra
-*/
-
-"use strict";
-
-const SortableSet = require("./util/SortableSet");
-
-/** @typedef {import("./Chunk")} Chunk */
-
-/**
- * @template T
- * @param {SortableSet<T>} set the set
- * @returns {T[]} set as array
- */
-const getArray = set => {
-	return Array.from(set);
-};
-
-let debugId = 1;
-
-class ChunkCombination {
-	constructor() {
-		this.debugId = debugId++;
-		this.size = 0;
-		/**
-		 * (do not modify)
-		 * @type {SortableSet<Chunk>}
-		 */
-		this._chunks = new SortableSet();
-		/** @type {ChunkCombination} */
-		this._parent = undefined;
-		this._lastChunk = undefined;
-		/** @type {WeakMap<Chunk, ChunkCombination>} */
-		this._addMap = new WeakMap();
-		/** @type {WeakMap<Chunk, ChunkCombination>} */
-		this._removeCache = new WeakMap();
-	}
-
-	/**
-	 * @returns {Iterable<Chunk>} iterable of chunks
-	 */
-	get chunksIterable() {
-		return this._chunks;
-	}
-
-	/**
-	 * @param {Chunk} chunk chunk to add
-	 * @returns {ChunkCombination} new chunk combination
-	 */
-	with(chunk) {
-		if (this._chunks.has(chunk)) return this;
-		let next = this._addMap.get(chunk);
-		if (next !== undefined) return next;
-		// must insert chunks in order to maintain order-independent identity of ChunkCombination
-		if (!this._parent || this._lastChunk.debugId < chunk.debugId) {
-			next = new ChunkCombination();
-			for (const chunk of this._chunks) {
-				next._chunks.add(chunk);
-			}
-			next._chunks.add(chunk);
-			next._removeCache.set(chunk, this);
-			next.size = this.size + 1;
-			next._parent = this;
-			next._lastChunk = chunk;
-		} else {
-			next = this._parent.with(chunk).with(this._lastChunk);
-		}
-		this._addMap.set(chunk, next);
-		return next;
-	}
-
-	/**
-	 * @param {Chunk} chunk chunk to remove
-	 * @returns {ChunkCombination} new chunk combination
-	 */
-	without(chunk) {
-		if (!this._chunks.has(chunk)) return this;
-		let next = this._removeCache.get(chunk);
-		if (next !== undefined) return next;
-		const stack = [this._lastChunk];
-		let current = this._parent;
-		while (current._lastChunk !== chunk) {
-			stack.push(current._lastChunk);
-			current = current._parent;
-		}
-		next = current._parent;
-		while (stack.length) next = next.with(stack.pop());
-		this._removeCache.set(chunk, next);
-		return next;
-	}
-
-	withAll(other) {
-		if (other.size === 0) return this;
-		if (this.size === 0) return other;
-		const stack = [];
-		/** @type {ChunkCombination} */
-		let current = this;
-		for (;;) {
-			if (current._lastChunk.debugId < other._lastChunk.debugId) {
-				stack.push(other._lastChunk);
-				other = other._parent;
-				if (other.size === 0) {
-					while (stack.length) current = current.with(stack.pop());
-					return current;
-				}
-			} else {
-				stack.push(current._lastChunk);
-				current = current._parent;
-				if (current.size === 0) {
-					while (stack.length) other = other.with(stack.pop());
-					return other;
-				}
-			}
-		}
-	}
-
-	hasSharedChunks(other) {
-		if (this.size > other.size) {
-			const chunks = this._chunks;
-			for (const chunk of other._chunks) {
-				if (chunks.has(chunk)) return true;
-			}
-		} else {
-			const chunks = other._chunks;
-			for (const chunk of this._chunks) {
-				if (chunks.has(chunk)) return true;
-			}
-		}
-		return false;
-	}
-
-	/**
-	 * @param {ChunkCombination} other other combination
-	 * @returns {boolean} true, when other is a subset of this combination
-	 */
-	isSubset(other) {
-		// TODO: This could be more efficient when using the debugId order of the combinations
-		/** @type {ChunkCombination} */
-		let current = this;
-		let otherSize = other.size;
-		let currentSize = current.size;
-		if (otherSize === 0) return true;
-		for (;;) {
-			if (currentSize === 0) return false;
-			if (otherSize === 1) {
-				if (currentSize === 1) {
-					return current._lastChunk === other._lastChunk;
-				} else {
-					return current._chunks.has(other._lastChunk);
-				}
-			}
-			if (otherSize * 8 < currentSize) {
-				// go for the Set access when current >> other
-				const chunks = current._chunks;
-				for (const item of other._chunks) {
-					if (!chunks.has(item)) return false;
-				}
-				return true;
-			}
-			const otherId = other._lastChunk.debugId;
-			// skip over nodes in current that have higher ids
-			while (otherId < current._lastChunk.debugId) {
-				current = current._parent;
-				currentSize--;
-				if (currentSize === 0) return false;
-			}
-			if (otherId > current._lastChunk.debugId) {
-				return false;
-			}
-			other = other._parent;
-			otherSize--;
-			if (otherSize === 0) return true;
-			current = current._parent;
-			currentSize--;
-		}
-	}
-
-	getChunks() {
-		return this._chunks.getFromUnorderedCache(getArray);
-	}
-}
-
-ChunkCombination.empty = new ChunkCombination();
-
-module.exports = ChunkCombination;