webpack 4.40.0 → 4.41.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/lib/Chunk.js CHANGED
@@ -385,7 +385,7 @@ class Chunk {
  /**
  *
  * @param {Chunk} otherChunk the chunk to integrate with
- * @param {ModuleReason} reason reason why the module is being integrated
+ * @param {string} reason reason why the module is being integrated
  * @returns {boolean} returns true or false if integration succeeds or fails
  */
  integrate(otherChunk, reason) {
@@ -462,6 +462,11 @@ class Chunk {
  return false;
  }

+ /**
+ * @param {Chunk} a chunk
+ * @param {Chunk} b chunk
+ * @returns {boolean} true, if a is always available when b is reached
+ */
  const isAvailable = (a, b) => {
  const queue = new Set(b.groupsIterable);
  for (const chunkGroup of queue) {
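The second hunk above is cut off after the first lines of the helper it documents. Going by the new JSDoc, the availability check plausibly continues along these lines; this is a sketch using the chunk-group APIs referenced in the hunk, not the verbatim source:

const isAvailable = (a, b) => {
	// every chunk group that can reach b, walking parent groups upwards
	const queue = new Set(b.groupsIterable);
	for (const chunkGroup of queue) {
		// a is part of this group, so this path already has it
		if (a.isInGroup(chunkGroup)) continue;
		// an entry group was reached without finding a: not always available
		if (chunkGroup.isInitial()) return false;
		for (const parent of chunkGroup.parentsIterable) {
			queue.add(parent);
		}
	}
	return true;
};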
package/lib/Compilation.js CHANGED
@@ -37,11 +37,11 @@ const compareLocations = require("./compareLocations");
  const { Logger, LogType } = require("./logging/Logger");
  const ErrorHelpers = require("./ErrorHelpers");
  const buildChunkGraph = require("./buildChunkGraph");
+ const WebpackError = require("./WebpackError");

  /** @typedef {import("./Module")} Module */
  /** @typedef {import("./Compiler")} Compiler */
  /** @typedef {import("webpack-sources").Source} Source */
- /** @typedef {import("./WebpackError")} WebpackError */
  /** @typedef {import("./DependenciesBlockVariable")} DependenciesBlockVariable */
  /** @typedef {import("./dependencies/SingleEntryDependency")} SingleEntryDependency */
  /** @typedef {import("./dependencies/MultiEntryDependency")} MultiEntryDependency */
@@ -221,6 +221,25 @@ const addAllToSet = (set, otherSet) => {
  }
  };

+ /**
+ * @param {Source} a a source
+ * @param {Source} b another source
+ * @returns {boolean} true, when both sources are equal
+ */
+ const isSourceEqual = (a, b) => {
+ if (a === b) return true;
+ // TODO webpack 5: check .buffer() instead, it's called anyway during emit
+ /** @type {Buffer|string} */
+ let aSource = a.source();
+ /** @type {Buffer|string} */
+ let bSource = b.source();
+ if (aSource === bSource) return true;
+ if (typeof aSource === "string" && typeof bSource === "string") return false;
+ if (!Buffer.isBuffer(aSource)) aSource = Buffer.from(aSource, "utf-8");
+ if (!Buffer.isBuffer(bSource)) bSource = Buffer.from(bSource, "utf-8");
+ return aSource.equals(bSource);
+ };
+
  class Compilation extends Tapable {
  /**
  * Creates an instance of Compilation.
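isSourceEqual is a module-local helper, so the calls below are illustrative only; RawSource comes from webpack-sources. The point of the helper is that two sources carrying identical bytes compare equal even when one is backed by a string and the other by a Buffer:

const { RawSource } = require("webpack-sources");

const fromString = new RawSource("export default 42;\n");
const fromBuffer = new RawSource(Buffer.from("export default 42;\n", "utf-8"));

// identity and plain string equality are checked first; otherwise both sides
// are normalized to UTF-8 Buffers and compared byte by byte
isSourceEqual(fromString, fromBuffer); // true
isSourceEqual(fromString, new RawSource("export default 43;\n")); // false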
@@ -1990,10 +2009,16 @@ class Compilation extends Tapable {
  */
  emitAsset(file, source, assetInfo = {}) {
  if (this.assets[file]) {
- if (this.assets[file] !== source) {
- throw new Error(
- `Conflict: Multiple assets emit to the same filename ${file}`
+ if (!isSourceEqual(this.assets[file], source)) {
+ // TODO webpack 5: make this an error instead
+ this.warnings.push(
+ new WebpackError(
+ `Conflict: Multiple assets emit different content to the same filename ${file}`
+ )
  );
+ this.assets[file] = source;
+ this.assetsInfo.set(file, assetInfo);
+ return;
  }
  const oldInfo = this.assetsInfo.get(file);
  this.assetsInfo.set(file, Object.assign({}, oldInfo, assetInfo));
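For plugin authors the practical effect is: emitting the same filename twice with identical content stays silent, while conflicting content no longer throws; it pushes a WebpackError onto compilation.warnings and lets the later asset win (the TODO says webpack 5 will turn this back into an error). A sketch with an illustrative plugin name:

const { RawSource } = require("webpack-sources");

class EmitTwicePlugin {
	apply(compiler) {
		compiler.hooks.emit.tap("EmitTwicePlugin", compilation => {
			compilation.emitAsset("greeting.txt", new RawSource("hello"));
			// same filename, same bytes: accepted silently (isSourceEqual returns true)
			compilation.emitAsset("greeting.txt", new RawSource("hello"));
			// same filename, different bytes: threw in 4.40.0, now a warning
			// plus an overwrite of the stored asset and its info
			compilation.emitAsset("greeting.txt", new RawSource("bye"));
		});
	}
}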
package/lib/HotModuleReplacementPlugin.js CHANGED
@@ -150,6 +150,10 @@ module.exports = class HotModuleReplacementPlugin {
  compiler.hooks.compilation.tap(
  "HotModuleReplacementPlugin",
  (compilation, { normalModuleFactory }) => {
+ // This applies the HMR plugin only to the targeted compiler
+ // It should not affect child compilations
+ if (compilation.compiler !== compiler) return;
+
  const hotUpdateChunkTemplate = compilation.hotUpdateChunkTemplate;
  if (!hotUpdateChunkTemplate) return;

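The guard matters because child compilers copy most of the parent compiler's hook taps, so a tap on compiler.hooks.compilation also fires for compilations created through createChildCompiler (html-webpack-plugin and similar tools rely on child compilers). The same defensive pattern, sketched for any plugin that should only touch the top-level build (plugin name illustrative):

class TopLevelOnlyPlugin {
	apply(compiler) {
		compiler.hooks.compilation.tap("TopLevelOnlyPlugin", compilation => {
			// compilation.compiler is the compiler that produced this compilation;
			// for child compilations it differs from the compiler the plugin was applied to
			if (compilation.compiler !== compiler) return;
			// ...work that must not run for child compilations
		});
	}
}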
package/lib/NormalModule.js CHANGED
@@ -87,6 +87,7 @@ class NormalModule extends Module {
  // Info from Build
  this.error = null;
  this._source = null;
+ this._sourceSize = null;
  this._buildHash = "";
  this.buildTimestamp = undefined;
  /** @private @type {Map<string, CachedSourceEntry>} */
@@ -347,6 +348,7 @@ class NormalModule extends Module {
  resourceBuffer,
  sourceMap
  );
+ this._sourceSize = null;
  this._ast =
  typeof extraInfo === "object" &&
  extraInfo !== null &&
@@ -366,6 +368,7 @@ class NormalModule extends Module {
  this._source = new RawSource(
  "throw new Error(" + JSON.stringify(this.error.message) + ");"
  );
+ this._sourceSize = null;
  this._ast = null;
  }

@@ -425,6 +428,7 @@ class NormalModule extends Module {
  this.buildTimestamp = Date.now();
  this.built = true;
  this._source = null;
+ this._sourceSize = null;
  this._ast = null;
  this._buildHash = "";
  this.error = null;
@@ -559,7 +563,10 @@ class NormalModule extends Module {
  }

  size() {
- return this._source ? this._source.size() : -1;
+ if (this._sourceSize === null) {
+ this._sourceSize = this._source ? this._source.size() : -1;
+ }
+ return this._sourceSize;
  }

  /**
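Taken together, the NormalModule hunks are a memoize-and-invalidate pattern: size() caches its result in _sourceSize, and every place that assigns _source also resets the cache. The same pattern in miniature (illustrative class, not webpack API):

class SizedSource {
	constructor() {
		this._source = null;
		this._sourceSize = null; // cached result of size()
	}
	setSource(source) {
		this._source = source;
		this._sourceSize = null; // invalidate wherever the input can change
	}
	size() {
		if (this._sourceSize === null) {
			this._sourceSize = this._source ? this._source.size() : -1;
		}
		return this._sourceSize;
	}
}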
package/lib/WebpackOptionsValidationError.js CHANGED
@@ -362,6 +362,10 @@ class WebpackOptionsValidationError extends WebpackError {
  err.parentSchema
  )}`;
  }
+ } else if (err.keyword === "not") {
+ return `${dataPath} should not be ${getSchemaPartText(
+ err.schema
+ )}\n${getSchemaPartText(err.parentSchema)}`;
  } else if (err.keyword === "absolutePath") {
  const baseMessage = `${dataPath}: ${
  err.message
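What the new branch reports, and the misconfiguration it was added for (see the "not" rule appended to the cacheGroups schema near the end of this diff). The sketch below is illustrative; formatSchemaPart stands in for webpack's getSchemaPartText helper:

// err is an ajv validation error whose keyword is "not"; err.schema is the
// violated "not" subschema, err.parentSchema its enclosing schema
const formatNotError = (dataPath, err, formatSchemaPart) =>
	`${dataPath} should not be ${formatSchemaPart(err.schema)}\n` +
	formatSchemaPart(err.parentSchema);

// The typical trigger is a cache group literally named "test" defined through
// the shorthand value syntax, where a `test` option was almost certainly meant:
//
//   optimization: {
//     splitChunks: {
//       cacheGroups: {
//         test: /[\\/]node_modules[\\/]/   // now rejected with "should not be ..."
//       }
//     }
//   }
//
// intended instead: cacheGroups: { vendors: { test: /[\\/]node_modules[\\/]/ } }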
package/lib/logging/Logger.js CHANGED
@@ -32,7 +32,7 @@ const LogType = Object.freeze({

  exports.LogType = LogType;

- /** @typedef {keyof LogType} LogTypeEnum */
+ /** @typedef {keyof typeof LogType} LogTypeEnum */

  const LOG_SYMBOL = Symbol("webpack logger raw log method");
  const TIMERS_SYMBOL = Symbol("webpack logger times");
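The one-word fix matters for type checking: LogType is a runtime value, so keyof must be applied to its type via typeof. An illustrative reduction (only a subset of the real LogType keys is shown):

const LogType = Object.freeze({
	error: "error",
	warn: "warn",
	info: "info",
	log: "log"
	// ...the real object has more entries
});

// `keyof LogType` uses the value as if it were a type and does not resolve;
// `keyof typeof LogType` is the union of the object's property names
/** @typedef {keyof typeof LogType} LogTypeEnum */

/** @type {LogTypeEnum} */
const level = "warn"; // ok; something like "verbose" would be rejected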
package/lib/optimize/LimitChunkCountPlugin.js CHANGED
@@ -6,8 +6,33 @@

  const validateOptions = require("schema-utils");
  const schema = require("../../schemas/plugins/optimize/LimitChunkCountPlugin.json");
+ const LazyBucketSortedSet = require("../util/LazyBucketSortedSet");

  /** @typedef {import("../../declarations/plugins/optimize/LimitChunkCountPlugin").LimitChunkCountPluginOptions} LimitChunkCountPluginOptions */
+ /** @typedef {import("../Chunk")} Chunk */
+ /** @typedef {import("../Compiler")} Compiler */
+
+ /**
+ * @typedef {Object} ChunkCombination
+ * @property {boolean} deleted this is set to true when combination was removed
+ * @property {number} sizeDiff
+ * @property {number} integratedSize
+ * @property {Chunk} a
+ * @property {Chunk} b
+ * @property {number} aIdx
+ * @property {number} bIdx
+ * @property {number} aSize
+ * @property {number} bSize
+ */
+
+ const addToSetMap = (map, key, value) => {
+ const set = map.get(key);
+ if (set === undefined) {
+ map.set(key, new Set([value]));
+ } else {
+ set.add(value);
+ }
+ };

  class LimitChunkCountPlugin {
  /**
@@ -19,6 +44,11 @@ class LimitChunkCountPlugin {
  validateOptions(schema, options, "Limit Chunk Count Plugin");
  this.options = options;
  }
+
+ /**
+ * @param {Compiler} compiler the webpack compiler
+ * @returns {void}
+ */
  apply(compiler) {
  const options = this.options;
  compiler.hooks.compilation.tap("LimitChunkCountPlugin", compilation => {
@@ -30,47 +60,169 @@ class LimitChunkCountPlugin {
  if (maxChunks < 1) return;
  if (chunks.length <= maxChunks) return;

+ let remainingChunksToMerge = chunks.length - maxChunks;
+
+ // order chunks in a deterministic way
  const orderedChunks = chunks.slice().sort((a, b) => a.compareTo(b));

- const sortedExtendedPairCombinations = orderedChunks
- .reduce((combinations, a, idx) => {
- // create combination pairs
- for (let i = 0; i < idx; i++) {
- const b = orderedChunks[i];
- combinations.push([b, a]);
- }
- return combinations;
- }, [])
- .map(pair => {
- // extend combination pairs with size and integrated size
- const a = pair[0].size(options);
- const b = pair[1].size(options);
- const ab = pair[0].integratedSize(pair[1], options);
- return [a + b - ab, ab, pair[0], pair[1], a, b];
- })
- .filter(extendedPair => {
+ // create a lazy sorted data structure to keep all combinations
+ // this is large. Size = chunks * (chunks - 1) / 2
+ // It uses a multi layer bucket sort plus normal sort in the last layer
+ // It's also lazy so only accessed buckets are sorted
+ const combinations = new LazyBucketSortedSet(
+ // Layer 1: ordered by largest size benefit
+ c => c.sizeDiff,
+ (a, b) => b - a,
+ // Layer 2: ordered by smallest combined size
+ c => c.integratedSize,
+ (a, b) => a - b,
+ // Layer 3: ordered by position difference in orderedChunk (-> to be deterministic)
+ c => c.bIdx - c.aIdx,
+ (a, b) => a - b,
+ // Layer 4: ordered by position in orderedChunk (-> to be deterministic)
+ (a, b) => a.bIdx - b.bIdx
+ );
+
+ // we keep a mappng from chunk to all combinations
+ // but this mapping is not kept up-to-date with deletions
+ // so `deleted` flag need to be considered when iterating this
+ /** @type {Map<Chunk, Set<ChunkCombination>>} */
+ const combinationsByChunk = new Map();
+
+ orderedChunks.forEach((b, bIdx) => {
+ // create combination pairs with size and integrated size
+ for (let aIdx = 0; aIdx < bIdx; aIdx++) {
+ const a = orderedChunks[aIdx];
+ const integratedSize = a.integratedSize(b, options);
+
  // filter pairs that do not have an integratedSize
  // meaning they can NOT be integrated!
- return extendedPair[1] !== false;
- })
- .sort((a, b) => {
- // sadly javascript does an inplace sort here
- // sort them by size
- const diff1 = b[0] - a[0];
- if (diff1 !== 0) return diff1;
- const diff2 = a[1] - b[1];
- if (diff2 !== 0) return diff2;
- const diff3 = a[2].compareTo(b[2]);
- if (diff3 !== 0) return diff3;
- return a[3].compareTo(b[3]);
- });
-
- const pair = sortedExtendedPairCombinations[0];
-
- if (pair && pair[2].integrate(pair[3], "limit")) {
- chunks.splice(chunks.indexOf(pair[3]), 1);
- return true;
+ if (integratedSize === false) continue;
+
+ const aSize = a.size(options);
+ const bSize = b.size(options);
+ const c = {
+ deleted: false,
+ sizeDiff: aSize + bSize - integratedSize,
+ integratedSize,
+ a,
+ b,
+ aIdx,
+ bIdx,
+ aSize,
+ bSize
+ };
+ combinations.add(c);
+ addToSetMap(combinationsByChunk, a, c);
+ addToSetMap(combinationsByChunk, b, c);
+ }
+ return combinations;
+ });
+
+ // list of modified chunks during this run
+ // combinations affected by this change are skipped to allow
+ // futher optimizations
+ /** @type {Set<Chunk>} */
+ const modifiedChunks = new Set();
+
+ let changed = false;
+ // eslint-disable-next-line no-constant-condition
+ loop: while (true) {
+ const combination = combinations.popFirst();
+ if (combination === undefined) break;
+
+ combination.deleted = true;
+ const { a, b, integratedSize } = combination;
+
+ // skip over pair when
+ // one of the already merged chunks is a parent of one of the chunks
+ if (modifiedChunks.size > 0) {
+ const queue = new Set(a.groupsIterable);
+ for (const group of b.groupsIterable) {
+ queue.add(group);
+ }
+ for (const group of queue) {
+ for (const mChunk of modifiedChunks) {
+ if (mChunk !== a && mChunk !== b && mChunk.isInGroup(group)) {
+ // This is a potential pair which needs recalculation
+ // We can't do that now, but it merge before following pairs
+ // so we leave space for it, and consider chunks as modified
+ // just for the worse case
+ remainingChunksToMerge--;
+ if (remainingChunksToMerge <= 0) break loop;
+ modifiedChunks.add(a);
+ modifiedChunks.add(b);
+ continue loop;
+ }
+ }
+ for (const parent of group.parentsIterable) {
+ queue.add(parent);
+ }
+ }
+ }
+
+ // merge the chunks
+ if (a.integrate(b, "limit")) {
+ chunks.splice(chunks.indexOf(b), 1);
+
+ // flag chunk a as modified as further optimization are possible for all children here
+ modifiedChunks.add(a);
+
+ changed = true;
+ remainingChunksToMerge--;
+ if (remainingChunksToMerge <= 0) break;
+
+ // Update all affected combinations
+ // delete all combination with the removed chunk
+ // we will use combinations with the kept chunk instead
+ for (const combination of combinationsByChunk.get(b)) {
+ if (combination.deleted) continue;
+ combination.deleted = true;
+ combinations.delete(combination);
+ }
+
+ // Update combinations with the kept chunk with new sizes
+ for (const combination of combinationsByChunk.get(a)) {
+ if (combination.deleted) continue;
+ if (combination.a === a) {
+ // Update size
+ const newIntegratedSize = a.integratedSize(
+ combination.b,
+ options
+ );
+ if (newIntegratedSize === false) {
+ combination.deleted = true;
+ combinations.delete(combination);
+ continue;
+ }
+ const finishUpdate = combinations.startUpdate(combination);
+ combination.integratedSize = newIntegratedSize;
+ combination.aSize = integratedSize;
+ combination.sizeDiff =
+ combination.bSize + integratedSize - newIntegratedSize;
+ finishUpdate();
+ } else if (combination.b === a) {
+ // Update size
+ const newIntegratedSize = combination.a.integratedSize(
+ a,
+ options
+ );
+ if (newIntegratedSize === false) {
+ combination.deleted = true;
+ combinations.delete(combination);
+ continue;
+ }
+ const finishUpdate = combinations.startUpdate(combination);
+ combination.integratedSize = newIntegratedSize;
+ combination.bSize = integratedSize;
+ combination.sizeDiff =
+ integratedSize + combination.aSize - newIntegratedSize;
+ finishUpdate();
+ }
+ }
+ }
  }
+ if (changed) return true;
  }
  );
  });
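The plugin's observable behaviour is unchanged; what changed is the cost and the number of passes. The old code rebuilt and fully sorted every chunk pair, merged a single pair, and returned true so the optimization pass would start over; the new code keeps all candidate pairs in a LazyBucketSortedSet ordered by largest size benefit, then smallest combined size, then position, takes the best pair with popFirst(), re-scores only the combinations that touch the kept chunk, and performs as many merges as needed in one pass. Using the plugin is the same as before (maxChunks value illustrative):

const webpack = require("webpack");

module.exports = {
	// ...
	plugins: [
		new webpack.optimize.LimitChunkCountPlugin({
			maxChunks: 5 // merge the best-scoring pairs until at most 5 chunks remain
		})
	]
};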
package/lib/optimize/SplitChunksPlugin.js CHANGED
@@ -177,7 +177,7 @@ module.exports = class SplitChunksPlugin {
  name =
  name.slice(0, sliceLength) +
  automaticNameDelimiter +
- hashFilename(name);
+ hashedFilename;
  }
  cacheEntry[cacheGroup] = name;
  return name;
package/lib/performance/SizeLimitsPlugin.js CHANGED
@@ -40,6 +40,7 @@ module.exports = class SizeLimitsPlugin {
  entrypoint.getFiles().reduce((currentSize, file) => {
  const asset = compilation.getAsset(file);
  if (
+ asset &&
  assetFilter(asset.name, asset.source, asset.info) &&
  asset.source
  ) {
@@ -67,7 +68,7 @@ module.exports = class SizeLimitsPlugin {

  const fileFilter = name => {
  const asset = compilation.getAsset(name);
- return assetFilter(asset.name, asset.source, asset.info);
+ return asset && assetFilter(asset.name, asset.source, asset.info);
  };

  const entrypointsOverLimit = [];
package/lib/util/LazyBucketSortedSet.js ADDED
@@ -0,0 +1,235 @@
+ /*
+ MIT License http://www.opensource.org/licenses/mit-license.php
+ Author Tobias Koppers @sokra
+ */
+
+ "use strict";
+
+ const SortableSet = require("./SortableSet");
+
+ /**
+ * @template T
+ * @template K
+ * Multi layer bucket sorted set
+ * Supports adding non-existing items (DO NOT ADD ITEM TWICE)
+ * Supports removing exiting items (DO NOT REMOVE ITEM NOT IN SET)
+ * Supports popping the first items according to defined order
+ * Supports iterating all items without order
+ * Supports updating an item in an efficient way
+ * Supports size property, which is the number of items
+ * Items are lazy partially sorted when needed
+ */
+ class LazyBucketSortedSet {
+ /**
+ * @param {function(T): K} getKey function to get key from item
+ * @param {function(K, K): number} comparator comparator to sort keys
+ * @param {...((function(T): any) | (function(any, any): number))} args more pairs of getKey and comparator plus optional final comparator for the last layer
+ */
+ constructor(getKey, comparator, ...args) {
+ this._getKey = getKey;
+ this._innerArgs = args;
+ this._leaf = args.length <= 1;
+ this._keys = new SortableSet(undefined, comparator);
+ /** @type {Map<K, LazyBucketSortedSet<T, any> | SortableSet<T>>} */
+ this._map = new Map();
+ this._unsortedItems = new Set();
+ this.size = 0;
+ }
+
+ /**
+ * @param {T} item an item
+ * @returns {void}
+ */
+ add(item) {
+ this.size++;
+ this._unsortedItems.add(item);
+ }
+
+ /**
+ * @param {K} key key of item
+ * @param {T} item the item
+ * @returns {void}
+ */
+ _addInternal(key, item) {
+ let entry = this._map.get(key);
+ if (entry === undefined) {
+ entry = this._leaf
+ ? new SortableSet(undefined, this._innerArgs[0])
+ : new /** @type {any} */ (LazyBucketSortedSet)(...this._innerArgs);
+ this._keys.add(key);
+ this._map.set(key, entry);
+ }
+ entry.add(item);
+ }
+
+ /**
+ * @param {T} item an item
+ * @returns {void}
+ */
+ delete(item) {
+ this.size--;
+ if (this._unsortedItems.has(item)) {
+ this._unsortedItems.delete(item);
+ return;
+ }
+ const key = this._getKey(item);
+ const entry = this._map.get(key);
+ entry.delete(item);
+ if (entry.size === 0) {
+ this._deleteKey(key);
+ }
+ }
+
+ /**
+ * @param {K} key key to be removed
+ * @returns {void}
+ */
+ _deleteKey(key) {
+ this._keys.delete(key);
+ this._map.delete(key);
+ }
+
+ /**
+ * @returns {T | undefined} an item
+ */
+ popFirst() {
+ if (this.size === 0) return undefined;
+ this.size--;
+ if (this._unsortedItems.size > 0) {
+ for (const item of this._unsortedItems) {
+ const key = this._getKey(item);
+ this._addInternal(key, item);
+ }
+ this._unsortedItems.clear();
+ }
+ this._keys.sort();
+ const key = this._keys.values().next().value;
+ const entry = this._map.get(key);
+ if (this._leaf) {
+ const leafEntry = /** @type {SortableSet<T>} */ (entry);
+ leafEntry.sort();
+ const item = leafEntry.values().next().value;
+ leafEntry.delete(item);
+ if (leafEntry.size === 0) {
+ this._deleteKey(key);
+ }
+ return item;
+ } else {
+ const nodeEntry = /** @type {LazyBucketSortedSet<T, any>} */ (entry);
+ const item = nodeEntry.popFirst();
+ if (nodeEntry.size === 0) {
+ this._deleteKey(key);
+ }
+ return item;
+ }
+ }
+
+ /**
+ * @param {T} item to be updated item
+ * @returns {function(true=): void} finish update
+ */
+ startUpdate(item) {
+ if (this._unsortedItems.has(item)) {
+ return remove => {
+ if (remove) {
+ this._unsortedItems.delete(item);
+ this.size--;
+ return;
+ }
+ };
+ }
+ const key = this._getKey(item);
+ if (this._leaf) {
+ const oldEntry = /** @type {SortableSet<T>} */ (this._map.get(key));
+ return remove => {
+ if (remove) {
+ this.size--;
+ oldEntry.delete(item);
+ if (oldEntry.size === 0) {
+ this._deleteKey(key);
+ }
+ return;
+ }
+ const newKey = this._getKey(item);
+ if (key === newKey) {
+ // This flags the sortable set as unordered
+ oldEntry.add(item);
+ } else {
+ oldEntry.delete(item);
+ if (oldEntry.size === 0) {
+ this._deleteKey(key);
+ }
+ this._addInternal(newKey, item);
+ }
+ };
+ } else {
+ const oldEntry = /** @type {LazyBucketSortedSet<T, any>} */ (this._map.get(
+ key
+ ));
+ const finishUpdate = oldEntry.startUpdate(item);
+ return remove => {
+ if (remove) {
+ this.size--;
+ finishUpdate(true);
+ if (oldEntry.size === 0) {
+ this._deleteKey(key);
+ }
+ return;
+ }
+ const newKey = this._getKey(item);
+ if (key === newKey) {
+ finishUpdate();
+ } else {
+ finishUpdate(true);
+ if (oldEntry.size === 0) {
+ this._deleteKey(key);
+ }
+ this._addInternal(newKey, item);
+ }
+ };
+ }
+ }
+
+ /**
+ * @param {Iterator<T>[]} iterators list of iterators to append to
+ * @returns {void}
+ */
+ _appendIterators(iterators) {
+ if (this._unsortedItems.size > 0)
+ iterators.push(this._unsortedItems[Symbol.iterator]());
+ for (const key of this._keys) {
+ const entry = this._map.get(key);
+ if (this._leaf) {
+ const leafEntry = /** @type {SortableSet<T>} */ (entry);
+ const iterator = leafEntry[Symbol.iterator]();
+ iterators.push(iterator);
+ } else {
+ const nodeEntry = /** @type {LazyBucketSortedSet<T, any>} */ (entry);
+ nodeEntry._appendIterators(iterators);
+ }
+ }
+ }
+
+ /**
+ * @returns {Iterator<T>} the iterator
+ */
+ [Symbol.iterator]() {
+ const iterators = [];
+ this._appendIterators(iterators);
+ iterators.reverse();
+ let currentIterator = iterators.pop();
+ return {
+ next: () => {
+ const res = currentIterator.next();
+ if (res.done) {
+ if (iterators.length === 0) return res;
+ currentIterator = iterators.pop();
+ return currentIterator.next();
+ }
+ return res;
+ }
+ };
+ }
+ }
+
+ module.exports = LazyBucketSortedSet;
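A hypothetical usage sketch of the new utility (items and layer choices are made up; the deep require path matches the file added above and is shown only for illustration; the real consumer is LimitChunkCountPlugin). Two layers are configured: bucket by priority with the highest first, then order within a bucket by name.

const LazyBucketSortedSet = require("webpack/lib/util/LazyBucketSortedSet");

const set = new LazyBucketSortedSet(
	item => item.priority, // layer 1: bucket key
	(a, b) => b - a, // layer 1: highest priority first
	(a, b) => a.name.localeCompare(b.name) // final layer: order inside a bucket
);

const task = { name: "b", priority: 1 };
set.add({ name: "a", priority: 1 });
set.add(task);
set.add({ name: "c", priority: 3 });

set.popFirst(); // { name: "c", priority: 3 } (only the touched buckets get sorted)

// changing a key must be wrapped in startUpdate so the item moves buckets
const finishUpdate = set.startUpdate(task);
task.priority = 5;
finishUpdate(); // calling finishUpdate(true) would remove the item instead

set.popFirst(); // { name: "b", priority: 5 }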
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "webpack",
- "version": "4.40.0",
+ "version": "4.41.0",
  "author": "Tobias Koppers @sokra",
  "description": "Packs CommonJs/AMD modules for the browser. Allows to split your codebase into multiple bundles, which can be loaded on demand. Support loaders to preprocess files, i.e. json, jsx, es7, css, less, ... and your custom stuff.",
  "license": "MIT",
package/schemas/WebpackOptions.json CHANGED
@@ -684,6 +684,30 @@
  }
  }
  ]
+ },
+ "not": {
+ "description": "Using the cacheGroup shorthand syntax with a cache group named 'test' is a potential config error\nDid you intent to define a cache group with a test instead?\ncacheGroups: {\n <name>: {\n test: ...\n }\n}",
+ "type": "object",
+ "additionalProperties": true,
+ "properties": {
+ "test": {
+ "description": "The test property is a cache group name, but using the test option of the cache group could be intended instead",
+ "anyOf": [
+ {
+ "instanceof": "Function",
+ "tsType": "Function"
+ },
+ {
+ "type": "string"
+ },
+ {
+ "instanceof": "RegExp",
+ "tsType": "RegExp"
+ }
+ ]
+ }
+ },
+ "required": ["test"]
  }
  },
  "chunks": {