rollup 3.15.0 → 3.15.1-1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/bin/rollup CHANGED
@@ -2,8 +2,8 @@
 
  /*
  @license
- Rollup.js v3.15.0
- Fri, 10 Feb 2023 05:20:05 GMT - commit 5d81532f688383a8aeaf6a099da2b0205e8b8609
+ Rollup.js v3.15.1-1
+ Tue, 14 Feb 2023 09:30:15 GMT - commit 5e91b2d7a9f833cf9e11d98c96c4cba3fac195a4
 
  https://github.com/rollup/rollup
 
package/dist/es/rollup.js CHANGED
@@ -1,7 +1,7 @@
  /*
  @license
- Rollup.js v3.15.0
- Fri, 10 Feb 2023 05:20:05 GMT - commit 5d81532f688383a8aeaf6a099da2b0205e8b8609
+ Rollup.js v3.15.1-1
+ Tue, 14 Feb 2023 09:30:15 GMT - commit 5e91b2d7a9f833cf9e11d98c96c4cba3fac195a4
 
  https://github.com/rollup/rollup
 
@@ -16,7 +16,7 @@ import { lstat, realpath, readdir, readFile, mkdir, writeFile } from 'node:fs/pr
  import { EventEmitter } from 'node:events';
  import * as tty from 'tty';
 
- var version$1 = "3.15.0";
+ var version$1 = "3.15.1-1";
 
  const comma = ','.charCodeAt(0);
  const semicolon = ';'.charCodeAt(0);
@@ -370,6 +370,9 @@ class SourceMap {
  this.sourcesContent = properties.sourcesContent;
  this.names = properties.names;
  this.mappings = encode(properties.mappings);
+ if (typeof properties.x_google_ignoreList !== 'undefined') {
+ this.x_google_ignoreList = properties.x_google_ignoreList;
+ }
  }
 
  toString() {
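The new x_google_ignoreList field is the source map extension that debuggers such as Chrome DevTools use to ignore-list sources; it holds indices into the map's sources array. A minimal sketch (not Rollup's own code, the map literal is made up) of how a consumer might read it:

    // Sketch: x_google_ignoreList lists indices into `sources` that debuggers
    // should treat as third-party / ignore-listed code.
    const map = {
      version: 3,
      sources: ['src/main.js', 'node_modules/lib/index.js'],
      names: [],
      mappings: ';;',
      x_google_ignoreList: [1] // index 1 -> 'node_modules/lib/index.js'
    };

    const ignored = (map.x_google_ignoreList ?? []).map(index => map.sources[index]);
    console.log(ignored); // ['node_modules/lib/index.js']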
@@ -16119,7 +16122,7 @@ function analyzeModuleGraph(entries) {
  }
  }
  return {
- allEntries,
+ allEntries: [...allEntries],
  dependentEntriesByModule,
  dynamicallyDependentEntriesByDynamicEntry: getDynamicallyDependentEntriesByDynamicEntry(dependentEntriesByModule, dynamicEntries)
  };
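Spreading allEntries into a fresh array suggests the value was previously only an iterable (for example a Set); the code added later in this release reads allEntries.length and positional indices, which need an array. A tiny illustration of the difference, with made-up entry names:

    // Example only: a Set has .size but no .length and no index access.
    const entriesSet = new Set(['main', 'admin']);
    const allEntries = [...entriesSet];
    console.log(allEntries.length); // 2
    console.log(allEntries[1]);     // 'admin'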
@@ -16193,7 +16196,7 @@ function createChunks(allEntries, assignedEntriesByModule, minChunkSize) {
  alias: null,
  modules
  }))
- : getOptimizedChunks(chunkModulesBySignature, minChunkSize).map(({ modules }) => ({
+ : getOptimizedChunks(chunkModulesBySignature, allEntries.length, minChunkSize).map(({ modules }) => ({
  alias: null,
  modules
  }));
@@ -16217,49 +16220,109 @@ function getChunkModulesBySignature(assignedEntriesByModule, allEntries) {
  }
  /**
  * This function tries to get rid of small chunks by merging them with other
- * chunks. In order to merge chunks, one must obey the following rule:
- * - When merging several chunks, at most one of the chunks can have side
- * effects
- * - When one of the chunks has side effects, the entry points depending on that
- * chunk need to be a super set of the entry points depending on the other
- * chunks
- * - Pure chunks can always be merged
- * - We use the entry point dependence signature to calculate "chunk distance",
- * i.e. how likely it is that two chunks are loaded together
+ * chunks.
+ *
+ * We can only merge chunks safely if after the merge, loading any entry point
+ * in any allowed order will not trigger side effects that should not have been
+ * triggered. While side effects are usually things like global function calls,
+ * global variable mutations or potentially thrown errors, details do not
+ * matter here, and we just discern chunks without side effects (pure chunks)
+ * from other chunks.
+ *
+ * As a first step, we assign each pre-generated chunk with side effects a
+ * label. I.e. we have side effect "A" if the non-pure chunk "A" is loaded.
+ *
+ * Now to determine the side effects of loading a chunk, one also has to take
+ * the side effects of its dependencies into account. So if A depends on B
+ * (A -> B) and both have side effects, loading A triggers effects AB.
+ *
+ * Now from the previous step we know that each chunk is uniquely determine by
+ * the entry points that depend on it and cause it to load, which we will call
+ * its dependent entry points.
+ *
+ * E.g. if X -> A and Y -> A, then the dependent entry points of A are XY.
+ * Starting from that idea, we can determine a set of chunks—and thus a set
+ * of side effects—that must have been triggered if a certain chunk has been
+ * loaded. Basically, it is the intersection of all chunks loaded by the
+ * dependent entry points of a given chunk. We call the corresponding side
+ * effects the correlated side effects of that chunk.
+ *
+ * Example:
+ * X -> ABC, Y -> ADE, A-> F, B -> D
+ * Then taking dependencies into account, X -> ABCDF, Y -> ADEF
+ * The intersection is ADF. So we know that when A is loaded, D and F must also
+ * be in memory even though neither D nor A is a dependency of the other.
+ * If all have side effects, we call ADF the correlated side effects of A. The
+ * correlated side effects need to remain constant when merging chunks.
+ *
+ * In contrast, we have the dependency side effects of A, which represents
+ * the side effects we trigger if we directly load A. In this example, the
+ * dependency side effects are AF.
+ * For entry chunks, dependency and correlated side effects are the same.
+ *
+ * With these concepts, merging chunks is allowed if the correlated side effects
+ * of each entry do not change. Thus, we are allowed to merge two chunks if
+ * a) the dependency side effects of each chunk are a subset of the correlated
+ * side effects of the other chunk, so no additional side effects are
+ * triggered for any entry, or
+ * b) The signature of chunk A is a subset of the signature of chunk B while the
+ * dependency side effects of A are a subset of the correlated side effects
+ * of B. Because in that scenario, whenever A is loaded, B is loaded as well.
+ * But there are cases when B is loaded where A is not loaded. So if we merge
+ * the chunks, all dependency side effects of A will be added to the
+ * correlated side effects of B, and as the latter is not allowed to change,
+ * the former need to be a subset of the latter.
+ *
+ * Another consideration when merging small chunks into other chunks is to avoid
+ * that too much additional code is loaded. This is achieved when the dependent
+ * entries of the small chunk are a subset of the dependent entries of the other
+ * chunk. Because then when the small chunk is loaded, the other chunk was
+ * loaded/in memory anyway, so at most when the other chunk is loaded, the
+ * additional size of the small chunk is loaded unnecessarily.
+ *
+ * So the algorithm performs merges in two passes:
+ * 1. First we try to merge small chunks A only into other chunks B if the
+ * dependent entries of A are a subset of the dependent entries of B and the
+ * dependency side effects of A are a subset of the correlated side effects
+ * of B.
+ * 2. Only then for all remaining small chunks, we look for arbitrary merges
+ * following the above rules (a) and (b), starting with the smallest chunks
+ * to look for possible merge targets.
  */
- function getOptimizedChunks(chunkModulesBySignature, minChunkSize) {
+ function getOptimizedChunks(chunkModulesBySignature, numberOfEntries, minChunkSize) {
  timeStart('optimize chunks', 3);
- const chunkPartition = getPartitionedChunks(chunkModulesBySignature, minChunkSize);
- if (chunkPartition.small.sideEffect.size > 0) {
- mergeChunks(chunkPartition.small.sideEffect, [chunkPartition.small.pure, chunkPartition.big.pure], minChunkSize, chunkPartition);
- }
- if (chunkPartition.small.pure.size > 0) {
- mergeChunks(chunkPartition.small.pure, [chunkPartition.small.pure, chunkPartition.big.sideEffect, chunkPartition.big.pure], minChunkSize, chunkPartition);
+ const chunkPartition = getPartitionedChunks(chunkModulesBySignature, numberOfEntries, minChunkSize);
+ if (chunkPartition.small.size > 0) {
+ mergeChunks(chunkPartition, minChunkSize);
  }
  timeEnd('optimize chunks', 3);
- return [
- ...chunkPartition.small.sideEffect,
- ...chunkPartition.small.pure,
- ...chunkPartition.big.sideEffect,
- ...chunkPartition.big.pure
- ];
+ return [...chunkPartition.small, ...chunkPartition.big];
  }
  const CHAR_DEPENDENT = 'X';
  const CHAR_INDEPENDENT = '_';
- const CHAR_CODE_DEPENDENT = CHAR_DEPENDENT.charCodeAt(0);
- function getPartitionedChunks(chunkModulesBySignature, minChunkSize) {
- const smallPureChunks = [];
- const bigPureChunks = [];
- const smallSideEffectChunks = [];
- const bigSideEffectChunks = [];
+ function getPartitionedChunks(chunkModulesBySignature, numberOfEntries, minChunkSize) {
+ const smallChunks = [];
+ const bigChunks = [];
  const chunkByModule = new Map();
+ const sideEffectsByEntry = [];
+ for (let index = 0; index < numberOfEntries; index++) {
+ sideEffectsByEntry.push(new Set());
+ }
  for (const [signature, modules] of Object.entries(chunkModulesBySignature)) {
+ const dependentEntries = new Set();
+ for (let position = 0; position < numberOfEntries; position++) {
+ if (signature[position] === CHAR_DEPENDENT) {
+ dependentEntries.add(position);
+ }
+ }
  const chunkDescription = {
+ correlatedSideEffects: new Set(),
  dependencies: new Set(),
  dependentChunks: new Set(),
+ dependentEntries,
  modules,
  pure: true,
- signature,
+ sideEffects: new Set(),
  size: 0
  };
  let size = 0;
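The loop added in this hunk turns a chunk's 'X'/'_' dependence signature into a set of entry indices. A short sketch of that conversion using the same CHAR_DEPENDENT convention; the signature value is made up for illustration:

    // Sketch: position i of the signature is 'X' when entry i depends on the chunk.
    const CHAR_DEPENDENT = 'X';
    const signature = 'X_X'; // entries 0 and 2 depend on this chunk
    const dependentEntries = new Set();
    for (let position = 0; position < signature.length; position++) {
      if (signature[position] === CHAR_DEPENDENT) {
        dependentEntries.add(position);
      }
    }
    console.log([...dependentEntries]); // [0, 2]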
@@ -16273,25 +16336,27 @@ function getPartitionedChunks(chunkModulesBySignature, minChunkSize) {
  }
  chunkDescription.pure = pure;
  chunkDescription.size = size;
- (size < minChunkSize
- ? pure
- ? smallPureChunks
- : smallSideEffectChunks
- : pure
- ? bigPureChunks
- : bigSideEffectChunks).push(chunkDescription);
- }
- sortChunksAndAddDependencies([bigPureChunks, bigSideEffectChunks, smallPureChunks, smallSideEffectChunks], chunkByModule);
+ if (!pure) {
+ for (const entryIndex of dependentEntries) {
+ sideEffectsByEntry[entryIndex].add(signature);
+ }
+ // In the beginning, each chunk is only its own side effect. After
+ // merging, additional side effects can accumulate.
+ chunkDescription.sideEffects.add(signature);
+ }
+ (size < minChunkSize ? smallChunks : bigChunks).push(chunkDescription);
+ }
+ sortChunksAndAddDependenciesAndEffects([bigChunks, smallChunks], chunkByModule, sideEffectsByEntry);
  return {
- big: { pure: new Set(bigPureChunks), sideEffect: new Set(bigSideEffectChunks) },
- small: { pure: new Set(smallPureChunks), sideEffect: new Set(smallSideEffectChunks) }
+ big: new Set(bigChunks),
+ small: new Set(smallChunks)
  };
  }
- function sortChunksAndAddDependencies(chunkLists, chunkByModule) {
+ function sortChunksAndAddDependenciesAndEffects(chunkLists, chunkByModule, sideEffectsByEntry) {
  for (const chunks of chunkLists) {
- chunks.sort(compareChunks);
+ chunks.sort(compareChunkSize);
  for (const chunk of chunks) {
- const { dependencies, modules } = chunk;
+ const { dependencies, modules, correlatedSideEffects, dependentEntries } = chunk;
  for (const module of modules) {
  for (const dependency of module.getDependenciesToBeIncluded()) {
  const dependencyChunk = chunkByModule.get(dependency);
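Per this hunk, every non-pure chunk's signature is recorded as a side-effect label against each entry that depends on it, and the chunk initially counts only itself as a side effect. A small sketch of that bookkeeping with made-up chunk descriptions:

    // Sketch: record which side-effect labels each entry triggers.
    const numberOfEntries = 2;
    const sideEffectsByEntry = Array.from({ length: numberOfEntries }, () => new Set());
    const chunks = [
      { signature: 'X_', dependentEntries: new Set([0]), pure: false, sideEffects: new Set() },
      { signature: 'XX', dependentEntries: new Set([0, 1]), pure: true, sideEffects: new Set() }
    ];
    for (const chunk of chunks) {
      if (!chunk.pure) {
        for (const entryIndex of chunk.dependentEntries) {
          sideEffectsByEntry[entryIndex].add(chunk.signature);
        }
        chunk.sideEffects.add(chunk.signature);
      }
    }
    console.log([...sideEffectsByEntry[0]]); // ['X_']
    console.log([...sideEffectsByEntry[1]]); // []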
@@ -16301,89 +16366,133 @@ function sortChunksAndAddDependencies(chunkLists, chunkByModule) {
  }
  }
  }
+ let firstEntry = true;
+ // Correlated side effects is the intersection of all entry side effects
+ for (const entryIndex of dependentEntries) {
+ const entryEffects = sideEffectsByEntry[entryIndex];
+ if (firstEntry) {
+ for (const sideEffect of entryEffects) {
+ correlatedSideEffects.add(sideEffect);
+ }
+ firstEntry = false;
+ }
+ else {
+ for (const sideEffect of correlatedSideEffects) {
+ if (!entryEffects.has(sideEffect)) {
+ correlatedSideEffects.delete(sideEffect);
+ }
+ }
+ }
+ }
  }
  }
  }
- function compareChunks({ size: sizeA }, { size: sizeB }) {
+ function compareChunkSize({ size: sizeA }, { size: sizeB }) {
  return sizeA - sizeB;
  }
- function mergeChunks(chunksToBeMerged, targetChunks, minChunkSize, chunkPartition) {
- for (const mergedChunk of chunksToBeMerged) {
- let closestChunk = null;
- let closestChunkDistance = Infinity;
- const { signature, modules, pure, size } = mergedChunk;
- for (const targetChunk of concatLazy(targetChunks)) {
- if (mergedChunk === targetChunk)
- continue;
- // Possible improvement:
- // For dynamic entries depending on a pure chunk, it is safe to merge that
- // chunk into the chunk doing the dynamic import (i.e. into an "already
- // loaded chunk") even if it is not pure.
- // One way of handling this could be to add all "already loaded entries"
- // of the dynamic importers into the signature as well. That could also
- // change the way we do code-splitting for already loaded entries.
- const distance = pure
- ? getSignatureDistance(signature, targetChunk.signature, !targetChunk.pure)
- : getSignatureDistance(targetChunk.signature, signature, true);
- if (distance < closestChunkDistance && isValidMerge(mergedChunk, targetChunk)) {
- if (distance === 1) {
+ function mergeChunks(chunkPartition, minChunkSize) {
+ console.log('---- Initial chunks');
+ printConsistencyCheck(chunkPartition);
+ for (const allowArbitraryMerges of [false, true]) {
+ for (const mergedChunk of chunkPartition.small) {
+ let closestChunk = null;
+ let closestChunkDistance = Infinity;
+ const { modules, pure, size } = mergedChunk;
+ for (const targetChunk of concatLazy([chunkPartition.small, chunkPartition.big])) {
+ if (mergedChunk === targetChunk)
+ continue;
+ // If both chunks are small, we also allow for unrelated merges during
+ // the first pass
+ const onlySubsetMerge = !allowArbitraryMerges && targetChunk.size >= minChunkSize;
+ const distance = getChunkEntryDistance(mergedChunk, targetChunk, onlySubsetMerge);
+ if (distance < closestChunkDistance &&
+ isValidMerge(mergedChunk, targetChunk, onlySubsetMerge)) {
  closestChunk = targetChunk;
- break;
+ closestChunkDistance = distance;
  }
- closestChunk = targetChunk;
- closestChunkDistance = distance;
- }
- }
- if (closestChunk) {
- chunksToBeMerged.delete(mergedChunk);
- getChunksInPartition(closestChunk, minChunkSize, chunkPartition).delete(closestChunk);
- closestChunk.modules.push(...modules);
- closestChunk.size += size;
- closestChunk.pure && (closestChunk.pure = pure);
- closestChunk.signature = mergeSignatures(signature, closestChunk.signature);
- const { dependencies, dependentChunks } = closestChunk;
- for (const dependency of mergedChunk.dependencies) {
- dependencies.add(dependency);
  }
- for (const dependentChunk of mergedChunk.dependentChunks) {
- dependentChunks.add(dependentChunk);
- dependentChunk.dependencies.delete(mergedChunk);
- dependentChunk.dependencies.add(closestChunk);
+ if (closestChunk) {
+ chunkPartition.small.delete(mergedChunk);
+ getChunksInPartition(closestChunk, minChunkSize, chunkPartition).delete(closestChunk);
+ closestChunk.modules.push(...modules);
+ closestChunk.size += size;
+ closestChunk.pure && (closestChunk.pure = pure);
+ const { correlatedSideEffects, dependencies, dependentChunks, dependentEntries, sideEffects } = closestChunk;
+ for (const sideEffect of correlatedSideEffects) {
+ if (!mergedChunk.correlatedSideEffects.has(sideEffect)) {
+ correlatedSideEffects.delete(sideEffect);
+ }
+ }
+ for (const entry of mergedChunk.dependentEntries) {
+ dependentEntries.add(entry);
+ }
+ for (const sideEffect of mergedChunk.sideEffects) {
+ sideEffects.add(sideEffect);
+ }
+ for (const dependency of mergedChunk.dependencies) {
+ dependencies.add(dependency);
+ dependency.dependentChunks.delete(mergedChunk);
+ dependency.dependentChunks.add(closestChunk);
+ }
+ for (const dependentChunk of mergedChunk.dependentChunks) {
+ dependentChunks.add(dependentChunk);
+ dependentChunk.dependencies.delete(mergedChunk);
+ dependentChunk.dependencies.add(closestChunk);
+ }
+ dependencies.delete(closestChunk);
+ getChunksInPartition(closestChunk, minChunkSize, chunkPartition).add(closestChunk);
  }
- dependencies.delete(closestChunk);
- getChunksInPartition(closestChunk, minChunkSize, chunkPartition).add(closestChunk);
  }
+ console.log('---- After run with arbitrary merges:', allowArbitraryMerges);
+ printConsistencyCheck(chunkPartition);
  }
  }
  // Merging will not produce cycles if none of the direct non-merged dependencies
  // of a chunk have the other chunk as a transitive dependency
- function isValidMerge(mergedChunk, targetChunk) {
- return !(hasTransitiveDependency(mergedChunk, targetChunk) ||
- hasTransitiveDependency(targetChunk, mergedChunk));
- }
- function hasTransitiveDependency(dependentChunk, dependencyChunk) {
+ function isValidMerge(mergedChunk, targetChunk, onlySubsetMerge) {
+ return !(hasTransitiveDependencyOrNonCorrelatedSideEffect(mergedChunk, targetChunk, true) ||
+ hasTransitiveDependencyOrNonCorrelatedSideEffect(targetChunk, mergedChunk, !onlySubsetMerge));
+ }
+ function hasTransitiveDependencyOrNonCorrelatedSideEffect(dependentChunk, dependencyChunk, checkSideEffects) {
+ const { correlatedSideEffects } = dependencyChunk;
+ if (checkSideEffects) {
+ for (const sideEffect of dependentChunk.sideEffects) {
+ if (!correlatedSideEffects.has(sideEffect)) {
+ return true;
+ }
+ }
+ }
  const chunksToCheck = new Set(dependentChunk.dependencies);
- for (const { dependencies } of chunksToCheck) {
+ for (const { dependencies, sideEffects } of chunksToCheck) {
  for (const dependency of dependencies) {
  if (dependency === dependencyChunk) {
  return true;
  }
  chunksToCheck.add(dependency);
  }
+ if (checkSideEffects) {
+ for (const sideEffect of sideEffects) {
+ if (!correlatedSideEffects.has(sideEffect)) {
+ return true;
+ }
+ }
+ }
  }
  return false;
  }
  function getChunksInPartition(chunk, minChunkSize, chunkPartition) {
- const subPartition = chunk.size < minChunkSize ? chunkPartition.small : chunkPartition.big;
- return chunk.pure ? subPartition.pure : subPartition.sideEffect;
+ return chunk.size < minChunkSize ? chunkPartition.small : chunkPartition.big;
  }
- function getSignatureDistance(sourceSignature, targetSignature, enforceSubset) {
+ function getChunkEntryDistance({ dependentEntries: sourceEntries }, { dependentEntries: targetEntries }, enforceSubest) {
  let distance = 0;
- const { length } = sourceSignature;
- for (let index = 0; index < length; index++) {
- const sourceValue = sourceSignature.charCodeAt(index);
- if (sourceValue !== targetSignature.charCodeAt(index)) {
- if (enforceSubset && sourceValue === CHAR_CODE_DEPENDENT) {
+ for (const entryIndex of targetEntries) {
+ if (!sourceEntries.has(entryIndex)) {
+ distance++;
+ }
+ }
+ for (const entryIndex of sourceEntries) {
+ if (!targetEntries.has(entryIndex)) {
+ if (enforceSubest) {
  return Infinity;
  }
  distance++;
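The correlated side effects of a chunk are computed as the intersection of the side-effect sets of all its dependent entries, matching the X/Y example in the comment above. A worked sketch of that intersection; the labels are illustrative, not real chunk names:

    // Sketch: correlated side effects = intersection of the entries' side-effect sets.
    const sideEffectsByEntry = [
      new Set(['A', 'B', 'C', 'D', 'F']), // entry X loads chunks ABCDF
      new Set(['A', 'D', 'E', 'F'])       // entry Y loads chunks ADEF
    ];
    const dependentEntries = new Set([0, 1]); // chunk A is loaded by both X and Y
    const correlatedSideEffects = new Set();
    let firstEntry = true;
    for (const entryIndex of dependentEntries) {
      const entryEffects = sideEffectsByEntry[entryIndex];
      if (firstEntry) {
        for (const sideEffect of entryEffects) correlatedSideEffects.add(sideEffect);
        firstEntry = false;
      } else {
        for (const sideEffect of correlatedSideEffects) {
          if (!entryEffects.has(sideEffect)) correlatedSideEffects.delete(sideEffect);
        }
      }
    }
    console.log([...correlatedSideEffects]); // ['A', 'D', 'F']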
@@ -16391,17 +16500,57 @@ function getSignatureDistance(sourceSignature, targetSignature, enforceSubset) {
  }
  return distance;
  }
- function mergeSignatures(sourceSignature, targetSignature) {
- let signature = '';
- const { length } = sourceSignature;
- for (let index = 0; index < length; index++) {
- signature +=
- sourceSignature.charCodeAt(index) === CHAR_CODE_DEPENDENT ||
- targetSignature.charCodeAt(index) === CHAR_CODE_DEPENDENT
- ? CHAR_DEPENDENT
- : CHAR_INDEPENDENT;
- }
- return signature;
+ function printConsistencyCheck(partition) {
+ console.log(`Chunks\n small: ${partition.small.size},\n large: ${partition.big.size}`);
+ const chunks = new Set([...partition.big, ...partition.small]);
+ console.log('Number of cycles:', getNumberOfCycles(chunks));
+ let missingDependencies = 0;
+ let missingDependentChunks = 0;
+ const seenModules = new Set();
+ for (const { modules, dependencies, dependentChunks } of chunks) {
+ for (const module of modules) {
+ if (seenModules.has(module)) {
+ console.log(`Module ${module.id} is duplicated between chunks.`);
+ }
+ seenModules.add(module);
+ }
+ for (const dependency of dependencies) {
+ if (!chunks.has(dependency)) {
+ missingDependencies++;
+ }
+ }
+ for (const dependency of dependentChunks) {
+ if (!chunks.has(dependency)) {
+ missingDependentChunks++;
+ }
+ }
+ }
+ console.log(`Missing\n dependencies: ${missingDependencies},\n dependent chunks: ${missingDependentChunks}\n`);
+ }
+ function getNumberOfCycles(chunks) {
+ const parents = new Set();
+ const analysedChunks = new Set();
+ let cycles = 0;
+ const analyseChunk = (chunk) => {
+ for (const dependency of chunk.dependencies) {
+ if (parents.has(dependency)) {
+ if (!analysedChunks.has(dependency)) {
+ cycles++;
+ }
+ continue;
+ }
+ parents.add(dependency);
+ analyseChunk(dependency);
+ }
+ analysedChunks.add(chunk);
+ };
+ for (const chunk of chunks) {
+ if (!parents.has(chunk)) {
+ parents.add(chunk);
+ analyseChunk(chunk);
+ }
+ }
+ return cycles;
  }
 
  // ported from https://github.com/substack/node-commondir
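getChunkEntryDistance now counts how many entry indices differ between the two chunks' dependent-entry sets (a symmetric difference) and returns Infinity when a subset relationship is required but violated. A small standalone sketch of the same logic (the function name and sets are mine, not part of the bundle):

    // Sketch: distance = entries only in target + entries only in source;
    // with enforceSubset, any source-only entry disqualifies the merge.
    function entryDistance(sourceEntries, targetEntries, enforceSubset) {
      let distance = 0;
      for (const entry of targetEntries) {
        if (!sourceEntries.has(entry)) distance++;
      }
      for (const entry of sourceEntries) {
        if (!targetEntries.has(entry)) {
          if (enforceSubset) return Infinity;
          distance++;
        }
      }
      return distance;
    }

    console.log(entryDistance(new Set([0, 1]), new Set([0, 1, 2]), true));  // 1
    console.log(entryDistance(new Set([0, 3]), new Set([0, 1, 2]), true));  // Infinity
    console.log(entryDistance(new Set([0, 3]), new Set([0, 1, 2]), false)); // 3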
package/dist/rollup.d.ts CHANGED
@@ -52,7 +52,7 @@ export interface ExistingDecodedSourceMap {
  names: string[];
  sourceRoot?: string;
  sources: string[];
- sourcesContent?: string[];
+ sourcesContent?: (string | null)[];
  version: number;
  }
 
@@ -62,7 +62,7 @@ export interface ExistingRawSourceMap {
  names: string[];
  sourceRoot?: string;
  sources: string[];
- sourcesContent?: string[];
+ sourcesContent?: (string | null)[];
  version: number;
  }
 
@@ -79,7 +79,7 @@ export interface SourceMap {
  mappings: string;
  names: string[];
  sources: string[];
- sourcesContent: string[];
+ sourcesContent: (string | null)[];
  version: number;
  toString(): string;
  toUrl(): string;
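The typings now allow null entries in sourcesContent, matching the source map format, where a source's content may be unavailable. A short sketch of defensively handling such entries; the map literal is illustrative only:

    // Sketch: sourcesContent entries may now be null for sources whose content is unknown.
    const map = {
      sources: ['a.js', 'b.js'],
      sourcesContent: ['console.log("a");', null]
    };
    map.sourcesContent.forEach((content, index) => {
      if (content === null) {
        console.log(`No inline content for ${map.sources[index]}`);
      }
    });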
package/dist/rollup.js CHANGED
@@ -1,7 +1,7 @@
  /*
  @license
- Rollup.js v3.15.0
- Fri, 10 Feb 2023 05:20:05 GMT - commit 5d81532f688383a8aeaf6a099da2b0205e8b8609
+ Rollup.js v3.15.1-1
+ Tue, 14 Feb 2023 09:30:15 GMT - commit 5e91b2d7a9f833cf9e11d98c96c4cba3fac195a4
 
  https://github.com/rollup/rollup
 
@@ -31,7 +31,7 @@ function _interopNamespaceDefault(e) {
 
  const tty__namespace = /*#__PURE__*/_interopNamespaceDefault(tty);
 
- var version$1 = "3.15.0";
+ var version$1 = "3.15.1-1";
 
  function ensureArray$1(items) {
  if (Array.isArray(items)) {
@@ -1641,6 +1641,9 @@ class SourceMap {
  this.sourcesContent = properties.sourcesContent;
  this.names = properties.names;
  this.mappings = encode(properties.mappings);
+ if (typeof properties.x_google_ignoreList !== 'undefined') {
+ this.x_google_ignoreList = properties.x_google_ignoreList;
+ }
  }
 
  toString() {
@@ -16638,7 +16641,7 @@ function analyzeModuleGraph(entries) {
  }
  }
  return {
- allEntries,
+ allEntries: [...allEntries],
  dependentEntriesByModule,
  dynamicallyDependentEntriesByDynamicEntry: getDynamicallyDependentEntriesByDynamicEntry(dependentEntriesByModule, dynamicEntries)
  };
@@ -16712,7 +16715,7 @@ function createChunks(allEntries, assignedEntriesByModule, minChunkSize) {
  alias: null,
  modules
  }))
- : getOptimizedChunks(chunkModulesBySignature, minChunkSize).map(({ modules }) => ({
+ : getOptimizedChunks(chunkModulesBySignature, allEntries.length, minChunkSize).map(({ modules }) => ({
  alias: null,
  modules
  }));
@@ -16736,49 +16739,109 @@ function getChunkModulesBySignature(assignedEntriesByModule, allEntries) {
  }
  /**
  * This function tries to get rid of small chunks by merging them with other
- * chunks. In order to merge chunks, one must obey the following rule:
- * - When merging several chunks, at most one of the chunks can have side
- * effects
- * - When one of the chunks has side effects, the entry points depending on that
- * chunk need to be a super set of the entry points depending on the other
- * chunks
- * - Pure chunks can always be merged
- * - We use the entry point dependence signature to calculate "chunk distance",
- * i.e. how likely it is that two chunks are loaded together
+ * chunks.
+ *
+ * We can only merge chunks safely if after the merge, loading any entry point
+ * in any allowed order will not trigger side effects that should not have been
+ * triggered. While side effects are usually things like global function calls,
+ * global variable mutations or potentially thrown errors, details do not
+ * matter here, and we just discern chunks without side effects (pure chunks)
+ * from other chunks.
+ *
+ * As a first step, we assign each pre-generated chunk with side effects a
+ * label. I.e. we have side effect "A" if the non-pure chunk "A" is loaded.
+ *
+ * Now to determine the side effects of loading a chunk, one also has to take
+ * the side effects of its dependencies into account. So if A depends on B
+ * (A -> B) and both have side effects, loading A triggers effects AB.
+ *
+ * Now from the previous step we know that each chunk is uniquely determine by
+ * the entry points that depend on it and cause it to load, which we will call
+ * its dependent entry points.
+ *
+ * E.g. if X -> A and Y -> A, then the dependent entry points of A are XY.
+ * Starting from that idea, we can determine a set of chunks—and thus a set
+ * of side effects—that must have been triggered if a certain chunk has been
+ * loaded. Basically, it is the intersection of all chunks loaded by the
+ * dependent entry points of a given chunk. We call the corresponding side
+ * effects the correlated side effects of that chunk.
+ *
+ * Example:
+ * X -> ABC, Y -> ADE, A-> F, B -> D
+ * Then taking dependencies into account, X -> ABCDF, Y -> ADEF
+ * The intersection is ADF. So we know that when A is loaded, D and F must also
+ * be in memory even though neither D nor A is a dependency of the other.
+ * If all have side effects, we call ADF the correlated side effects of A. The
+ * correlated side effects need to remain constant when merging chunks.
+ *
+ * In contrast, we have the dependency side effects of A, which represents
+ * the side effects we trigger if we directly load A. In this example, the
+ * dependency side effects are AF.
+ * For entry chunks, dependency and correlated side effects are the same.
+ *
+ * With these concepts, merging chunks is allowed if the correlated side effects
+ * of each entry do not change. Thus, we are allowed to merge two chunks if
+ * a) the dependency side effects of each chunk are a subset of the correlated
+ * side effects of the other chunk, so no additional side effects are
+ * triggered for any entry, or
+ * b) The signature of chunk A is a subset of the signature of chunk B while the
+ * dependency side effects of A are a subset of the correlated side effects
+ * of B. Because in that scenario, whenever A is loaded, B is loaded as well.
+ * But there are cases when B is loaded where A is not loaded. So if we merge
+ * the chunks, all dependency side effects of A will be added to the
+ * correlated side effects of B, and as the latter is not allowed to change,
+ * the former need to be a subset of the latter.
+ *
+ * Another consideration when merging small chunks into other chunks is to avoid
+ * that too much additional code is loaded. This is achieved when the dependent
+ * entries of the small chunk are a subset of the dependent entries of the other
+ * chunk. Because then when the small chunk is loaded, the other chunk was
+ * loaded/in memory anyway, so at most when the other chunk is loaded, the
+ * additional size of the small chunk is loaded unnecessarily.
+ *
+ * So the algorithm performs merges in two passes:
+ * 1. First we try to merge small chunks A only into other chunks B if the
+ * dependent entries of A are a subset of the dependent entries of B and the
+ * dependency side effects of A are a subset of the correlated side effects
+ * of B.
+ * 2. Only then for all remaining small chunks, we look for arbitrary merges
+ * following the above rules (a) and (b), starting with the smallest chunks
+ * to look for possible merge targets.
  */
- function getOptimizedChunks(chunkModulesBySignature, minChunkSize) {
+ function getOptimizedChunks(chunkModulesBySignature, numberOfEntries, minChunkSize) {
  timeStart('optimize chunks', 3);
- const chunkPartition = getPartitionedChunks(chunkModulesBySignature, minChunkSize);
- if (chunkPartition.small.sideEffect.size > 0) {
- mergeChunks(chunkPartition.small.sideEffect, [chunkPartition.small.pure, chunkPartition.big.pure], minChunkSize, chunkPartition);
- }
- if (chunkPartition.small.pure.size > 0) {
- mergeChunks(chunkPartition.small.pure, [chunkPartition.small.pure, chunkPartition.big.sideEffect, chunkPartition.big.pure], minChunkSize, chunkPartition);
+ const chunkPartition = getPartitionedChunks(chunkModulesBySignature, numberOfEntries, minChunkSize);
+ if (chunkPartition.small.size > 0) {
+ mergeChunks(chunkPartition, minChunkSize);
  }
  timeEnd('optimize chunks', 3);
- return [
- ...chunkPartition.small.sideEffect,
- ...chunkPartition.small.pure,
- ...chunkPartition.big.sideEffect,
- ...chunkPartition.big.pure
- ];
+ return [...chunkPartition.small, ...chunkPartition.big];
  }
  const CHAR_DEPENDENT = 'X';
  const CHAR_INDEPENDENT = '_';
- const CHAR_CODE_DEPENDENT = CHAR_DEPENDENT.charCodeAt(0);
- function getPartitionedChunks(chunkModulesBySignature, minChunkSize) {
- const smallPureChunks = [];
- const bigPureChunks = [];
- const smallSideEffectChunks = [];
- const bigSideEffectChunks = [];
+ function getPartitionedChunks(chunkModulesBySignature, numberOfEntries, minChunkSize) {
+ const smallChunks = [];
+ const bigChunks = [];
  const chunkByModule = new Map();
+ const sideEffectsByEntry = [];
+ for (let index = 0; index < numberOfEntries; index++) {
+ sideEffectsByEntry.push(new Set());
+ }
  for (const [signature, modules] of Object.entries(chunkModulesBySignature)) {
+ const dependentEntries = new Set();
+ for (let position = 0; position < numberOfEntries; position++) {
+ if (signature[position] === CHAR_DEPENDENT) {
+ dependentEntries.add(position);
+ }
+ }
  const chunkDescription = {
+ correlatedSideEffects: new Set(),
  dependencies: new Set(),
  dependentChunks: new Set(),
+ dependentEntries,
  modules,
  pure: true,
- signature,
+ sideEffects: new Set(),
  size: 0
  };
  let size = 0;
@@ -16792,25 +16855,27 @@ function getPartitionedChunks(chunkModulesBySignature, minChunkSize) {
  }
  chunkDescription.pure = pure;
  chunkDescription.size = size;
- (size < minChunkSize
- ? pure
- ? smallPureChunks
- : smallSideEffectChunks
- : pure
- ? bigPureChunks
- : bigSideEffectChunks).push(chunkDescription);
- }
- sortChunksAndAddDependencies([bigPureChunks, bigSideEffectChunks, smallPureChunks, smallSideEffectChunks], chunkByModule);
+ if (!pure) {
+ for (const entryIndex of dependentEntries) {
+ sideEffectsByEntry[entryIndex].add(signature);
+ }
+ // In the beginning, each chunk is only its own side effect. After
+ // merging, additional side effects can accumulate.
+ chunkDescription.sideEffects.add(signature);
+ }
+ (size < minChunkSize ? smallChunks : bigChunks).push(chunkDescription);
+ }
+ sortChunksAndAddDependenciesAndEffects([bigChunks, smallChunks], chunkByModule, sideEffectsByEntry);
  return {
- big: { pure: new Set(bigPureChunks), sideEffect: new Set(bigSideEffectChunks) },
- small: { pure: new Set(smallPureChunks), sideEffect: new Set(smallSideEffectChunks) }
+ big: new Set(bigChunks),
+ small: new Set(smallChunks)
  };
  }
- function sortChunksAndAddDependencies(chunkLists, chunkByModule) {
+ function sortChunksAndAddDependenciesAndEffects(chunkLists, chunkByModule, sideEffectsByEntry) {
  for (const chunks of chunkLists) {
- chunks.sort(compareChunks);
+ chunks.sort(compareChunkSize);
  for (const chunk of chunks) {
- const { dependencies, modules } = chunk;
+ const { dependencies, modules, correlatedSideEffects, dependentEntries } = chunk;
  for (const module of modules) {
  for (const dependency of module.getDependenciesToBeIncluded()) {
  const dependencyChunk = chunkByModule.get(dependency);
@@ -16820,89 +16885,133 @@ function sortChunksAndAddDependencies(chunkLists, chunkByModule) {
  }
  }
  }
+ let firstEntry = true;
+ // Correlated side effects is the intersection of all entry side effects
+ for (const entryIndex of dependentEntries) {
+ const entryEffects = sideEffectsByEntry[entryIndex];
+ if (firstEntry) {
+ for (const sideEffect of entryEffects) {
+ correlatedSideEffects.add(sideEffect);
+ }
+ firstEntry = false;
+ }
+ else {
+ for (const sideEffect of correlatedSideEffects) {
+ if (!entryEffects.has(sideEffect)) {
+ correlatedSideEffects.delete(sideEffect);
+ }
+ }
+ }
+ }
  }
  }
  }
- function compareChunks({ size: sizeA }, { size: sizeB }) {
+ function compareChunkSize({ size: sizeA }, { size: sizeB }) {
  return sizeA - sizeB;
  }
- function mergeChunks(chunksToBeMerged, targetChunks, minChunkSize, chunkPartition) {
- for (const mergedChunk of chunksToBeMerged) {
- let closestChunk = null;
- let closestChunkDistance = Infinity;
- const { signature, modules, pure, size } = mergedChunk;
- for (const targetChunk of concatLazy(targetChunks)) {
- if (mergedChunk === targetChunk)
- continue;
- // Possible improvement:
- // For dynamic entries depending on a pure chunk, it is safe to merge that
- // chunk into the chunk doing the dynamic import (i.e. into an "already
- // loaded chunk") even if it is not pure.
- // One way of handling this could be to add all "already loaded entries"
- // of the dynamic importers into the signature as well. That could also
- // change the way we do code-splitting for already loaded entries.
- const distance = pure
- ? getSignatureDistance(signature, targetChunk.signature, !targetChunk.pure)
- : getSignatureDistance(targetChunk.signature, signature, true);
- if (distance < closestChunkDistance && isValidMerge(mergedChunk, targetChunk)) {
- if (distance === 1) {
+ function mergeChunks(chunkPartition, minChunkSize) {
+ console.log('---- Initial chunks');
+ printConsistencyCheck(chunkPartition);
+ for (const allowArbitraryMerges of [false, true]) {
+ for (const mergedChunk of chunkPartition.small) {
+ let closestChunk = null;
+ let closestChunkDistance = Infinity;
+ const { modules, pure, size } = mergedChunk;
+ for (const targetChunk of concatLazy([chunkPartition.small, chunkPartition.big])) {
+ if (mergedChunk === targetChunk)
+ continue;
+ // If both chunks are small, we also allow for unrelated merges during
+ // the first pass
+ const onlySubsetMerge = !allowArbitraryMerges && targetChunk.size >= minChunkSize;
+ const distance = getChunkEntryDistance(mergedChunk, targetChunk, onlySubsetMerge);
+ if (distance < closestChunkDistance &&
+ isValidMerge(mergedChunk, targetChunk, onlySubsetMerge)) {
  closestChunk = targetChunk;
- break;
+ closestChunkDistance = distance;
  }
- closestChunk = targetChunk;
- closestChunkDistance = distance;
- }
- }
- if (closestChunk) {
- chunksToBeMerged.delete(mergedChunk);
- getChunksInPartition(closestChunk, minChunkSize, chunkPartition).delete(closestChunk);
- closestChunk.modules.push(...modules);
- closestChunk.size += size;
- closestChunk.pure && (closestChunk.pure = pure);
- closestChunk.signature = mergeSignatures(signature, closestChunk.signature);
- const { dependencies, dependentChunks } = closestChunk;
- for (const dependency of mergedChunk.dependencies) {
- dependencies.add(dependency);
  }
- for (const dependentChunk of mergedChunk.dependentChunks) {
- dependentChunks.add(dependentChunk);
- dependentChunk.dependencies.delete(mergedChunk);
- dependentChunk.dependencies.add(closestChunk);
+ if (closestChunk) {
+ chunkPartition.small.delete(mergedChunk);
+ getChunksInPartition(closestChunk, minChunkSize, chunkPartition).delete(closestChunk);
+ closestChunk.modules.push(...modules);
+ closestChunk.size += size;
+ closestChunk.pure && (closestChunk.pure = pure);
+ const { correlatedSideEffects, dependencies, dependentChunks, dependentEntries, sideEffects } = closestChunk;
+ for (const sideEffect of correlatedSideEffects) {
+ if (!mergedChunk.correlatedSideEffects.has(sideEffect)) {
+ correlatedSideEffects.delete(sideEffect);
+ }
+ }
+ for (const entry of mergedChunk.dependentEntries) {
+ dependentEntries.add(entry);
+ }
+ for (const sideEffect of mergedChunk.sideEffects) {
+ sideEffects.add(sideEffect);
+ }
+ for (const dependency of mergedChunk.dependencies) {
+ dependencies.add(dependency);
+ dependency.dependentChunks.delete(mergedChunk);
+ dependency.dependentChunks.add(closestChunk);
+ }
+ for (const dependentChunk of mergedChunk.dependentChunks) {
+ dependentChunks.add(dependentChunk);
+ dependentChunk.dependencies.delete(mergedChunk);
+ dependentChunk.dependencies.add(closestChunk);
+ }
+ dependencies.delete(closestChunk);
+ getChunksInPartition(closestChunk, minChunkSize, chunkPartition).add(closestChunk);
  }
- dependencies.delete(closestChunk);
- getChunksInPartition(closestChunk, minChunkSize, chunkPartition).add(closestChunk);
  }
+ console.log('---- After run with arbitrary merges:', allowArbitraryMerges);
+ printConsistencyCheck(chunkPartition);
  }
  }
  // Merging will not produce cycles if none of the direct non-merged dependencies
  // of a chunk have the other chunk as a transitive dependency
- function isValidMerge(mergedChunk, targetChunk) {
- return !(hasTransitiveDependency(mergedChunk, targetChunk) ||
- hasTransitiveDependency(targetChunk, mergedChunk));
- }
- function hasTransitiveDependency(dependentChunk, dependencyChunk) {
+ function isValidMerge(mergedChunk, targetChunk, onlySubsetMerge) {
+ return !(hasTransitiveDependencyOrNonCorrelatedSideEffect(mergedChunk, targetChunk, true) ||
+ hasTransitiveDependencyOrNonCorrelatedSideEffect(targetChunk, mergedChunk, !onlySubsetMerge));
+ }
+ function hasTransitiveDependencyOrNonCorrelatedSideEffect(dependentChunk, dependencyChunk, checkSideEffects) {
+ const { correlatedSideEffects } = dependencyChunk;
+ if (checkSideEffects) {
+ for (const sideEffect of dependentChunk.sideEffects) {
+ if (!correlatedSideEffects.has(sideEffect)) {
+ return true;
+ }
+ }
+ }
  const chunksToCheck = new Set(dependentChunk.dependencies);
- for (const { dependencies } of chunksToCheck) {
+ for (const { dependencies, sideEffects } of chunksToCheck) {
  for (const dependency of dependencies) {
  if (dependency === dependencyChunk) {
  return true;
  }
  chunksToCheck.add(dependency);
  }
+ if (checkSideEffects) {
+ for (const sideEffect of sideEffects) {
+ if (!correlatedSideEffects.has(sideEffect)) {
+ return true;
+ }
+ }
+ }
  }
  return false;
  }
  function getChunksInPartition(chunk, minChunkSize, chunkPartition) {
- const subPartition = chunk.size < minChunkSize ? chunkPartition.small : chunkPartition.big;
- return chunk.pure ? subPartition.pure : subPartition.sideEffect;
+ return chunk.size < minChunkSize ? chunkPartition.small : chunkPartition.big;
  }
- function getSignatureDistance(sourceSignature, targetSignature, enforceSubset) {
+ function getChunkEntryDistance({ dependentEntries: sourceEntries }, { dependentEntries: targetEntries }, enforceSubest) {
  let distance = 0;
- const { length } = sourceSignature;
- for (let index = 0; index < length; index++) {
- const sourceValue = sourceSignature.charCodeAt(index);
- if (sourceValue !== targetSignature.charCodeAt(index)) {
- if (enforceSubset && sourceValue === CHAR_CODE_DEPENDENT) {
+ for (const entryIndex of targetEntries) {
+ if (!sourceEntries.has(entryIndex)) {
+ distance++;
+ }
+ }
+ for (const entryIndex of sourceEntries) {
+ if (!targetEntries.has(entryIndex)) {
+ if (enforceSubest) {
  return Infinity;
  }
  distance++;
@@ -16910,17 +17019,57 @@ function getSignatureDistance(sourceSignature, targetSignature, enforceSubset) {
  }
  return distance;
  }
- function mergeSignatures(sourceSignature, targetSignature) {
- let signature = '';
- const { length } = sourceSignature;
- for (let index = 0; index < length; index++) {
- signature +=
- sourceSignature.charCodeAt(index) === CHAR_CODE_DEPENDENT ||
- targetSignature.charCodeAt(index) === CHAR_CODE_DEPENDENT
- ? CHAR_DEPENDENT
- : CHAR_INDEPENDENT;
- }
- return signature;
+ function printConsistencyCheck(partition) {
+ console.log(`Chunks\n small: ${partition.small.size},\n large: ${partition.big.size}`);
+ const chunks = new Set([...partition.big, ...partition.small]);
+ console.log('Number of cycles:', getNumberOfCycles(chunks));
+ let missingDependencies = 0;
+ let missingDependentChunks = 0;
+ const seenModules = new Set();
+ for (const { modules, dependencies, dependentChunks } of chunks) {
+ for (const module of modules) {
+ if (seenModules.has(module)) {
+ console.log(`Module ${module.id} is duplicated between chunks.`);
+ }
+ seenModules.add(module);
+ }
+ for (const dependency of dependencies) {
+ if (!chunks.has(dependency)) {
+ missingDependencies++;
+ }
+ }
+ for (const dependency of dependentChunks) {
+ if (!chunks.has(dependency)) {
+ missingDependentChunks++;
+ }
+ }
+ }
+ console.log(`Missing\n dependencies: ${missingDependencies},\n dependent chunks: ${missingDependentChunks}\n`);
+ }
+ function getNumberOfCycles(chunks) {
+ const parents = new Set();
+ const analysedChunks = new Set();
+ let cycles = 0;
+ const analyseChunk = (chunk) => {
+ for (const dependency of chunk.dependencies) {
+ if (parents.has(dependency)) {
+ if (!analysedChunks.has(dependency)) {
+ cycles++;
+ }
+ continue;
+ }
+ parents.add(dependency);
+ analyseChunk(dependency);
+ }
+ analysedChunks.add(chunk);
+ };
+ for (const chunk of chunks) {
+ if (!parents.has(chunk)) {
+ parents.add(chunk);
+ analyseChunk(chunk);
+ }
+ }
+ return cycles;
  }
 
  // ported from https://github.com/substack/node-commondir
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "rollup",
- "version": "3.15.0",
+ "version": "3.15.1-1",
  "description": "Next-generation ES module bundler",
  "main": "dist/rollup.js",
  "module": "dist/es/rollup.js",
@@ -116,7 +116,7 @@
  "is-reference": "^3.0.1",
  "lint-staged": "^13.1.0",
  "locate-character": "^2.0.5",
- "magic-string": "^0.27.0",
+ "magic-string": "^0.29.0",
  "mocha": "^10.2.0",
  "nyc": "^15.1.0",
  "pinia": "^2.0.29",