@atlaspack/core 2.35.0 → 2.38.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (38)
  1. package/CHANGELOG.md +107 -0
  2. package/dist/Atlaspack.js +41 -6
  3. package/dist/atlaspack-v3/AtlaspackV3.js +7 -4
  4. package/dist/atlaspack-v3/fs.js +1 -0
  5. package/dist/atlaspack-v3/worker/worker.js +11 -2
  6. package/dist/requests/AssetGraphRequestRust.js +5 -1
  7. package/dist/requests/AtlaspackBuildRequest.js +3 -0
  8. package/dist/requests/BundleGraphRequest.js +9 -6
  9. package/dist/requests/BundleGraphRequestRust.js +6 -1
  10. package/dist/requests/BundleGraphRequestUtils.js +133 -2
  11. package/dist/requests/WriteBundleRequest.js +168 -17
  12. package/lib/Atlaspack.js +47 -15
  13. package/lib/atlaspack-v3/AtlaspackV3.js +7 -4
  14. package/lib/atlaspack-v3/fs.js +1 -0
  15. package/lib/atlaspack-v3/worker/worker.js +13 -2
  16. package/lib/requests/AssetGraphRequestRust.js +5 -1
  17. package/lib/requests/AtlaspackBuildRequest.js +9 -0
  18. package/lib/requests/BundleGraphRequest.js +10 -7
  19. package/lib/requests/BundleGraphRequestRust.js +6 -1
  20. package/lib/requests/BundleGraphRequestUtils.js +132 -2
  21. package/lib/requests/WriteBundleRequest.js +181 -13
  22. package/lib/types/atlaspack-v3/AtlaspackV3.d.ts +3 -2
  23. package/lib/types/atlaspack-v3/fs.d.ts +1 -0
  24. package/lib/types/requests/BundleGraphRequestUtils.d.ts +7 -0
  25. package/lib/types/requests/WriteBundleRequest.d.ts +33 -0
  26. package/package.json +15 -15
  27. package/src/Atlaspack.ts +54 -12
  28. package/src/atlaspack-v3/AtlaspackV3.ts +22 -4
  29. package/src/atlaspack-v3/fs.ts +5 -0
  30. package/src/atlaspack-v3/worker/worker.ts +11 -2
  31. package/src/requests/AssetGraphRequestRust.ts +5 -1
  32. package/src/requests/AtlaspackBuildRequest.ts +4 -0
  33. package/src/requests/BundleGraphRequest.ts +11 -6
  34. package/src/requests/BundleGraphRequestRust.ts +8 -1
  35. package/src/requests/BundleGraphRequestUtils.ts +157 -1
  36. package/src/requests/WriteBundleRequest.ts +202 -22
  37. package/test/requests/WriteBundleRequest.test.ts +363 -0
  38. package/tsconfig.tsbuildinfo +1 -1
@@ -17,11 +17,13 @@ import type {
17
17
  } from '../types';
18
18
 
19
19
  import assert from 'assert';
20
+ import fs from 'fs';
20
21
  import nullthrows from 'nullthrows';
22
+ import path from 'path';
21
23
  import {PluginLogger} from '@atlaspack/logger';
22
24
  import ThrowableDiagnostic, {errorToDiagnostic} from '@atlaspack/diagnostic';
23
25
  import {unique, setSymmetricDifference} from '@atlaspack/utils';
24
- import InternalBundleGraph from '../BundleGraph';
26
+ import InternalBundleGraph, {bundleGraphEdgeTypes} from '../BundleGraph';
25
27
  import BundleGraph from '../public/BundleGraph';
26
28
  import {Bundle, NamedBundle} from '../public/Bundle';
27
29
  import PluginOptions from '../public/PluginOptions';
@@ -66,6 +68,160 @@ export function validateBundles(bundleGraph: InternalBundleGraph): void {
66
68
  );
67
69
  }
68
70
 
71
+ /**
72
+ * Dump a canonical JSON snapshot of the bundle graph for parity comparison.
73
+ * Gated by ATLASPACK_DUMP_BUNDLE_GRAPH environment variable which specifies the output directory.
74
+ * The snapshot captures bundle identity, type, contained assets, and bundle group structure
75
+ * in a deterministic, sorted format suitable for diffing.
76
+ */
77
+ export function dumpBundleGraphSnapshot(
78
+ bundleGraph: InternalBundleGraph,
79
+ variant: 'js' | 'rust',
80
+ ): void {
81
+ let outDir = process.env.ATLASPACK_DUMP_BUNDLE_GRAPH;
82
+ if (!outDir) return;
83
+
84
+ let filename =
85
+ variant === 'js' ? 'bundle-graph-js.json' : 'bundle-graph-rust.json';
86
+ let outPath = path.join(outDir, filename);
87
+
88
+ fs.mkdirSync(outDir, {recursive: true});
89
+
90
+ let bundles = bundleGraph.getBundles();
91
+ let bundlesSnapshot = bundles
92
+ .map((bundle) => {
93
+ let bundleNodeId = bundleGraph._graph.getNodeIdByContentKey(bundle.id);
94
+ let containedAssetNodeIds = bundleGraph._graph.getNodeIdsConnectedFrom(
95
+ bundleNodeId,
96
+ bundleGraphEdgeTypes.contains,
97
+ );
98
+ let containedAssets = containedAssetNodeIds
99
+ .map((nodeId) => bundleGraph._graph.getNode(nodeId))
100
+ .flatMap((node) => {
101
+ if (node?.type !== 'asset') return [];
102
+ return [
103
+ {
104
+ id: node.value.id,
105
+ filePath: fromProjectPathRelative(node.value.filePath),
106
+ },
107
+ ];
108
+ })
109
+ .sort((a, b) => a.filePath.localeCompare(b.filePath));
110
+
111
+ // Resolve mainEntry and entry asset file paths
112
+ let mainEntryPath: string | null = null;
113
+ let entryAssetPaths: string[] = [];
114
+ if (bundle.mainEntryId) {
115
+ let mainEntryNodeId = bundleGraph._graph.getNodeIdByContentKey(
116
+ bundle.mainEntryId,
117
+ );
118
+ let mainEntryNode = bundleGraph._graph.getNode(mainEntryNodeId);
119
+ if (mainEntryNode?.type === 'asset') {
120
+ mainEntryPath = fromProjectPathRelative(mainEntryNode.value.filePath);
121
+ }
122
+ }
123
+ for (let entryId of bundle.entryAssetIds) {
124
+ let entryNodeId = bundleGraph._graph.getNodeIdByContentKey(entryId);
125
+ let entryNode = bundleGraph._graph.getNode(entryNodeId);
126
+ if (entryNode?.type === 'asset') {
127
+ entryAssetPaths.push(
128
+ fromProjectPathRelative(entryNode.value.filePath),
129
+ );
130
+ }
131
+ }
132
+ entryAssetPaths.sort();
133
+
134
+ return {
135
+ id: bundle.id,
136
+ type: bundle.type,
137
+ bundleBehavior: bundle.bundleBehavior ?? null,
138
+ needsStableName: bundle.needsStableName,
139
+ isSplittable: bundle.isSplittable,
140
+ isPlaceholder: bundle.isPlaceholder,
141
+ mainEntryPath,
142
+ entryAssetPaths,
143
+ assets: containedAssets.map((a) => a.filePath),
144
+ };
145
+ })
146
+ .sort((a, b) => {
147
+ // Sort by mainEntryPath first, then by sorted assets as tiebreaker
148
+ let aKey = a.mainEntryPath || a.assets.join(',');
149
+ let bKey = b.mainEntryPath || b.assets.join(',');
150
+ return aKey.localeCompare(bKey);
151
+ });
152
+
153
+ let bundleGroupsSnapshot = bundleGraph._graph.nodes
154
+ .flatMap((node) => {
155
+ if (node?.type !== 'bundle_group') return [];
156
+
157
+ let bundleGroup = node.value;
158
+
159
+ // Resolve entry asset file path
160
+ let entryAssetPath: string | null = null;
161
+ try {
162
+ let entryNodeId = bundleGraph._graph.getNodeIdByContentKey(
163
+ bundleGroup.entryAssetId,
164
+ );
165
+ let entryNode = bundleGraph._graph.getNode(entryNodeId);
166
+ if (entryNode?.type === 'asset') {
167
+ entryAssetPath = fromProjectPathRelative(entryNode.value.filePath);
168
+ }
169
+ } catch {
170
+ // Content key not found
171
+ }
172
+
173
+ let bundlesInGroup = bundleGraph.getBundlesInBundleGroup(bundleGroup);
174
+ let bundlePaths = bundlesInGroup
175
+ .map((b) => {
176
+ // Use mainEntry file path if available, otherwise bundle id as fallback
177
+ if (b.mainEntryId) {
178
+ try {
179
+ let nodeId = bundleGraph._graph.getNodeIdByContentKey(
180
+ b.mainEntryId,
181
+ );
182
+ let node = bundleGraph._graph.getNode(nodeId);
183
+ if (node?.type === 'asset') {
184
+ return fromProjectPathRelative(node.value.filePath);
185
+ }
186
+ } catch {
187
+ // fallback
188
+ }
189
+ }
190
+ return `[bundle:${b.id}]`;
191
+ })
192
+ .sort();
193
+
194
+ return [
195
+ {
196
+ entryAssetPath:
197
+ entryAssetPath ?? `[unknown:${bundleGroup.entryAssetId}]`,
198
+ bundlePaths,
199
+ },
200
+ ];
201
+ })
202
+ .sort((a, b) => a.entryAssetPath.localeCompare(b.entryAssetPath));
203
+
204
+ let totalAssets = bundleGraph._graph.nodes.filter(
205
+ (node) => node?.type === 'asset',
206
+ ).length;
207
+
208
+ let snapshot = {
209
+ version: 1,
210
+ variant,
211
+ stats: {
212
+ totalBundles: bundlesSnapshot.length,
213
+ totalBundleGroups: bundleGroupsSnapshot.length,
214
+ totalAssets,
215
+ },
216
+ bundles: bundlesSnapshot,
217
+ bundleGroups: bundleGroupsSnapshot,
218
+ };
219
+
220
+ fs.writeFileSync(outPath, JSON.stringify(snapshot, null, 2), 'utf8');
221
+ // eslint-disable-next-line no-console
222
+ console.log(`[BundleGraphSnapshot] Wrote ${variant} snapshot to ${outPath}`);
223
+ }
224
+
69
225
  /**
70
226
  * Names a bundle by running through the configured namers until one returns a name.
71
227
  */
@@ -41,12 +41,15 @@ import {PluginTracer, tracer} from '@atlaspack/profiler';
41
41
  import {requestTypes} from '../RequestTracker';
42
42
  import {getFeatureFlag} from '@atlaspack/feature-flags';
43
43
  import {fromEnvironmentId} from '../EnvironmentManager';
44
- import SourceMap from '@atlaspack/source-map';
44
+ import SourceMap, {decodeVLQ, encodeVLQ} from '@atlaspack/source-map';
45
45
 
46
46
  const HASH_REF_PREFIX_LEN = HASH_REF_PREFIX.length;
47
47
  const BOUNDARY_LENGTH = HASH_REF_PREFIX.length + 32 - 1;
48
48
  const HASH_REF_PLACEHOLDER_LEN = HASH_REF_PREFIX_LEN + HASH_REF_HASH_LEN;
49
49
 
50
+ // The JSON key prefix we scan for in the source map stream.
51
+ const MAPPINGS_KEY_BUF = Buffer.from('"mappings":"');
52
+
50
53
  export type HashRefReplacement = {
51
54
  line: number;
52
55
  column: number;
@@ -187,35 +190,17 @@ async function run({input, options, api}) {
187
190
 
188
191
  const hasSourceMap = await options.cache.has(mapKey);
189
192
  if (mapKey && env.sourceMap && !env.sourceMap.inline && hasSourceMap) {
193
+ const mapEntry = await options.cache.getBlob(mapKey);
190
194
  let mapStream: Readable;
191
195
  if (
192
196
  getFeatureFlag('fixSourceMapHashRefs') &&
193
197
  bundleReplacements &&
194
198
  bundleReplacements.length > 0
195
199
  ) {
196
- const mapEntry = await options.cache.getBlob(mapKey);
197
- const mapBuffer = Buffer.isBuffer(mapEntry)
198
- ? mapEntry
199
- : Buffer.from(mapEntry);
200
- const projectRoot =
201
- typeof options.projectRoot === 'string'
202
- ? options.projectRoot
203
- : String(options.projectRoot);
204
- const sourceMap = new SourceMap(projectRoot, mapBuffer);
205
- applyReplacementsToSourceMap(sourceMap, bundleReplacements);
206
- const mapJson = await sourceMap.stringify({
207
- format: 'string',
208
- file: name,
209
- sourceRoot: computeSourceMapRoot(bundle, options),
210
- });
211
- mapStream = blobToStream(
212
- Buffer.from(
213
- typeof mapJson === 'string' ? mapJson : JSON.stringify(mapJson),
214
- 'utf8',
215
- ),
200
+ mapStream = blobToStream(mapEntry).pipe(
201
+ new SourceMapHashRefRewriteStream(bundleReplacements),
216
202
  );
217
203
  } else {
218
- const mapEntry = await options.cache.getBlob(mapKey);
219
204
  mapStream = blobToStream(mapEntry);
220
205
  }
221
206
  await writeFiles(
@@ -269,6 +254,201 @@ export function applyReplacementsToSourceMap(
269
254
  }
270
255
  }
271
256
 
257
+ /**
258
+ * Applies hash-ref replacement column offsets directly to a VLQ mappings
259
+ * string without deserializing the full source map into a native struct.
260
+ *
261
+ * Each replacement r describes a hash-ref that was substituted in the output
262
+ * file. r.column is in the progressively-shifted post-replacement coordinate
263
+ * space (matching the already-shifted source map state after all previous
264
+ * offsetColumns calls), so thresholds are applied sequentially against the
265
+ * running absCol values exactly as the native offsetColumns implementation does.
266
+ */
267
+ export function applyReplacementsToVLQMappings(
268
+ mappings: string,
269
+ replacements: HashRefReplacement[],
270
+ ): string {
271
+ if (replacements.length === 0) return mappings;
272
+
273
+ // Group replacements by line (0-indexed), sorted by column ascending.
274
+ const byLine = new Map<number, HashRefReplacement[]>();
275
+ for (const r of replacements) {
276
+ let arr = byLine.get(r.line);
277
+ if (!arr) {
278
+ arr = [];
279
+ byLine.set(r.line, arr);
280
+ }
281
+ arr.push(r);
282
+ }
283
+ for (const arr of byLine.values()) {
284
+ arr.sort((a, b) => a.column - b.column);
285
+ }
286
+
287
+ const lines = mappings.split(';');
288
+ const resultLines: string[] = [];
289
+
290
+ for (let lineIdx = 0; lineIdx < lines.length; lineIdx++) {
291
+ const lineReps = byLine.get(lineIdx);
292
+ if (!lineReps || lineReps.length === 0) {
293
+ resultLines.push(lines[lineIdx]);
294
+ continue;
295
+ }
296
+
297
+ const line = lines[lineIdx];
298
+ if (!line) {
299
+ resultLines.push('');
300
+ continue;
301
+ }
302
+
303
+ // Decode segment column deltas to absolute columns.
304
+ const segments = line.split(',');
305
+ const colVlqEnds: number[] = [];
306
+ const absCols: number[] = [];
307
+ let absCol = 0;
308
+ for (const seg of segments) {
309
+ const {value: colDelta, nextPos} = decodeVLQ(seg, 0);
310
+ absCol += colDelta;
311
+ colVlqEnds.push(nextPos);
312
+ absCols.push(absCol);
313
+ }
314
+
315
+ // Apply each replacement's column shift sequentially against the
316
+ // current absCol values (which have already been adjusted by previous
317
+ // replacements on this line), mirroring the sequential offsetColumns calls.
318
+ for (const r of lineReps) {
319
+ const delta = r.newLength - r.originalLength;
320
+ if (delta === 0) continue;
321
+ const threshold = r.column + r.originalLength;
322
+ for (let i = 0; i < absCols.length; i++) {
323
+ if (absCols[i] >= threshold) {
324
+ absCols[i] += delta;
325
+ }
326
+ }
327
+ }
328
+
329
+ // Re-encode with updated absolute columns; only the leading column VLQ
330
+ // field of each segment changes – the tail bytes are sliced unchanged.
331
+ const resultSegments: string[] = [];
332
+ let prevAbsCol = 0;
333
+ for (let i = 0; i < segments.length; i++) {
334
+ const newDelta = absCols[i] - prevAbsCol;
335
+ prevAbsCol = absCols[i];
336
+ resultSegments.push(
337
+ encodeVLQ(newDelta) + segments[i].slice(colVlqEnds[i]),
338
+ );
339
+ }
340
+
341
+ resultLines.push(resultSegments.join(','));
342
+ }
343
+
344
+ return resultLines.join(';');
345
+ }
346
+
347
+ type StreamState = 'scanning' | 'buffering' | 'passthrough';
348
+
349
+ /**
350
+ * A Transform stream that rewrites the "mappings" VLQ field of a source map
351
+ * JSON to account for hash-ref replacements, without ever loading the full
352
+ * JSON object or the native Rust SourceMapInner into memory.
353
+ *
354
+ * Field order in cached source maps (from partialVlqMapToSourceMap / toVLQ):
355
+ * mappings → sources → sourcesContent → names → version → file → sourceRoot
356
+ *
357
+ * "mappings" is the very first field, so we scan only a tiny header before
358
+ * switching to zero-copy passthrough for the bulk sourcesContent bytes.
359
+ */
360
+ export class SourceMapHashRefRewriteStream extends Transform {
361
+ private replacements: HashRefReplacement[];
362
+ private state: StreamState;
363
+ private scanBuf: Buffer;
364
+ private mappingsBufs: Buffer[];
365
+
366
+ constructor(replacements: HashRefReplacement[]) {
367
+ super();
368
+ this.replacements = replacements;
369
+ this.state = 'scanning';
370
+ this.scanBuf = Buffer.alloc(0);
371
+ this.mappingsBufs = [];
372
+ }
373
+
374
+ // @ts-expect-error TS7006
375
+ _transform(chunk: Buffer, _encoding: string, cb): void {
376
+ if (this.state === 'passthrough') {
377
+ this.push(chunk);
378
+ cb();
379
+ return;
380
+ }
381
+
382
+ if (this.state === 'scanning') {
383
+ const combined = Buffer.concat([this.scanBuf, chunk]);
384
+ const idx = combined.indexOf(MAPPINGS_KEY_BUF);
385
+
386
+ if (idx === -1) {
387
+ // Key not yet found – hold back enough bytes to handle a split key.
388
+ const keepLen = Math.min(combined.length, MAPPINGS_KEY_BUF.length - 1);
389
+ if (combined.length > keepLen) {
390
+ this.push(combined.slice(0, combined.length - keepLen));
391
+ }
392
+ this.scanBuf = combined.slice(combined.length - keepLen);
393
+ cb();
394
+ return;
395
+ }
396
+
397
+ // Emit everything up to and including the key.
398
+ const keyEnd = idx + MAPPINGS_KEY_BUF.length;
399
+ this.push(combined.slice(0, keyEnd));
400
+ this.scanBuf = Buffer.alloc(0);
401
+ this.state = 'buffering';
402
+ this._bufferingTransform(combined.slice(keyEnd), cb);
403
+ return;
404
+ }
405
+
406
+ // state === 'buffering'
407
+ this._bufferingTransform(chunk, cb);
408
+ }
409
+
410
+ // @ts-expect-error TS7006
411
+ private _bufferingTransform(chunk: Buffer, cb): void {
412
+ // Mappings values contain only base64 chars, ';', and ',' – no escaping –
413
+ // so scanning for the closing '"' (0x22) is safe.
414
+ const closeIdx = chunk.indexOf(0x22);
415
+
416
+ if (closeIdx === -1) {
417
+ this.mappingsBufs.push(chunk);
418
+ cb();
419
+ return;
420
+ }
421
+
422
+ this.mappingsBufs.push(chunk.slice(0, closeIdx));
423
+
424
+ // VLQ chars are all ASCII (<128), so latin1 round-trips without loss.
425
+ const mappingsStr = Buffer.concat(this.mappingsBufs).toString('latin1');
426
+ const rewritten = applyReplacementsToVLQMappings(
427
+ mappingsStr,
428
+ this.replacements,
429
+ );
430
+ this.push(Buffer.from(rewritten, 'latin1'));
431
+
432
+ // Emit the closing '"' and everything remaining in one push.
433
+ this.push(chunk.slice(closeIdx));
434
+
435
+ this.state = 'passthrough';
436
+ this.mappingsBufs = [];
437
+ cb();
438
+ }
439
+
440
+ // @ts-expect-error TS7006
441
+ _flush(cb): void {
442
+ if (this.state === 'scanning' && this.scanBuf.length > 0) {
443
+ this.push(this.scanBuf);
444
+ } else if (this.state === 'buffering') {
445
+ // Malformed JSON – flush whatever we buffered as-is.
446
+ this.push(Buffer.concat(this.mappingsBufs));
447
+ }
448
+ cb();
449
+ }
450
+ }
451
+
272
452
  /**
273
453
  * Computes the sourceRoot for a source map file. This is the relative path from
274
454
  * the output directory back to the project root, so that source paths (stored