@atlaspack/core 2.35.0 → 2.36.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -41,12 +41,15 @@ import {PluginTracer, tracer} from '@atlaspack/profiler';
41
41
  import {requestTypes} from '../RequestTracker';
42
42
  import {getFeatureFlag} from '@atlaspack/feature-flags';
43
43
  import {fromEnvironmentId} from '../EnvironmentManager';
44
- import SourceMap from '@atlaspack/source-map';
44
+ import SourceMap, {decodeVLQ, encodeVLQ} from '@atlaspack/source-map';
45
45
 
46
46
  const HASH_REF_PREFIX_LEN = HASH_REF_PREFIX.length;
47
47
  const BOUNDARY_LENGTH = HASH_REF_PREFIX.length + 32 - 1;
48
48
  const HASH_REF_PLACEHOLDER_LEN = HASH_REF_PREFIX_LEN + HASH_REF_HASH_LEN;
49
49
 
50
+ // The JSON key prefix we scan for in the source map stream.
51
+ const MAPPINGS_KEY_BUF = Buffer.from('"mappings":"');
52
+
50
53
  export type HashRefReplacement = {
51
54
  line: number;
52
55
  column: number;
@@ -187,35 +190,17 @@ async function run({input, options, api}) {
187
190
 
188
191
  const hasSourceMap = await options.cache.has(mapKey);
189
192
  if (mapKey && env.sourceMap && !env.sourceMap.inline && hasSourceMap) {
193
+ const mapEntry = await options.cache.getBlob(mapKey);
190
194
  let mapStream: Readable;
191
195
  if (
192
196
  getFeatureFlag('fixSourceMapHashRefs') &&
193
197
  bundleReplacements &&
194
198
  bundleReplacements.length > 0
195
199
  ) {
196
- const mapEntry = await options.cache.getBlob(mapKey);
197
- const mapBuffer = Buffer.isBuffer(mapEntry)
198
- ? mapEntry
199
- : Buffer.from(mapEntry);
200
- const projectRoot =
201
- typeof options.projectRoot === 'string'
202
- ? options.projectRoot
203
- : String(options.projectRoot);
204
- const sourceMap = new SourceMap(projectRoot, mapBuffer);
205
- applyReplacementsToSourceMap(sourceMap, bundleReplacements);
206
- const mapJson = await sourceMap.stringify({
207
- format: 'string',
208
- file: name,
209
- sourceRoot: computeSourceMapRoot(bundle, options),
210
- });
211
- mapStream = blobToStream(
212
- Buffer.from(
213
- typeof mapJson === 'string' ? mapJson : JSON.stringify(mapJson),
214
- 'utf8',
215
- ),
200
+ mapStream = blobToStream(mapEntry).pipe(
201
+ new SourceMapHashRefRewriteStream(bundleReplacements),
216
202
  );
217
203
  } else {
218
- const mapEntry = await options.cache.getBlob(mapKey);
219
204
  mapStream = blobToStream(mapEntry);
220
205
  }
221
206
  await writeFiles(
@@ -269,6 +254,201 @@ export function applyReplacementsToSourceMap(
269
254
  }
270
255
  }
271
256
 
257
+ /**
258
+ * Applies hash-ref replacement column offsets directly to a VLQ mappings
259
+ * string without deserializing the full source map into a native struct.
260
+ *
261
+ * Each replacement r describes a hash-ref that was substituted in the output
262
+ * file. r.column is in the progressively-shifted post-replacement coordinate
263
+ * space (matching the already-shifted source map state after all previous
264
+ * offsetColumns calls), so thresholds are applied sequentially against the
265
+ * running absCol values exactly as the native offsetColumns implementation does.
266
+ */
267
+ export function applyReplacementsToVLQMappings(
268
+ mappings: string,
269
+ replacements: HashRefReplacement[],
270
+ ): string {
271
+ if (replacements.length === 0) return mappings;
272
+
273
+ // Group replacements by line (0-indexed), sorted by column ascending.
274
+ const byLine = new Map<number, HashRefReplacement[]>();
275
+ for (const r of replacements) {
276
+ let arr = byLine.get(r.line);
277
+ if (!arr) {
278
+ arr = [];
279
+ byLine.set(r.line, arr);
280
+ }
281
+ arr.push(r);
282
+ }
283
+ for (const arr of byLine.values()) {
284
+ arr.sort((a, b) => a.column - b.column);
285
+ }
286
+
287
+ const lines = mappings.split(';');
288
+ const resultLines: string[] = [];
289
+
290
+ for (let lineIdx = 0; lineIdx < lines.length; lineIdx++) {
291
+ const lineReps = byLine.get(lineIdx);
292
+ if (!lineReps || lineReps.length === 0) {
293
+ resultLines.push(lines[lineIdx]);
294
+ continue;
295
+ }
296
+
297
+ const line = lines[lineIdx];
298
+ if (!line) {
299
+ resultLines.push('');
300
+ continue;
301
+ }
302
+
303
+ // Decode segment column deltas to absolute columns.
304
+ const segments = line.split(',');
305
+ const colVlqEnds: number[] = [];
306
+ const absCols: number[] = [];
307
+ let absCol = 0;
308
+ for (const seg of segments) {
309
+ const {value: colDelta, nextPos} = decodeVLQ(seg, 0);
310
+ absCol += colDelta;
311
+ colVlqEnds.push(nextPos);
312
+ absCols.push(absCol);
313
+ }
314
+
315
+ // Apply each replacement's column shift sequentially against the
316
+ // current absCol values (which have already been adjusted by previous
317
+ // replacements on this line), mirroring the sequential offsetColumns calls.
318
+ for (const r of lineReps) {
319
+ const delta = r.newLength - r.originalLength;
320
+ if (delta === 0) continue;
321
+ const threshold = r.column + r.originalLength;
322
+ for (let i = 0; i < absCols.length; i++) {
323
+ if (absCols[i] >= threshold) {
324
+ absCols[i] += delta;
325
+ }
326
+ }
327
+ }
328
+
329
+ // Re-encode with updated absolute columns; only the leading column VLQ
330
+ // field of each segment changes – the tail bytes are sliced unchanged.
331
+ const resultSegments: string[] = [];
332
+ let prevAbsCol = 0;
333
+ for (let i = 0; i < segments.length; i++) {
334
+ const newDelta = absCols[i] - prevAbsCol;
335
+ prevAbsCol = absCols[i];
336
+ resultSegments.push(
337
+ encodeVLQ(newDelta) + segments[i].slice(colVlqEnds[i]),
338
+ );
339
+ }
340
+
341
+ resultLines.push(resultSegments.join(','));
342
+ }
343
+
344
+ return resultLines.join(';');
345
+ }
346
+
347
+ type StreamState = 'scanning' | 'buffering' | 'passthrough';
348
+
349
+ /**
350
+ * A Transform stream that rewrites the "mappings" VLQ field of a source map
351
+ * JSON to account for hash-ref replacements, without ever loading the full
352
+ * JSON object or the native Rust SourceMapInner into memory.
353
+ *
354
+ * Field order in cached source maps (from partialVlqMapToSourceMap / toVLQ):
355
+ * mappings → sources → sourcesContent → names → version → file → sourceRoot
356
+ *
357
+ * "mappings" is the very first field, so we scan only a tiny header before
358
+ * switching to zero-copy passthrough for the bulk sourcesContent bytes.
359
+ */
360
+ export class SourceMapHashRefRewriteStream extends Transform {
361
+ private replacements: HashRefReplacement[];
362
+ private state: StreamState;
363
+ private scanBuf: Buffer;
364
+ private mappingsBufs: Buffer[];
365
+
366
+ constructor(replacements: HashRefReplacement[]) {
367
+ super();
368
+ this.replacements = replacements;
369
+ this.state = 'scanning';
370
+ this.scanBuf = Buffer.alloc(0);
371
+ this.mappingsBufs = [];
372
+ }
373
+
374
+ // @ts-expect-error TS7006
375
+ _transform(chunk: Buffer, _encoding: string, cb): void {
376
+ if (this.state === 'passthrough') {
377
+ this.push(chunk);
378
+ cb();
379
+ return;
380
+ }
381
+
382
+ if (this.state === 'scanning') {
383
+ const combined = Buffer.concat([this.scanBuf, chunk]);
384
+ const idx = combined.indexOf(MAPPINGS_KEY_BUF);
385
+
386
+ if (idx === -1) {
387
+ // Key not yet found – hold back enough bytes to handle a split key.
388
+ const keepLen = Math.min(combined.length, MAPPINGS_KEY_BUF.length - 1);
389
+ if (combined.length > keepLen) {
390
+ this.push(combined.slice(0, combined.length - keepLen));
391
+ }
392
+ this.scanBuf = combined.slice(combined.length - keepLen);
393
+ cb();
394
+ return;
395
+ }
396
+
397
+ // Emit everything up to and including the key.
398
+ const keyEnd = idx + MAPPINGS_KEY_BUF.length;
399
+ this.push(combined.slice(0, keyEnd));
400
+ this.scanBuf = Buffer.alloc(0);
401
+ this.state = 'buffering';
402
+ this._bufferingTransform(combined.slice(keyEnd), cb);
403
+ return;
404
+ }
405
+
406
+ // state === 'buffering'
407
+ this._bufferingTransform(chunk, cb);
408
+ }
409
+
410
+ // @ts-expect-error TS7006
411
+ private _bufferingTransform(chunk: Buffer, cb): void {
412
+ // Mappings values contain only base64 chars, ';', and ',' – no escaping –
413
+ // so scanning for the closing '"' (0x22) is safe.
414
+ const closeIdx = chunk.indexOf(0x22);
415
+
416
+ if (closeIdx === -1) {
417
+ this.mappingsBufs.push(chunk);
418
+ cb();
419
+ return;
420
+ }
421
+
422
+ this.mappingsBufs.push(chunk.slice(0, closeIdx));
423
+
424
+ // VLQ chars are all ASCII (<128), so latin1 round-trips without loss.
425
+ const mappingsStr = Buffer.concat(this.mappingsBufs).toString('latin1');
426
+ const rewritten = applyReplacementsToVLQMappings(
427
+ mappingsStr,
428
+ this.replacements,
429
+ );
430
+ this.push(Buffer.from(rewritten, 'latin1'));
431
+
432
+ // Emit the closing '"' and everything remaining in one push.
433
+ this.push(chunk.slice(closeIdx));
434
+
435
+ this.state = 'passthrough';
436
+ this.mappingsBufs = [];
437
+ cb();
438
+ }
439
+
440
+ // @ts-expect-error TS7006
441
+ _flush(cb): void {
442
+ if (this.state === 'scanning' && this.scanBuf.length > 0) {
443
+ this.push(this.scanBuf);
444
+ } else if (this.state === 'buffering') {
445
+ // Malformed JSON – flush whatever we buffered as-is.
446
+ this.push(Buffer.concat(this.mappingsBufs));
447
+ }
448
+ cb();
449
+ }
450
+ }
451
+
272
452
  /**
273
453
  * Computes the sourceRoot for a source map file. This is the relative path from
274
454
  * the output directory back to the project root, so that source paths (stored
@@ -1,7 +1,10 @@
1
1
  import assert from 'assert';
2
+ import {Readable} from 'stream';
2
3
  import SourceMap from '@atlaspack/source-map';
3
4
  import {
4
5
  applyReplacementsToSourceMap,
6
+ applyReplacementsToVLQMappings,
7
+ SourceMapHashRefRewriteStream,
5
8
  type HashRefReplacement,
6
9
  } from '../../src/requests/WriteBundleRequest';
7
10
 
@@ -69,6 +72,35 @@ function buildCodeWithHashRefs(
69
72
  };
70
73
  }
71
74
 
75
+ /**
76
+ * Drains a Readable stream into a single Buffer.
77
+ */
78
+ function streamToBuffer(stream: Readable): Promise<Buffer> {
79
+ return new Promise((resolve, reject) => {
80
+ const chunks: Buffer[] = [];
81
+ stream.on('data', (chunk: Buffer) => chunks.push(chunk));
82
+ stream.on('end', () => resolve(Buffer.concat(chunks)));
83
+ stream.on('error', reject);
84
+ });
85
+ }
86
+
87
+ /**
88
+ * Cross-checks applyReplacementsToVLQMappings against the native
89
+ * applyReplacementsToSourceMap by building a SourceMap, running both
90
+ * implementations, and asserting identical VLQ output.
91
+ */
92
+ function crossCheck(sm: SourceMap, replacements: HashRefReplacement[]): void {
93
+ const vlqBefore = sm.toVLQ().mappings;
94
+ const vlqResult = applyReplacementsToVLQMappings(vlqBefore, replacements);
95
+ applyReplacementsToSourceMap(sm, replacements);
96
+ const nativeResult = sm.toVLQ().mappings;
97
+ assert.strictEqual(
98
+ vlqResult,
99
+ nativeResult,
100
+ `VLQ result differs from native:\n VLQ: ${vlqResult}\n native: ${nativeResult}`,
101
+ );
102
+ }
103
+
72
104
  describe('applyReplacementsToSourceMap', () => {
73
105
  describe('with correct replacement coordinates', () => {
74
106
  it('should correctly adjust a single HASH_REF replacement', () => {
@@ -237,3 +269,334 @@ describe('applyReplacementsToSourceMap', () => {
237
269
  });
238
270
  });
239
271
  });
272
+
273
// Cross-checks the pure-TS VLQ rewrite against the native SourceMap path.
// NOTE(review): fixtures come from buildCodeWithHashRefs / HASH_REF_LEN /
// REPLACEMENT_LEN defined elsewhere in this spec — coordinate contracts
// assumed from their names; confirm against the helper.
describe('applyReplacementsToVLQMappings', () => {
  // No replacements → input string must be returned unchanged.
  it('returns the same string for empty replacements', () => {
    const sm = new SourceMap('/');
    sm.addIndexedMapping({
      generated: {line: 1, column: 10},
      original: {line: 1, column: 0},
      source: 'test.js',
    });
    const vlq = sm.toVLQ().mappings;
    assert.strictEqual(applyReplacementsToVLQMappings(vlq, []), vlq);
  });

  // newLength === originalLength → zero delta → no column should move.
  it('returns the same string for a zero-delta replacement', () => {
    const sm = new SourceMap('/');
    sm.addIndexedMapping({
      generated: {line: 1, column: 30},
      original: {line: 1, column: 0},
      source: 'test.js',
    });
    const vlq = sm.toVLQ().mappings;
    const repl: HashRefReplacement[] = [
      {line: 0, column: 0, originalLength: 10, newLength: 10},
    ];
    assert.strictEqual(applyReplacementsToVLQMappings(vlq, repl), vlq);
  });

  it('single replacement agrees with native', () => {
    const {correctReplacements, identifierPositions} = buildCodeWithHashRefs([
      {type: 'hashref'},
      {type: 'code', text: ';var x=SOME_IDENT;'},
    ]);
    const sm = new SourceMap('/');
    sm.addIndexedMapping({
      generated: {line: 1, column: identifierPositions.get('SOME_IDENT')!},
      original: {line: 10, column: 5},
      source: 'test.js',
    });
    crossCheck(sm, correctReplacements);
  });

  // Three hash-refs on one generated line: shifts must compound sequentially.
  it('multiple replacements on the same line agree with native', () => {
    const {correctReplacements, identifierPositions} = buildCodeWithHashRefs([
      {type: 'hashref'},
      {type: 'code', text: ';var a=IDENT_ALPHA;require("'},
      {type: 'hashref'},
      {type: 'code', text: '");var b=IDENT_BETA;require("'},
      {type: 'hashref'},
      {type: 'code', text: '");var c=IDENT_GAMMA;'},
    ]);
    const sm = new SourceMap('/');
    for (const [name, origLine] of [
      ['IDENT_ALPHA', 10],
      ['IDENT_BETA', 20],
      ['IDENT_GAMMA', 30],
    ] as const) {
      sm.addIndexedMapping({
        generated: {line: 1, column: identifierPositions.get(name)!},
        original: {line: origLine, column: 0},
        source: 'test.js',
      });
    }
    crossCheck(sm, correctReplacements);
  });

  // Stress the sequential-threshold logic with many replacements per line.
  it('10 replacements on the same line agree with native', () => {
    const segments: Array<{type: 'code'; text: string} | {type: 'hashref'}> =
      [];
    for (let i = 0; i < 10; i++) {
      segments.push({type: 'hashref'});
      segments.push({
        type: 'code',
        text: `;var x${i}=TARGET_${String(i).padStart(2, '0')};require("`,
      });
    }
    segments.push({type: 'code', text: '");'});

    const {correctReplacements, identifierPositions} =
      buildCodeWithHashRefs(segments);

    const sm = new SourceMap('/');
    for (let i = 0; i < 10; i++) {
      const name = `TARGET_${String(i).padStart(2, '0')}`;
      sm.addIndexedMapping({
        generated: {line: 1, column: identifierPositions.get(name)!},
        original: {line: (i + 1) * 10, column: 0},
        source: 'test.js',
      });
    }
    crossCheck(sm, correctReplacements);
  });

  // A mapping left of the replaced span must keep its original column.
  it('mapping before the threshold is unaffected', () => {
    const {correctReplacements, identifierPositions} = buildCodeWithHashRefs([
      {type: 'code', text: 'var BEFORE_HASH=1;'},
      {type: 'hashref'},
      {type: 'code', text: ';var AFTER_HASH=2;'},
    ]);
    const sm = new SourceMap('/');
    sm.addIndexedMapping({
      generated: {line: 1, column: identifierPositions.get('BEFORE_HASH')!},
      original: {line: 1, column: 0},
      source: 'test.js',
    });
    sm.addIndexedMapping({
      generated: {line: 1, column: identifierPositions.get('AFTER_HASH')!},
      original: {line: 2, column: 0},
      source: 'test.js',
    });
    crossCheck(sm, correctReplacements);
  });

  // Replacements are per-line: other generated lines must be untouched.
  it('mapping on a different line is unaffected', () => {
    const sm = new SourceMap('/');
    // Line 0 (VLQ line index 0): hash ref at col 0, mapping at col 50
    // Line 1 (VLQ line index 1): mapping at col 5 – should be untouched
    sm.addIndexedMapping({
      generated: {line: 1, column: 50},
      original: {line: 10, column: 0},
      source: 'test.js',
    });
    sm.addIndexedMapping({
      generated: {line: 2, column: 5},
      original: {line: 20, column: 0},
      source: 'test.js',
    });

    const replacements: HashRefReplacement[] = [
      {
        line: 0,
        column: 0,
        originalLength: HASH_REF_LEN,
        newLength: REPLACEMENT_LEN,
      },
    ];

    const vlqBefore = sm.toVLQ().mappings;
    const vlqResult = applyReplacementsToVLQMappings(vlqBefore, replacements);
    applyReplacementsToSourceMap(sm, replacements);

    // Verify the line-1 mapping is unchanged by checking parsed values
    const mappings = sm.getMap().mappings;
    const line2Mapping = mappings.find((m) => m.original?.line === 20);
    assert.ok(line2Mapping, 'Line 2 mapping should exist');
    assert.strictEqual(line2Mapping!.generated.column, 5);

    // Also verify VLQ agrees with native
    assert.strictEqual(vlqResult, sm.toVLQ().mappings);
  });
});
+ });
422
+
423
+ describe('SourceMapHashRefRewriteStream', () => {
424
+ async function applyStream(
425
+ json: string,
426
+ replacements: HashRefReplacement[],
427
+ chunkSize?: number,
428
+ ): Promise<string> {
429
+ const inputBuf = Buffer.from(json, 'utf8');
430
+ let readable: Readable;
431
+ if (chunkSize != null) {
432
+ readable = new Readable({
433
+ read() {
434
+ let offset = 0;
435
+ while (offset < inputBuf.length) {
436
+ this.push(inputBuf.slice(offset, offset + chunkSize));
437
+ offset += chunkSize;
438
+ }
439
+ this.push(null);
440
+ },
441
+ });
442
+ } else {
443
+ readable = Readable.from([inputBuf]);
444
+ }
445
+ const outBuf = await streamToBuffer(
446
+ readable.pipe(new SourceMapHashRefRewriteStream(replacements)),
447
+ );
448
+ return outBuf.toString('utf8');
449
+ }
450
+
451
+ it('full round-trip: mappings field is correctly rewritten', async () => {
452
+ const {correctReplacements, identifierPositions} = buildCodeWithHashRefs([
453
+ {type: 'hashref'},
454
+ {type: 'code', text: ';var x=SOME_IDENT;'},
455
+ ]);
456
+
457
+ const sm = new SourceMap('/');
458
+ sm.addIndexedMapping({
459
+ generated: {line: 1, column: identifierPositions.get('SOME_IDENT')!},
460
+ original: {line: 10, column: 5},
461
+ source: 'test.js',
462
+ });
463
+
464
+ const vlqBefore = sm.toVLQ().mappings;
465
+ const expectedMappings = applyReplacementsToVLQMappings(
466
+ vlqBefore,
467
+ correctReplacements,
468
+ );
469
+
470
+ const mapJson = await sm.stringify({format: 'string'});
471
+ const outputJson = await applyStream(
472
+ mapJson as string,
473
+ correctReplacements,
474
+ );
475
+ const parsed = JSON.parse(outputJson);
476
+
477
+ assert.strictEqual(parsed.mappings, expectedMappings);
478
+ });
479
+
480
+ it('bytes after mappings (sourcesContent) pass through unchanged', async () => {
481
+ const sm = new SourceMap('/');
482
+ sm.addIndexedMapping({
483
+ generated: {line: 1, column: 30},
484
+ original: {line: 5, column: 0},
485
+ source: 'test.js',
486
+ });
487
+ sm.setSourceContent('test.js', 'const x = 1;\nconst y = 2;\n');
488
+
489
+ const replacements: HashRefReplacement[] = [
490
+ {
491
+ line: 0,
492
+ column: 0,
493
+ originalLength: HASH_REF_LEN,
494
+ newLength: REPLACEMENT_LEN,
495
+ },
496
+ ];
497
+
498
+ const mapJson = (await sm.stringify({format: 'string'})) as string;
499
+ const outputJson = await applyStream(mapJson, replacements);
500
+ const parsedInput = JSON.parse(mapJson);
501
+ const parsedOutput = JSON.parse(outputJson);
502
+
503
+ // sourcesContent must be byte-for-byte identical
504
+ assert.deepStrictEqual(
505
+ parsedOutput.sourcesContent,
506
+ parsedInput.sourcesContent,
507
+ );
508
+ assert.deepStrictEqual(parsedOutput.sources, parsedInput.sources);
509
+ assert.deepStrictEqual(parsedOutput.names, parsedInput.names);
510
+ });
511
+
512
+ it('handles chunk boundaries mid-key', async () => {
513
+ const {correctReplacements, identifierPositions} = buildCodeWithHashRefs([
514
+ {type: 'hashref'},
515
+ {type: 'code', text: ';var x=SOME_IDENT;'},
516
+ ]);
517
+
518
+ const sm = new SourceMap('/');
519
+ sm.addIndexedMapping({
520
+ generated: {line: 1, column: identifierPositions.get('SOME_IDENT')!},
521
+ original: {line: 10, column: 5},
522
+ source: 'test.js',
523
+ });
524
+
525
+ const vlqBefore = sm.toVLQ().mappings;
526
+ const expectedMappings = applyReplacementsToVLQMappings(
527
+ vlqBefore,
528
+ correctReplacements,
529
+ );
530
+
531
+ const mapJson = (await sm.stringify({format: 'string'})) as string;
532
+
533
+ // Test multiple chunk sizes to exercise boundary conditions.
534
+ for (const chunkSize of [1, 3, 7, 11, 13]) {
535
+ const outputJson = await applyStream(
536
+ mapJson,
537
+ correctReplacements,
538
+ chunkSize,
539
+ );
540
+ const parsed = JSON.parse(outputJson);
541
+ assert.strictEqual(
542
+ parsed.mappings,
543
+ expectedMappings,
544
+ `Chunk size ${chunkSize}: mappings mismatch`,
545
+ );
546
+ }
547
+ });
548
+
549
+ it('handles chunk boundaries mid-value', async () => {
550
+ // Use a source map with a longer mappings string to ensure the VLQ value
551
+ // spans multiple chunks for small chunk sizes.
552
+ const sm = new SourceMap('/');
553
+ for (let i = 0; i < 20; i++) {
554
+ sm.addIndexedMapping({
555
+ generated: {line: 1, column: i * 5},
556
+ original: {line: i + 1, column: 0},
557
+ source: 'test.js',
558
+ });
559
+ }
560
+
561
+ const replacements: HashRefReplacement[] = [
562
+ {
563
+ line: 0,
564
+ column: 10,
565
+ originalLength: HASH_REF_LEN,
566
+ newLength: REPLACEMENT_LEN,
567
+ },
568
+ ];
569
+
570
+ const vlqBefore = sm.toVLQ().mappings;
571
+ const expectedMappings = applyReplacementsToVLQMappings(
572
+ vlqBefore,
573
+ replacements,
574
+ );
575
+
576
+ const mapJson = (await sm.stringify({format: 'string'})) as string;
577
+
578
+ for (const chunkSize of [1, 5, 8]) {
579
+ const outputJson = await applyStream(mapJson, replacements, chunkSize);
580
+ const parsed = JSON.parse(outputJson);
581
+ assert.strictEqual(
582
+ parsed.mappings,
583
+ expectedMappings,
584
+ `Chunk size ${chunkSize}: mappings mismatch`,
585
+ );
586
+ }
587
+ });
588
+
589
+ it('no-op for empty replacements – output equals input', async () => {
590
+ const sm = new SourceMap('/');
591
+ sm.addIndexedMapping({
592
+ generated: {line: 1, column: 10},
593
+ original: {line: 1, column: 0},
594
+ source: 'test.js',
595
+ });
596
+
597
+ const mapJson = (await sm.stringify({format: 'string'})) as string;
598
+ const outputJson = await applyStream(mapJson, []);
599
+
600
+ assert.strictEqual(outputJson, mapJson);
601
+ });
602
+ });