pushwork 1.1.1 → 1.1.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -266,7 +266,7 @@ export class SyncEngine {
266
266
  const plainUrl = getPlainUrl(entry.url)
267
267
  if (!failedUrls.has(plainUrl)) continue
268
268
 
269
- debug(`recreate: recreating document for ${filePath} (${plainUrl.slice(0, 20)}...)`)
269
+ debug(`recreate: recreating document for ${filePath} (${plainUrl})`)
270
270
  out.taskLine(`Recreating document for ${filePath}`)
271
271
 
272
272
  try {
@@ -335,7 +335,7 @@ export class SyncEngine {
335
335
  newHandles.push(newHandle)
336
336
  newHandles.push(dirHandle)
337
337
 
338
- debug(`recreate: created new doc for ${filePath} -> ${newHandle.url.slice(0, 20)}...`)
338
+ debug(`recreate: created new doc for ${filePath} -> ${newHandle.url}`)
339
339
  } catch (error) {
340
340
  debug(`recreate: failed for ${filePath}: ${error}`)
341
341
  out.taskLine(`Failed to recreate ${filePath}: ${error}`, true)
@@ -378,7 +378,7 @@ export class SyncEngine {
378
378
  (await this.snapshotManager.load()) ||
379
379
  this.snapshotManager.createEmpty()
380
380
 
381
- debug(`sync: rootDirectoryUrl=${snapshot.rootDirectoryUrl?.slice(0, 30)}..., files=${snapshot.files.size}, dirs=${snapshot.directories.size}`)
381
+ debug(`sync: rootDirectoryUrl=${snapshot.rootDirectoryUrl}, files=${snapshot.files.size}, dirs=${snapshot.directories.size}`)
382
382
 
383
383
  // Wait for initial sync to receive any pending remote changes
384
384
  if (this.config.sync_enabled && snapshot.rootDirectoryUrl) {
@@ -583,9 +583,17 @@ export class SyncEngine {
583
583
  result.warnings.push(...phase2Result.warnings)
584
584
 
585
585
  // Update snapshot heads after pulling remote changes
586
+ // IMPORTANT: Use getPlainUrl() to strip version/heads from URLs.
587
+ // Artifact entries store versioned URLs (with heads baked in).
588
+ // repo.find(versionedUrl) returns a view handle whose .heads()
589
+ // returns the VERSION heads, not the current document heads.
590
+ // Using the versioned URL here would overwrite correct heads with
591
+ // stale ones, causing changeAt() to fork from the wrong point
592
+ // on the next sync (e.g. an empty directory state where deletions
593
+ // can't find the entries to splice out).
586
594
  for (const [filePath, snapshotEntry] of snapshot.files.entries()) {
587
595
  try {
588
- const handle = await this.repo.find(snapshotEntry.url)
596
+ const handle = await this.repo.find(getPlainUrl(snapshotEntry.url))
589
597
  const currentHeads = handle.heads()
590
598
  if (!A.equals(currentHeads, snapshotEntry.head)) {
591
599
  // Update snapshot with current heads after pulling changes
@@ -602,7 +610,7 @@ export class SyncEngine {
602
610
  // Update directory document heads
603
611
  for (const [dirPath, snapshotEntry] of snapshot.directories.entries()) {
604
612
  try {
605
- const handle = await this.repo.find(snapshotEntry.url)
613
+ const handle = await this.repo.find(getPlainUrl(snapshotEntry.url))
606
614
  const currentHeads = handle.heads()
607
615
  if (!A.equals(currentHeads, snapshotEntry.head)) {
608
616
  // Update snapshot with current heads after pulling changes
@@ -797,7 +805,7 @@ export class SyncEngine {
797
805
  }
798
806
  )
799
807
  result.filesChanged++
800
- debug(`push: created ${change.path} -> ${handle.url.slice(0, 20)}...`)
808
+ debug(`push: created ${change.path} -> ${handle.url}`)
801
809
  }
802
810
  } else {
803
811
  // Update existing file
@@ -958,6 +966,11 @@ export class SyncEngine {
958
966
  if (snapshotEntry) {
959
967
  // Update existing entry
960
968
  snapshotEntry.head = change.remoteHead
969
+ // If the remote document was replaced (new URL), update the snapshot URL
970
+ if (change.remoteUrl) {
971
+ const fileHandle = await this.repo.find<FileDocument>(change.remoteUrl)
972
+ snapshotEntry.url = this.getEntryUrl(fileHandle, change.path)
973
+ }
961
974
  } else {
962
975
  // Create new snapshot entry for newly discovered remote file
963
976
  // We need to find the remote file's URL from the directory hierarchy
@@ -41,7 +41,7 @@ export interface FileDocument {
41
41
  export enum FileType {
42
42
  TEXT = "text",
43
43
  BINARY = "binary",
44
- DIRECTORY = "directory",
44
+ DIRECTORY = "directory"
45
45
  }
46
46
 
47
47
  /**
@@ -51,7 +51,7 @@ export enum ChangeType {
51
51
  NO_CHANGE = "no_change",
52
52
  LOCAL_ONLY = "local_only",
53
53
  REMOTE_ONLY = "remote_only",
54
- BOTH_CHANGED = "both_changed",
54
+ BOTH_CHANGED = "both_changed"
55
55
  }
56
56
 
57
57
  /**
@@ -86,4 +86,6 @@ export interface DetectedChange {
86
86
  remoteContent: string | Uint8Array | null
87
87
  localHead?: UrlHeads
88
88
  remoteHead?: UrlHeads
89
+ /** New remote URL when the remote document was replaced (artifact URL change) */
90
+ remoteUrl?: AutomergeUrl
89
91
  }
@@ -88,7 +88,7 @@ export async function waitForBidirectionalSync(
88
88
  const changedDocs: string[] = [];
89
89
  for (const [url, heads] of currentHeads) {
90
90
  if (lastSeenHeads.get(url) !== heads) {
91
- changedDocs.push(url.slice(0, 20) + "...");
91
+ changedDocs.push(url);
92
92
  }
93
93
  }
94
94
  const newDocs = currentHeads.size - lastSeenHeads.size;
@@ -236,13 +236,13 @@ function waitForHandleSync(
236
236
  };
237
237
 
238
238
  const onConverged = () => {
239
- debug(`waitForSync: ${handle.url.slice(0, 20)}... converged in ${Date.now() - startTime}ms`);
239
+ debug(`waitForSync: ${handle.url} converged in ${Date.now() - startTime}ms`);
240
240
  cleanup();
241
241
  resolve(handle);
242
242
  };
243
243
 
244
244
  const timeout = setTimeout(() => {
245
- debug(`waitForSync: ${handle.url.slice(0, 20)}... timed out after ${timeoutMs}ms`);
245
+ debug(`waitForSync: ${handle.url} timed out after ${timeoutMs}ms`);
246
246
  cleanup();
247
247
  reject(handle);
248
248
  }, timeoutMs);
@@ -317,9 +317,9 @@ export async function waitForSync(
317
317
  const remoteHeads = syncInfo?.lastHeads;
318
318
  if (A.equals(heads, remoteHeads)) {
319
319
  alreadySynced++;
320
- debug(`waitForSync: ${handle.url.slice(0, 20)}... already synced`);
320
+ debug(`waitForSync: ${handle.url} already synced`);
321
321
  } else {
322
- debug(`waitForSync: ${handle.url.slice(0, 20)}... needs sync (remoteHeads=${remoteHeads ? 'present' : 'missing'})`);
322
+ debug(`waitForSync: ${handle.url} needs sync (remoteHeads=${remoteHeads ? 'present' : 'missing'})`);
323
323
  needsSync.push(handle);
324
324
  }
325
325
  }
@@ -391,6 +391,401 @@ describe("Sync Reliability Tests", () => {
391
391
  }, 30000);
392
392
  });
393
393
 
394
+ describe("Subdirectory File Deletion - Resurrection Bug", () => {
395
+ it("deleted file in artifact directory should not resurrect", async () => {
396
+ // Files in artifact directories (dist/ by default) resurrect after sync.
397
+ // Phase 1 (push) correctly removes the file entry from the directory doc,
398
+ // but the Automerge merge with the server's version re-introduces it.
399
+ // Phase 2 (pull) then sees it as a "new remote document" and re-creates it.
400
+ const repoA = path.join(tmpDir, "repo-a");
401
+ await fs.mkdir(repoA);
402
+
403
+ await fs.mkdir(path.join(repoA, "dist", "assets"), { recursive: true });
404
+ await fs.writeFile(path.join(repoA, "dist", "assets", "app.js"), "// build 1");
405
+ await pushwork(["init", "."], repoA);
406
+ await pushwork(["sync"], repoA);
407
+
408
+ // Delete the file
409
+ await fs.unlink(path.join(repoA, "dist", "assets", "app.js"));
410
+
411
+ // Sync - push deletion then pull
412
+ await pushwork(["sync"], repoA);
413
+
414
+ // File should stay deleted
415
+ expect(await pathExists(path.join(repoA, "dist", "assets", "app.js"))).toBe(false);
416
+
417
+ // Sync again - should NOT come back from server
418
+ await pushwork(["sync"], repoA);
419
+ expect(await pathExists(path.join(repoA, "dist", "assets", "app.js"))).toBe(false);
420
+ }, 60000);
421
+
422
+ it("deleted file in depth-1 subdirectory should not resurrect (control)", async () => {
423
+ // Control: depth-1 subdirectories work correctly
424
+ const repoA = path.join(tmpDir, "repo-a");
425
+ await fs.mkdir(repoA);
426
+
427
+ await fs.mkdir(path.join(repoA, "subdir"), { recursive: true });
428
+ await fs.writeFile(path.join(repoA, "subdir", "file.txt"), "content");
429
+ await pushwork(["init", "."], repoA);
430
+ await pushwork(["sync"], repoA);
431
+
432
+ await fs.unlink(path.join(repoA, "subdir", "file.txt"));
433
+
434
+ await pushwork(["sync"], repoA);
435
+ expect(await pathExists(path.join(repoA, "subdir", "file.txt"))).toBe(false);
436
+
437
+ await pushwork(["sync"], repoA);
438
+ expect(await pathExists(path.join(repoA, "subdir", "file.txt"))).toBe(false);
439
+ }, 60000);
440
+
441
+ it("deleted build artifacts should not resurrect after rebuild cycle", async () => {
442
+ // Real-world scenario: build step creates new hashed files and deletes
443
+ // old ones in dist/assets/. The deleted files come back from the server.
444
+ const repoA = path.join(tmpDir, "repo-a");
445
+ await fs.mkdir(repoA);
446
+
447
+ await fs.mkdir(path.join(repoA, "dist", "assets"), { recursive: true });
448
+ await fs.writeFile(path.join(repoA, "dist", "assets", "app-ABC123.js"), "// build 1");
449
+ await fs.writeFile(path.join(repoA, "dist", "assets", "vendor-DEF456.js"), "// vendor 1");
450
+ await fs.writeFile(path.join(repoA, "dist", "index.js"), "// index 1");
451
+ await pushwork(["init", "."], repoA);
452
+ await pushwork(["sync"], repoA);
453
+
454
+ // Simulate rebuild: new hashed files replace old ones
455
+ await fs.unlink(path.join(repoA, "dist", "assets", "app-ABC123.js"));
456
+ await fs.unlink(path.join(repoA, "dist", "assets", "vendor-DEF456.js"));
457
+ await fs.writeFile(path.join(repoA, "dist", "assets", "app-XYZ789.js"), "// build 2");
458
+ await fs.writeFile(path.join(repoA, "dist", "assets", "vendor-UVW012.js"), "// vendor 2");
459
+ await fs.writeFile(path.join(repoA, "dist", "index.js"), "// index 2");
460
+
461
+ await pushwork(["sync"], repoA);
462
+
463
+ // Old files should be gone, new files should exist
464
+ expect(await pathExists(path.join(repoA, "dist", "assets", "app-ABC123.js"))).toBe(false);
465
+ expect(await pathExists(path.join(repoA, "dist", "assets", "vendor-DEF456.js"))).toBe(false);
466
+ expect(await pathExists(path.join(repoA, "dist", "assets", "app-XYZ789.js"))).toBe(true);
467
+ expect(await pathExists(path.join(repoA, "dist", "assets", "vendor-UVW012.js"))).toBe(true);
468
+
469
+ // Sync again - old files should NOT come back from server
470
+ await pushwork(["sync"], repoA);
471
+
472
+ expect(await pathExists(path.join(repoA, "dist", "assets", "app-ABC123.js"))).toBe(false);
473
+ expect(await pathExists(path.join(repoA, "dist", "assets", "vendor-DEF456.js"))).toBe(false);
474
+ }, 60000);
475
+
476
+ it("deleted artifact files should not resurrect on clone", async () => {
477
+ // Two repos: A deletes files in an artifact directory, B should not
478
+ // see the deleted files after syncing.
479
+ const repoA = path.join(tmpDir, "repo-a");
480
+ const repoB = path.join(tmpDir, "repo-b");
481
+ await fs.mkdir(repoA);
482
+ await fs.mkdir(repoB);
483
+
484
+ await fs.mkdir(path.join(repoA, "dist", "assets"), { recursive: true });
485
+ await fs.writeFile(path.join(repoA, "dist", "assets", "app-ABC123.js"), "// build 1");
486
+ await fs.writeFile(path.join(repoA, "dist", "assets", "vendor-DEF456.js"), "// vendor 1");
487
+ await fs.writeFile(path.join(repoA, "dist", "index.js"), "// index 1");
488
+ await pushwork(["init", "."], repoA);
489
+
490
+ // Clone to B and converge
491
+ const { stdout: rootUrl } = await pushwork(["url"], repoA);
492
+ await pushwork(["clone", rootUrl.trim(), repoB], tmpDir);
493
+ await syncUntilConverged(repoA, repoB);
494
+
495
+ expect(await pathExists(path.join(repoB, "dist", "assets", "app-ABC123.js"))).toBe(true);
496
+
497
+ // A rebuilds
498
+ await fs.unlink(path.join(repoA, "dist", "assets", "app-ABC123.js"));
499
+ await fs.unlink(path.join(repoA, "dist", "assets", "vendor-DEF456.js"));
500
+ await fs.writeFile(path.join(repoA, "dist", "assets", "app-XYZ789.js"), "// build 2");
501
+ await fs.writeFile(path.join(repoA, "dist", "assets", "vendor-UVW012.js"), "// vendor 2");
502
+ await fs.writeFile(path.join(repoA, "dist", "index.js"), "// index 2");
503
+
504
+ // Sync A then B
505
+ await pushwork(["sync"], repoA);
506
+
507
+ // A should not have resurrected files
508
+ expect(await pathExists(path.join(repoA, "dist", "assets", "app-ABC123.js"))).toBe(false);
509
+ expect(await pathExists(path.join(repoA, "dist", "assets", "vendor-DEF456.js"))).toBe(false);
510
+
511
+ await pushwork(["sync"], repoB);
512
+
513
+ // B should have new files, NOT old files
514
+ expect(await pathExists(path.join(repoB, "dist", "assets", "app-ABC123.js"))).toBe(false);
515
+ expect(await pathExists(path.join(repoB, "dist", "assets", "vendor-DEF456.js"))).toBe(false);
516
+ expect(await pathExists(path.join(repoB, "dist", "assets", "app-XYZ789.js"))).toBe(true);
517
+ }, 90000);
518
+
519
+ it("deleted file in depth-3 subdirectory should not resurrect", async () => {
520
+ const repoA = path.join(tmpDir, "repo-a");
521
+ await fs.mkdir(repoA);
522
+
523
+ await fs.mkdir(path.join(repoA, "a", "b", "c"), { recursive: true });
524
+ await fs.writeFile(path.join(repoA, "a", "b", "c", "deep.txt"), "deep");
525
+ await pushwork(["init", "."], repoA);
526
+ await pushwork(["sync"], repoA);
527
+
528
+ await fs.unlink(path.join(repoA, "a", "b", "c", "deep.txt"));
529
+
530
+ await pushwork(["sync"], repoA);
531
+ expect(await pathExists(path.join(repoA, "a", "b", "c", "deep.txt"))).toBe(false);
532
+
533
+ await pushwork(["sync"], repoA);
534
+ expect(await pathExists(path.join(repoA, "a", "b", "c", "deep.txt"))).toBe(false);
535
+ }, 60000);
536
+
537
+ it("create+delete in same subdirectory should not resurrect deleted files", async () => {
538
+ // Regression guard: simultaneous create+delete in the same non-artifact
539
+ // subdirectory should work. This passes today but we don't want it to regress.
540
+ const repoA = path.join(tmpDir, "repo-a");
541
+ await fs.mkdir(repoA);
542
+
543
+ await fs.mkdir(path.join(repoA, "subdir"), { recursive: true });
544
+ await fs.writeFile(path.join(repoA, "subdir", "old.txt"), "old content");
545
+ await pushwork(["init", "."], repoA);
546
+ await pushwork(["sync"], repoA);
547
+
548
+ // Simultaneously create new file and delete old file in same dir
549
+ await fs.unlink(path.join(repoA, "subdir", "old.txt"));
550
+ await fs.writeFile(path.join(repoA, "subdir", "new.txt"), "new content");
551
+
552
+ await pushwork(["sync"], repoA);
553
+
554
+ expect(await pathExists(path.join(repoA, "subdir", "old.txt"))).toBe(false);
555
+ expect(await pathExists(path.join(repoA, "subdir", "new.txt"))).toBe(true);
556
+
557
+ // Sync again - old file should NOT come back
558
+ await pushwork(["sync"], repoA);
559
+
560
+ expect(await pathExists(path.join(repoA, "subdir", "old.txt"))).toBe(false);
561
+ expect(await pathExists(path.join(repoA, "subdir", "new.txt"))).toBe(true);
562
+ }, 60000);
563
+
564
+ it("deleted file in depth-2 with sibling dirs should not resurrect", async () => {
565
+ // The depth-3 test has intermediate dirs (a/b/c) with only one child each.
566
+ // The dist/assets test has dist/ containing both assets/ (subdir) and
567
+ // index.js (file). Test if having a file sibling alongside the subdir matters.
568
+ const repoA = path.join(tmpDir, "repo-a");
569
+ await fs.mkdir(repoA);
570
+
571
+ await fs.mkdir(path.join(repoA, "parent", "child"), { recursive: true });
572
+ await fs.writeFile(path.join(repoA, "parent", "sibling.txt"), "sibling at parent level");
573
+ await fs.writeFile(path.join(repoA, "parent", "child", "target.txt"), "will be deleted");
574
+ await pushwork(["init", "."], repoA);
575
+ await pushwork(["sync"], repoA);
576
+
577
+ await fs.unlink(path.join(repoA, "parent", "child", "target.txt"));
578
+
579
+ await pushwork(["sync"], repoA);
580
+ expect(await pathExists(path.join(repoA, "parent", "child", "target.txt"))).toBe(false);
581
+ expect(await pathExists(path.join(repoA, "parent", "sibling.txt"))).toBe(true);
582
+
583
+ await pushwork(["sync"], repoA);
584
+ expect(await pathExists(path.join(repoA, "parent", "child", "target.txt"))).toBe(false);
585
+ }, 60000);
586
+
587
+ it("deleted file in root directory should not resurrect", async () => {
588
+ const repoA = path.join(tmpDir, "repo-a");
589
+ await fs.mkdir(repoA);
590
+
591
+ await fs.writeFile(path.join(repoA, "root-file.txt"), "root content");
592
+ await fs.writeFile(path.join(repoA, "keep.txt"), "keep this");
593
+ await pushwork(["init", "."], repoA);
594
+ await pushwork(["sync"], repoA);
595
+
596
+ // Delete file in root
597
+ await fs.unlink(path.join(repoA, "root-file.txt"));
598
+
599
+ await pushwork(["sync"], repoA);
600
+ expect(await pathExists(path.join(repoA, "root-file.txt"))).toBe(false);
601
+ expect(await pathExists(path.join(repoA, "keep.txt"))).toBe(true);
602
+
603
+ // Sync again - should NOT come back
604
+ await pushwork(["sync"], repoA);
605
+ expect(await pathExists(path.join(repoA, "root-file.txt"))).toBe(false);
606
+ }, 60000);
607
+
608
+ it("deleted file in non-artifact subdirectory (src/) should not resurrect", async () => {
609
+ const repoA = path.join(tmpDir, "repo-a");
610
+ await fs.mkdir(repoA);
611
+
612
+ await fs.mkdir(path.join(repoA, "src"), { recursive: true });
613
+ await fs.writeFile(path.join(repoA, "src", "index.ts"), "export default 1");
614
+ await fs.writeFile(path.join(repoA, "src", "helper.ts"), "export function help() {}");
615
+ await pushwork(["init", "."], repoA);
616
+ await pushwork(["sync"], repoA);
617
+
618
+ // Delete one file in src/
619
+ await fs.unlink(path.join(repoA, "src", "helper.ts"));
620
+
621
+ await pushwork(["sync"], repoA);
622
+ expect(await pathExists(path.join(repoA, "src", "helper.ts"))).toBe(false);
623
+ expect(await pathExists(path.join(repoA, "src", "index.ts"))).toBe(true);
624
+
625
+ // Sync again - should NOT come back
626
+ await pushwork(["sync"], repoA);
627
+ expect(await pathExists(path.join(repoA, "src", "helper.ts"))).toBe(false);
628
+ }, 60000);
629
+
630
+ it("deleted files should not resurrect after multiple sync cycles", async () => {
631
+ // Simulate real-world usage: multiple syncs over time with deletions
632
+ const repoA = path.join(tmpDir, "repo-a");
633
+ await fs.mkdir(repoA);
634
+
635
+ await fs.mkdir(path.join(repoA, "src"), { recursive: true });
636
+ await fs.writeFile(path.join(repoA, "readme.txt"), "readme");
637
+ await fs.writeFile(path.join(repoA, "src", "app.ts"), "app");
638
+ await fs.writeFile(path.join(repoA, "src", "old.ts"), "old");
639
+ await pushwork(["init", "."], repoA);
640
+ await pushwork(["sync"], repoA);
641
+
642
+ // Cycle 1: delete root file
643
+ await fs.unlink(path.join(repoA, "readme.txt"));
644
+ await pushwork(["sync"], repoA);
645
+ expect(await pathExists(path.join(repoA, "readme.txt"))).toBe(false);
646
+
647
+ // Cycle 2: delete src file
648
+ await fs.unlink(path.join(repoA, "src", "old.ts"));
649
+ await pushwork(["sync"], repoA);
650
+ expect(await pathExists(path.join(repoA, "src", "old.ts"))).toBe(false);
651
+
652
+ // Cycle 3: just sync - nothing should come back
653
+ await pushwork(["sync"], repoA);
654
+ expect(await pathExists(path.join(repoA, "readme.txt"))).toBe(false);
655
+ expect(await pathExists(path.join(repoA, "src", "old.ts"))).toBe(false);
656
+ expect(await pathExists(path.join(repoA, "src", "app.ts"))).toBe(true);
657
+ }, 90000);
658
+
659
+ it("peer B should not see files deleted by peer A (root)", async () => {
660
+ const repoA = path.join(tmpDir, "repo-a");
661
+ const repoB = path.join(tmpDir, "repo-b");
662
+ await fs.mkdir(repoA);
663
+ await fs.mkdir(repoB);
664
+
665
+ await fs.writeFile(path.join(repoA, "keep.txt"), "keep");
666
+ await fs.writeFile(path.join(repoA, "delete-me.txt"), "gone");
667
+ await pushwork(["init", "."], repoA);
668
+
669
+ // Clone to B and converge
670
+ const { stdout: rootUrl } = await pushwork(["url"], repoA);
671
+ await pushwork(["clone", rootUrl.trim(), repoB], tmpDir);
672
+ await syncUntilConverged(repoA, repoB);
673
+
674
+ expect(await pathExists(path.join(repoB, "delete-me.txt"))).toBe(true);
675
+
676
+ // A deletes a root file
677
+ await fs.unlink(path.join(repoA, "delete-me.txt"));
678
+ await pushwork(["sync"], repoA);
679
+
680
+ // B syncs - should see the deletion
681
+ await pushwork(["sync"], repoB);
682
+ expect(await pathExists(path.join(repoB, "delete-me.txt"))).toBe(false);
683
+ expect(await pathExists(path.join(repoB, "keep.txt"))).toBe(true);
684
+
685
+ // B syncs again - should stay deleted
686
+ await pushwork(["sync"], repoB);
687
+ expect(await pathExists(path.join(repoB, "delete-me.txt"))).toBe(false);
688
+ }, 90000);
689
+
690
+ it("peer B should not see files deleted by peer A (src/)", async () => {
691
+ const repoA = path.join(tmpDir, "repo-a");
692
+ const repoB = path.join(tmpDir, "repo-b");
693
+ await fs.mkdir(repoA);
694
+ await fs.mkdir(repoB);
695
+
696
+ await fs.mkdir(path.join(repoA, "src"), { recursive: true });
697
+ await fs.writeFile(path.join(repoA, "src", "index.ts"), "export default 1");
698
+ await fs.writeFile(path.join(repoA, "src", "old.ts"), "old code");
699
+ await pushwork(["init", "."], repoA);
700
+
701
+ const { stdout: rootUrl } = await pushwork(["url"], repoA);
702
+ await pushwork(["clone", rootUrl.trim(), repoB], tmpDir);
703
+ await syncUntilConverged(repoA, repoB);
704
+
705
+ expect(await pathExists(path.join(repoB, "src", "old.ts"))).toBe(true);
706
+
707
+ // A deletes a file in src/
708
+ await fs.unlink(path.join(repoA, "src", "old.ts"));
709
+ await pushwork(["sync"], repoA);
710
+
711
+ // B syncs - should see the deletion
712
+ await pushwork(["sync"], repoB);
713
+ expect(await pathExists(path.join(repoB, "src", "old.ts"))).toBe(false);
714
+ expect(await pathExists(path.join(repoB, "src", "index.ts"))).toBe(true);
715
+
716
+ // B syncs again - should stay deleted
717
+ await pushwork(["sync"], repoB);
718
+ expect(await pathExists(path.join(repoB, "src", "old.ts"))).toBe(false);
719
+ }, 90000);
720
+
721
+ it("peer B should not see files deleted by peer A (dist/)", async () => {
722
+ const repoA = path.join(tmpDir, "repo-a");
723
+ const repoB = path.join(tmpDir, "repo-b");
724
+ await fs.mkdir(repoA);
725
+ await fs.mkdir(repoB);
726
+
727
+ await fs.mkdir(path.join(repoA, "dist", "assets"), { recursive: true });
728
+ await fs.writeFile(path.join(repoA, "dist", "index.js"), "// index");
729
+ await fs.writeFile(path.join(repoA, "dist", "assets", "app-ABC.js"), "// build 1");
730
+ await pushwork(["init", "."], repoA);
731
+
732
+ const { stdout: rootUrl } = await pushwork(["url"], repoA);
733
+ await pushwork(["clone", rootUrl.trim(), repoB], tmpDir);
734
+ await syncUntilConverged(repoA, repoB);
735
+
736
+ expect(await pathExists(path.join(repoB, "dist", "assets", "app-ABC.js"))).toBe(true);
737
+
738
+ // A rebuilds: delete old artifact, create new one
739
+ await fs.unlink(path.join(repoA, "dist", "assets", "app-ABC.js"));
740
+ await fs.writeFile(path.join(repoA, "dist", "assets", "app-XYZ.js"), "// build 2");
741
+ await pushwork(["sync"], repoA);
742
+
743
+ // A should not have resurrected
744
+ expect(await pathExists(path.join(repoA, "dist", "assets", "app-ABC.js"))).toBe(false);
745
+
746
+ // B syncs - should see new file, NOT old file
747
+ await pushwork(["sync"], repoB);
748
+ expect(await pathExists(path.join(repoB, "dist", "assets", "app-ABC.js"))).toBe(false);
749
+ expect(await pathExists(path.join(repoB, "dist", "assets", "app-XYZ.js"))).toBe(true);
750
+
751
+ // B syncs again - old file should stay gone
752
+ await pushwork(["sync"], repoB);
753
+ expect(await pathExists(path.join(repoB, "dist", "assets", "app-ABC.js"))).toBe(false);
754
+ }, 90000);
755
+
756
+ it("peer B should see artifact file content update after URL replacement", async () => {
757
+ // When peer A modifies an artifact file, the document is replaced entirely
758
+ // (new Automerge doc with a new URL). Peer B's snapshot still points to the
759
+ // old (now orphaned) URL. detectRemoteChanges sees no head change on the old
760
+ // doc, and detectNewRemoteDocuments skips paths already in the snapshot.
761
+ // Without URL replacement detection, B never sees the update.
762
+ const repoA = path.join(tmpDir, "repo-a");
763
+ const repoB = path.join(tmpDir, "repo-b");
764
+ await fs.mkdir(repoA);
765
+ await fs.mkdir(repoB);
766
+
767
+ await fs.mkdir(path.join(repoA, "dist"), { recursive: true });
768
+ await fs.writeFile(path.join(repoA, "dist", "app.js"), "// version 1");
769
+ await pushwork(["init", "."], repoA);
770
+
771
+ const { stdout: rootUrl } = await pushwork(["url"], repoA);
772
+ await pushwork(["clone", rootUrl.trim(), repoB], tmpDir);
773
+ await syncUntilConverged(repoA, repoB);
774
+
775
+ const bContentV1 = await fs.readFile(path.join(repoB, "dist", "app.js"), "utf-8");
776
+ expect(bContentV1).toBe("// version 1");
777
+
778
+ // A modifies the artifact file — this triggers nuclear replacement (new URL)
779
+ await fs.writeFile(path.join(repoA, "dist", "app.js"), "// version 2");
780
+ await pushwork(["sync"], repoA);
781
+
782
+ // B syncs — should pick up the new content despite the URL change
783
+ await pushwork(["sync"], repoB);
784
+ const bContentV2 = await fs.readFile(path.join(repoB, "dist", "app.js"), "utf-8");
785
+ expect(bContentV2).toBe("// version 2");
786
+ }, 90000);
787
+ });
788
+
394
789
  describe("Move/Rename Detection", () => {
395
790
  it("should handle file rename", async () => {
396
791
  const repoA = path.join(tmpDir, "repo-a");