pushwork 1.0.7 → 1.0.11

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (56)
  1. package/babel.config.js +5 -0
  2. package/dist/cli/commands.d.ts +61 -0
  3. package/dist/cli/commands.d.ts.map +1 -0
  4. package/dist/cli/commands.js +661 -0
  5. package/dist/cli/commands.js.map +1 -0
  6. package/dist/cli/index.d.ts +2 -0
  7. package/dist/cli/index.d.ts.map +1 -0
  8. package/dist/cli/index.js +19 -0
  9. package/dist/cli/index.js.map +1 -0
  10. package/dist/cli/output.d.ts +61 -0
  11. package/dist/cli/output.d.ts.map +1 -0
  12. package/dist/cli/output.js +176 -0
  13. package/dist/cli/output.js.map +1 -0
  14. package/dist/config/index.d.ts +71 -0
  15. package/dist/config/index.d.ts.map +1 -0
  16. package/dist/config/index.js +314 -0
  17. package/dist/config/index.js.map +1 -0
  18. package/dist/core/change-detection.d.ts +5 -0
  19. package/dist/core/change-detection.d.ts.map +1 -1
  20. package/dist/core/change-detection.js +36 -8
  21. package/dist/core/change-detection.js.map +1 -1
  22. package/dist/core/sync-engine.d.ts +36 -1
  23. package/dist/core/sync-engine.d.ts.map +1 -1
  24. package/dist/core/sync-engine.js +350 -90
  25. package/dist/core/sync-engine.js.map +1 -1
  26. package/dist/utils/content-similarity.d.ts +53 -0
  27. package/dist/utils/content-similarity.d.ts.map +1 -0
  28. package/dist/utils/content-similarity.js +155 -0
  29. package/dist/utils/content-similarity.js.map +1 -0
  30. package/dist/utils/directory.d.ts +15 -1
  31. package/dist/utils/directory.d.ts.map +1 -1
  32. package/dist/utils/directory.js +22 -3
  33. package/dist/utils/directory.js.map +1 -1
  34. package/dist/utils/fs.d.ts.map +1 -1
  35. package/dist/utils/fs.js +1 -2
  36. package/dist/utils/fs.js.map +1 -1
  37. package/dist/utils/keyhive.d.ts +9 -0
  38. package/dist/utils/keyhive.d.ts.map +1 -0
  39. package/dist/utils/keyhive.js +26 -0
  40. package/dist/utils/keyhive.js.map +1 -0
  41. package/dist/utils/network-sync.d.ts +16 -1
  42. package/dist/utils/network-sync.d.ts.map +1 -1
  43. package/dist/utils/network-sync.js +103 -0
  44. package/dist/utils/network-sync.js.map +1 -1
  45. package/package.json +12 -2
  46. package/src/core/change-detection.ts +47 -9
  47. package/src/core/sync-engine.ts +440 -98
  48. package/src/utils/directory.ts +27 -4
  49. package/src/utils/fs.ts +1 -2
  50. package/src/utils/network-sync.ts +137 -1
  51. package/test/integration/in-memory-sync.test.ts +435 -0
  52. package/test/integration/init-sync.test.ts +89 -89
  53. package/dist/.pushwork/automerge/3P/Dm3ekE2pmjGnWvDaG3vSR7ww98/snapshot/aa2349c94955ea561f698720142f9d884a6872d9f82dc332d578c216beb0df0e +0 -0
  54. package/dist/.pushwork/automerge/st/orage-adapter-id +0 -1
  55. package/dist/.pushwork/config.json +0 -15
  56. package/dist/.pushwork/snapshot.json +0 -7
@@ -34,6 +34,7 @@ var __importStar = (this && this.__importStar) || (function () {
34
34
  })();
35
35
  Object.defineProperty(exports, "__esModule", { value: true });
36
36
  exports.SyncEngine = void 0;
37
+ const automerge_repo_1 = require("@automerge/automerge-repo");
37
38
  const A = __importStar(require("@automerge/automerge"));
38
39
  const types_1 = require("../types");
39
40
  const utils_1 = require("../utils");
@@ -44,13 +45,9 @@ const change_detection_1 = require("./change-detection");
44
45
  const move_detection_1 = require("./move-detection");
45
46
  const output_1 = require("../utils/output");
46
47
  /**
47
- * Post-sync delay constants for network propagation
48
- * These delays allow the WebSocket protocol to propagate peer changes after
49
- * our changes reach the server. waitForSync only ensures OUR changes reached
50
- * the server, not that we've RECEIVED changes from other peers.
51
- * TODO: remove need for this to exist.
48
+ * Sync configuration constants
52
49
  */
53
- const POST_SYNC_DELAY_MS = 200; // After we pushed changes
50
+ const BIDIRECTIONAL_SYNC_TIMEOUT_MS = 5000; // Timeout for bidirectional sync stability check
54
51
  /**
55
52
  * Bidirectional sync engine implementing two-phase sync
56
53
  */
@@ -58,7 +55,9 @@ class SyncEngine {
58
55
  constructor(repo, rootPath, config) {
59
56
  this.repo = repo;
60
57
  this.rootPath = rootPath;
61
- this.handlesToWaitOn = [];
58
+ // Map from path to handle for leaf-first sync ordering
59
+ // Path depth determines sync order (deepest first)
60
+ this.handlesByPath = new Map();
62
61
  this.config = config;
63
62
  this.snapshotManager = new snapshot_1.SnapshotManager(rootPath);
64
63
  this.changeDetector = new change_detection_1.ChangeDetector(repo, rootPath, config.exclude_patterns);
@@ -73,6 +72,15 @@ class SyncEngine {
73
72
  // Simply check the actual type of the content
74
73
  return typeof content === "string";
75
74
  }
75
+ /**
76
+ * Get a versioned URL from a handle (includes current heads).
77
+ * This ensures clients can fetch the exact version of the document.
78
+ */
79
+ getVersionedUrl(handle) {
80
+ const { documentId } = (0, automerge_repo_1.parseAutomergeUrl)(handle.url);
81
+ const heads = handle.heads();
82
+ return (0, automerge_repo_1.stringifyAutomergeUrl)({ documentId, heads });
83
+ }
76
84
  /**
77
85
  * Set the root directory URL in the snapshot
78
86
  */
@@ -111,6 +119,8 @@ class SyncEngine {
111
119
  result.directoriesChanged += commitResult.directoriesChanged;
112
120
  result.errors.push(...commitResult.errors);
113
121
  result.warnings.push(...commitResult.warnings);
122
+ // Update directory URLs with current heads after all children are populated
123
+ await this.updateDirectoryUrlsLeafFirst(snapshot);
114
124
  // Touch root directory if any changes were made
115
125
  const hasChanges = result.filesChanged > 0 || result.directoriesChanged > 0;
116
126
  if (hasChanges) {
@@ -144,12 +154,25 @@ class SyncEngine {
144
154
  warnings: [],
145
155
  timings: {},
146
156
  };
147
- // Reset handles to wait on
148
- this.handlesToWaitOn = [];
157
+ // Reset tracked handles for sync
158
+ this.handlesByPath = new Map();
149
159
  try {
150
160
  // Load current snapshot
151
161
  const snapshot = (await this.snapshotManager.load()) ||
152
162
  this.snapshotManager.createEmpty();
163
+ // Wait for initial sync to receive any pending remote changes
164
+ if (this.config.sync_enabled && snapshot.rootDirectoryUrl) {
165
+ try {
166
+ await (0, network_sync_1.waitForBidirectionalSync)(this.repo, snapshot.rootDirectoryUrl, this.config.sync_server_storage_id, {
167
+ timeoutMs: 3000, // Short timeout for initial sync
168
+ pollIntervalMs: 100,
169
+ stableChecksRequired: 3,
170
+ });
171
+ }
172
+ catch (error) {
173
+ output_1.out.taskLine(`Initial sync: ${error}`, true);
174
+ }
175
+ }
153
176
  // Detect all changes
154
177
  const changes = await this.changeDetector.detectChanges(snapshot);
155
178
  // Detect moves
@@ -160,38 +183,38 @@ class SyncEngine {
160
183
  result.directoriesChanged += phase1Result.directoriesChanged;
161
184
  result.errors.push(...phase1Result.errors);
162
185
  result.warnings.push(...phase1Result.warnings);
163
- // Always wait for network sync when enabled (not just when local changes exist)
164
- // This is critical for clone scenarios where we need to pull remote changes
186
+ // Update directory URLs with current heads after all children are populated
187
+ await this.updateDirectoryUrlsLeafFirst(snapshot);
188
+ // Wait for network sync (important for clone scenarios)
165
189
  if (this.config.sync_enabled) {
166
190
  try {
167
- // If we have a root directory URL, wait for it to sync
191
+ // If we have a root directory URL, add it to tracked handles
168
192
  if (snapshot.rootDirectoryUrl) {
169
193
  const rootDirUrl = snapshot.rootDirectoryUrl;
170
194
  const rootHandle = await this.repo.find(rootDirUrl);
171
- this.handlesToWaitOn.push(rootHandle);
195
+ this.handlesByPath.set("", rootHandle);
172
196
  }
173
- if (this.handlesToWaitOn.length > 0) {
174
- await (0, network_sync_1.waitForSync)(this.handlesToWaitOn, this.config.sync_server_storage_id);
175
- // CRITICAL: Wait a bit after our changes reach the server to allow
176
- // time for WebSocket to deliver OTHER peers' changes to us.
177
- // waitForSync only ensures OUR changes reached the server, not that
178
- // we've RECEIVED changes from other peers. This delay allows the
179
- // WebSocket protocol to propagate peer changes before we re-detect.
180
- // Without this, concurrent operations on different peers can miss
181
- // each other due to timing races.
182
- //
183
- // Optimization: Only wait if we pushed changes (shorter delay if no changes)
184
- await new Promise((resolve) => setTimeout(resolve, POST_SYNC_DELAY_MS));
197
+ if (this.handlesByPath.size > 0) {
198
+ // Sort handles leaf-first (deepest paths first, then shallower)
199
+ const sortedHandles = this.sortHandlesLeafFirst();
200
+ await (0, network_sync_1.waitForSync)(sortedHandles, this.config.sync_server_storage_id);
185
201
  }
202
+ // Wait for bidirectional sync to stabilize.
203
+ // This polls document heads until they stop changing, which indicates
204
+ // that both our outgoing changes and any incoming peer changes have
205
+ // been received.
206
+ await (0, network_sync_1.waitForBidirectionalSync)(this.repo, snapshot.rootDirectoryUrl, this.config.sync_server_storage_id, {
207
+ timeoutMs: BIDIRECTIONAL_SYNC_TIMEOUT_MS,
208
+ pollIntervalMs: 100,
209
+ stableChecksRequired: 3,
210
+ });
186
211
  }
187
212
  catch (error) {
188
213
  output_1.out.taskLine(`Network sync failed: ${error}`, true);
189
214
  result.warnings.push(`Network sync failed: ${error}`);
190
215
  }
191
216
  }
192
- // Re-detect remote changes after network sync to ensure fresh state
193
- // This fixes race conditions where we detect changes before server propagation
194
- // NOTE: We DON'T update snapshot heads yet - that would prevent detecting remote changes!
217
+ // Re-detect changes after network sync for fresh state
195
218
  const freshChanges = await this.changeDetector.detectChanges(snapshot);
196
219
  const freshRemoteChanges = freshChanges.filter((c) => c.changeType === types_1.ChangeType.REMOTE_ONLY ||
197
220
  c.changeType === types_1.ChangeType.BOTH_CHANGED);
@@ -201,10 +224,7 @@ class SyncEngine {
201
224
  result.directoriesChanged += phase2Result.directoriesChanged;
202
225
  result.errors.push(...phase2Result.errors);
203
226
  result.warnings.push(...phase2Result.warnings);
204
- // CRITICAL FIX: Update snapshot heads AFTER pulling remote changes
205
- // This ensures that change detection can find remote changes, and we only
206
- // update the snapshot after the filesystem is in sync with the documents
207
- // Update file document heads
227
+ // Update snapshot heads after pulling remote changes
208
228
  for (const [filePath, snapshotEntry] of snapshot.files.entries()) {
209
229
  try {
210
230
  const handle = await this.repo.find(snapshotEntry.url);
@@ -340,8 +360,7 @@ class SyncEngine {
340
360
  */
341
361
  async applyLocalChangeToRemote(change, snapshot) {
342
362
  const snapshotEntry = snapshot.files.get(change.path);
343
- // CRITICAL: Check for null explicitly, not falsy values
344
- // Empty strings "" and empty Uint8Array are valid file content!
363
+ // Check for null (empty string/Uint8Array are valid content)
345
364
  if (change.localContent === null) {
346
365
  // File was deleted locally
347
366
  if (snapshotEntry) {
@@ -356,12 +375,12 @@ class SyncEngine {
356
375
  // New file
357
376
  const handle = await this.createRemoteFile(change);
358
377
  if (handle) {
359
- await this.addFileToDirectory(snapshot, change.path, handle.url);
360
- // CRITICAL FIX: Update snapshot with heads AFTER adding to directory
361
- // The addFileToDirectory call above may have changed the document heads
378
+ // Use versioned URL (includes heads) so clients fetch correct version
379
+ const versionedUrl = this.getVersionedUrl(handle);
380
+ await this.addFileToDirectory(snapshot, change.path, versionedUrl);
362
381
  this.snapshotManager.updateFileEntry(snapshot, change.path, {
363
382
  path: (0, utils_1.joinAndNormalizePath)(this.rootPath, change.path),
364
- url: handle.url,
383
+ url: versionedUrl,
365
384
  head: handle.heads(),
366
385
  extension: (0, utils_1.getFileExtension)(change.path),
367
386
  mimeType: (0, utils_1.getEnhancedMimeType)(change.path),
@@ -381,8 +400,7 @@ class SyncEngine {
381
400
  if (!change.remoteHead) {
382
401
  throw new Error(`No remote head found for remote change to ${change.path}`);
383
402
  }
384
- // CRITICAL: Check for null explicitly, not falsy values
385
- // Empty strings "" and empty Uint8Array are valid file content!
403
+ // Check for null (empty string/Uint8Array are valid content)
386
404
  if (change.remoteContent === null) {
387
405
  // File was deleted remotely
388
406
  await (0, utils_1.removePath)(localPath);
@@ -404,9 +422,12 @@ class SyncEngine {
404
422
  try {
405
423
  const fileEntry = await (0, utils_1.findFileInDirectoryHierarchy)(this.repo, snapshot.rootDirectoryUrl, change.path);
406
424
  if (fileEntry) {
425
+ // Get versioned URL from handle (includes heads)
426
+ const fileHandle = await this.repo.find(fileEntry.url);
427
+ const versionedUrl = this.getVersionedUrl(fileHandle);
407
428
  this.snapshotManager.updateFileEntry(snapshot, change.path, {
408
429
  path: localPath,
409
- url: fileEntry.url,
430
+ url: versionedUrl,
410
431
  head: change.remoteHead,
411
432
  extension: (0, utils_1.getFileExtension)(change.path),
412
433
  mimeType: (0, utils_1.getEnhancedMimeType)(change.path),
@@ -435,12 +456,12 @@ class SyncEngine {
435
456
  if (move.fromPath !== move.toPath) {
436
457
  await this.removeFileFromDirectory(snapshot, move.fromPath);
437
458
  }
438
- // 2) Ensure destination directory document exists and add file entry there
459
+ // 2) Ensure destination directory document exists
439
460
  await this.ensureDirectoryDocument(snapshot, toDirPath);
440
- await this.addFileToDirectory(snapshot, move.toPath, fromEntry.url);
441
461
  // 3) Update the FileDocument name and content to match new location/state
442
462
  try {
443
- const handle = await this.repo.find(fromEntry.url);
463
+ // Use plain URL for mutable handle
464
+ const handle = await this.repo.find((0, utils_1.getPlainUrl)(fromEntry.url));
444
465
  const heads = fromEntry.head;
445
466
  // Update both name and content (if content changed during move)
446
467
  if (heads && heads.length > 0) {
@@ -471,27 +492,30 @@ class SyncEngine {
471
492
  }
472
493
  });
473
494
  }
495
+ // Get versioned URL after changes (includes current heads)
496
+ const versionedUrl = this.getVersionedUrl(handle);
497
+ // 4) Add file entry to destination directory with versioned URL
498
+ await this.addFileToDirectory(snapshot, move.toPath, versionedUrl);
474
499
  // Track file handle for network sync
475
- this.handlesToWaitOn.push(handle);
500
+ this.handlesByPath.set(move.toPath, handle);
501
+ // 5) Update snapshot entries
502
+ this.snapshotManager.removeFileEntry(snapshot, move.fromPath);
503
+ this.snapshotManager.updateFileEntry(snapshot, move.toPath, {
504
+ ...fromEntry,
505
+ path: (0, utils_1.joinAndNormalizePath)(this.rootPath, move.toPath),
506
+ url: versionedUrl,
507
+ head: handle.heads(),
508
+ });
476
509
  }
477
510
  catch (e) {
478
511
  // Failed to update file name - file may have been deleted
479
512
  output_1.out.taskLine(`Warning: Failed to rename ${move.fromPath} to ${move.toPath}`, true);
480
513
  }
481
- // 4) Update snapshot entries
482
- this.snapshotManager.removeFileEntry(snapshot, move.fromPath);
483
- this.snapshotManager.updateFileEntry(snapshot, move.toPath, {
484
- ...fromEntry,
485
- path: (0, utils_1.joinAndNormalizePath)(this.rootPath, move.toPath),
486
- head: fromEntry.head, // will be updated later when heads advance
487
- });
488
514
  }
489
515
  /**
490
516
  * Create new remote file document
491
517
  */
492
518
  async createRemoteFile(change) {
493
- // CRITICAL: Check for null explicitly, not falsy values
494
- // Empty strings "" and empty Uint8Array are valid file content!
495
519
  if (change.localContent === null)
496
520
  return null;
497
521
  const isText = this.isTextContent(change.localContent);
@@ -519,20 +543,20 @@ class SyncEngine {
519
543
  }
520
544
  // Always track newly created files for network sync
521
545
  // (they always represent a change that needs to sync)
522
- this.handlesToWaitOn.push(handle);
546
+ this.handlesByPath.set(change.path, handle);
523
547
  return handle;
524
548
  }
525
549
  /**
526
550
  * Update existing remote file document
527
551
  */
528
552
  async updateRemoteFile(url, content, snapshot, filePath) {
529
- const handle = await this.repo.find(url);
553
+ // Use plain URL for mutable handle
554
+ const handle = await this.repo.find((0, utils_1.getPlainUrl)(url));
530
555
  // Check if content actually changed before tracking for sync
531
556
  const doc = await handle.doc();
532
557
  const currentContent = doc?.content;
533
558
  const contentChanged = !(0, content_1.isContentEqual)(content, currentContent);
534
- // CRITICAL FIX: Always update snapshot heads, even when content is identical
535
- // This prevents stale head issues that cause false change detection
559
+ // Update snapshot heads even when content is identical
536
560
  const snapshotEntry = snapshot.files.get(filePath);
537
561
  if (snapshotEntry) {
538
562
  // Update snapshot with current document heads
@@ -566,7 +590,7 @@ class SyncEngine {
566
590
  });
567
591
  }
568
592
  // Only track files that actually changed content
569
- this.handlesToWaitOn.push(handle);
593
+ this.handlesByPath.set(filePath, handle);
570
594
  }
571
595
  /**
572
596
  * Delete remote file document
@@ -575,7 +599,8 @@ class SyncEngine {
575
599
  // In Automerge, we don't actually delete documents
576
600
  // They become orphaned and will be garbage collected
577
601
  // For now, we just mark them as deleted by clearing content
578
- const handle = await this.repo.find(url);
602
+ // Use plain URL for mutable handle
603
+ const handle = await this.repo.find((0, utils_1.getPlainUrl)(url));
579
604
  // const doc = await handle.doc(); // no longer needed
580
605
  let heads;
581
606
  if (snapshot && filePath) {
@@ -603,7 +628,8 @@ class SyncEngine {
603
628
  const directoryPath = pathParts.join("/");
604
629
  // Get or create the parent directory document
605
630
  const parentDirUrl = await this.ensureDirectoryDocument(snapshot, directoryPath);
606
- const dirHandle = await this.repo.find(parentDirUrl);
631
+ // Use plain URL for mutable handle
632
+ const dirHandle = await this.repo.find((0, utils_1.getPlainUrl)(parentDirUrl));
607
633
  let didChange = false;
608
634
  const snapshotEntry = snapshot.directories.get(directoryPath);
609
635
  const heads = snapshotEntry?.head;
@@ -633,13 +659,10 @@ class SyncEngine {
633
659
  }
634
660
  });
635
661
  }
636
- if (didChange) {
637
- this.handlesToWaitOn.push(dirHandle);
638
- // CRITICAL FIX: Update snapshot with new directory heads immediately
639
- // This prevents stale head issues that cause convergence problems
640
- if (snapshotEntry) {
641
- snapshotEntry.head = dirHandle.heads();
642
- }
662
+ // Always track the directory (even if unchanged) for proper leaf-first sync ordering
663
+ this.handlesByPath.set(directoryPath, dirHandle);
664
+ if (didChange && snapshotEntry) {
665
+ snapshotEntry.head = dirHandle.heads();
643
666
  }
644
667
  }
645
668
  /**
@@ -673,15 +696,19 @@ class SyncEngine {
673
696
  // Directory entries in parent docs may not carry valid heads
674
697
  try {
675
698
  const childDirHandle = await this.repo.find(existingDirEntry.url);
676
- const childHeads = childDirHandle.heads();
677
- // Update snapshot with discovered directory using validated heads
699
+ // Track discovered directory for sync
700
+ this.handlesByPath.set(directoryPath, childDirHandle);
701
+ // Get versioned URL for storage (includes current heads)
702
+ const versionedUrl = this.getVersionedUrl(childDirHandle);
703
+ // Update snapshot with discovered directory using versioned URL
678
704
  this.snapshotManager.updateDirectoryEntry(snapshot, directoryPath, {
679
705
  path: (0, utils_1.joinAndNormalizePath)(this.rootPath, directoryPath),
680
- url: existingDirEntry.url,
681
- head: childHeads,
706
+ url: versionedUrl,
707
+ head: childDirHandle.heads(),
682
708
  entries: [],
683
709
  });
684
- return existingDirEntry.url;
710
+ // Return versioned URL (callers use getPlainUrl() when they need to modify)
711
+ return versionedUrl;
685
712
  }
686
713
  catch (resolveErr) {
687
714
  // Failed to resolve directory - fall through to create a fresh directory document
@@ -698,8 +725,11 @@ class SyncEngine {
698
725
  docs: [],
699
726
  };
700
727
  const dirHandle = this.repo.create(dirDoc);
728
+ // Get versioned URL for the new directory (includes heads)
729
+ const versionedDirUrl = this.getVersionedUrl(dirHandle);
701
730
  // Add this directory to its parent
702
- const parentHandle = await this.repo.find(parentDirUrl);
731
+ // Use plain URL for mutable handle
732
+ const parentHandle = await this.repo.find((0, utils_1.getPlainUrl)(parentDirUrl));
703
733
  let didChange = false;
704
734
  parentHandle.change((doc) => {
705
735
  // Double-check that entry doesn't exist (race condition protection)
@@ -708,30 +738,29 @@ class SyncEngine {
708
738
  doc.docs.push({
709
739
  name: currentDirName,
710
740
  type: "folder",
711
- url: dirHandle.url,
741
+ url: versionedDirUrl,
712
742
  });
713
743
  didChange = true;
714
744
  }
715
745
  });
716
746
  // Track directory handles for sync
717
- this.handlesToWaitOn.push(dirHandle);
747
+ this.handlesByPath.set(directoryPath, dirHandle);
718
748
  if (didChange) {
719
- this.handlesToWaitOn.push(parentHandle);
720
- // CRITICAL FIX: Update parent directory heads in snapshot immediately
721
- // This prevents stale head issues when parent directory is modified
749
+ this.handlesByPath.set(parentPath, parentHandle);
722
750
  const parentSnapshotEntry = snapshot.directories.get(parentPath);
723
751
  if (parentSnapshotEntry) {
724
752
  parentSnapshotEntry.head = parentHandle.heads();
725
753
  }
726
754
  }
727
- // Update snapshot with new directory
755
+ // Update snapshot with new directory (use versioned URL for storage)
728
756
  this.snapshotManager.updateDirectoryEntry(snapshot, directoryPath, {
729
757
  path: (0, utils_1.joinAndNormalizePath)(this.rootPath, directoryPath),
730
- url: dirHandle.url,
758
+ url: versionedDirUrl,
731
759
  head: dirHandle.heads(),
732
760
  entries: [],
733
761
  });
734
- return dirHandle.url;
762
+ // Return versioned URL (callers use getPlainUrl() when they need to modify)
763
+ return versionedDirUrl;
735
764
  }
736
765
  /**
737
766
  * Remove file entry from directory document
@@ -756,9 +785,10 @@ class SyncEngine {
756
785
  parentDirUrl = existingDir.url;
757
786
  }
758
787
  try {
759
- const dirHandle = await this.repo.find(parentDirUrl);
788
+ // Use plain URL for mutable handle
789
+ const dirHandle = await this.repo.find((0, utils_1.getPlainUrl)(parentDirUrl));
760
790
  // Track this handle for network sync waiting
761
- this.handlesToWaitOn.push(dirHandle);
791
+ this.handlesByPath.set(directoryPath, dirHandle);
762
792
  const snapshotEntry = snapshot.directories.get(directoryPath);
763
793
  const heads = snapshotEntry?.head;
764
794
  let didChange = false;
@@ -782,14 +812,11 @@ class SyncEngine {
782
812
  }
783
813
  });
784
814
  }
785
- // CRITICAL FIX: Update snapshot with new directory heads immediately
786
- // This prevents stale head issues that cause convergence problems
787
815
  if (didChange && snapshotEntry) {
788
816
  snapshotEntry.head = dirHandle.heads();
789
817
  }
790
818
  }
791
819
  catch (error) {
792
- // Failed to remove from directory - re-throw for caller to handle
793
820
  throw error;
794
821
  }
795
822
  }
@@ -892,9 +919,7 @@ class SyncEngine {
892
919
  });
893
920
  }
894
921
  // Track root directory for network sync
895
- this.handlesToWaitOn.push(rootHandle);
896
- // CRITICAL FIX: Update root directory heads in snapshot immediately
897
- // This prevents stale head issues when root directory is modified
922
+ this.handlesByPath.set("", rootHandle);
898
923
  if (snapshotEntry) {
899
924
  snapshotEntry.head = rootHandle.heads();
900
925
  }
@@ -903,6 +928,241 @@ class SyncEngine {
903
928
  // Failed to update root directory timestamp
904
929
  }
905
930
  }
931
+ /**
932
+ * Sort tracked handles leaf-first (deepest paths first).
933
+ * Returns handles in sorted order, logging URLs with heads for debugging.
934
+ */
935
+ sortHandlesLeafFirst() {
936
+ // Sort paths by depth (descending - deepest first), then alphabetically
937
+ const sortedPaths = Array.from(this.handlesByPath.keys()).sort((a, b) => {
938
+ const depthA = a ? a.split("/").length : 0;
939
+ const depthB = b ? b.split("/").length : 0;
940
+ // Deepest first
941
+ if (depthA !== depthB) {
942
+ return depthB - depthA;
943
+ }
944
+ // Alphabetically by path
945
+ return a.localeCompare(b);
946
+ });
947
+ // Log the sync order with versioned URLs for debugging (keep on complete)
948
+ const handles = [];
949
+ for (const path of sortedPaths) {
950
+ const handle = this.handlesByPath.get(path);
951
+ const versionedUrl = this.getVersionedUrl(handle);
952
+ output_1.out.taskLine(`Sync: ${path || "(root)"} -> ${versionedUrl}`, true);
953
+ handles.push(handle);
954
+ }
955
+ return handles;
956
+ }
957
+ /**
958
+ * Update all URLs (files and directories) in directory documents with current heads.
959
+ *
960
+ * This MUST be called AFTER all changes are applied but BEFORE network sync.
961
+ * The problem it solves:
962
+ * 1. When we create/update a file or directory and store its URL, the URL captures
963
+ * the heads at that moment
964
+ * 2. Later operations may advance the document's heads
965
+ * 3. But the URL stored in the parent directory has stale heads
966
+ * 4. Clients reading the directory would get old views of entries
967
+ *
968
+ * The fix: walk leaf-first and update all entry URLs with current heads,
969
+ * AFTER all changes have been applied. This ensures clients get consistent,
970
+ * up-to-date versioned URLs.
971
+ */
972
+ async updateDirectoryUrlsLeafFirst(snapshot) {
973
+ // First, update file URLs in their parent directories
974
+ await this.updateFileUrlsInDirectories(snapshot);
975
+ // Then, update directory URLs in their parent directories (leaf-first)
976
+ await this.updateSubdirectoryUrls(snapshot);
977
+ }
978
+ /**
979
+ * Update file URLs in directory documents with current heads.
980
+ */
981
+ async updateFileUrlsInDirectories(snapshot) {
982
+ // Group files by their parent directory
983
+ const filesByDir = new Map();
984
+ for (const filePath of snapshot.files.keys()) {
985
+ const pathParts = filePath.split("/");
986
+ pathParts.pop(); // Remove filename
987
+ const dirPath = pathParts.join("/");
988
+ if (!filesByDir.has(dirPath)) {
989
+ filesByDir.set(dirPath, []);
990
+ }
991
+ filesByDir.get(dirPath).push(filePath);
992
+ }
993
+ // Process each directory that has files
994
+ for (const [dirPath, filePaths] of filesByDir.entries()) {
995
+ try {
996
+ // Get the directory URL
997
+ let dirUrl;
998
+ if (!dirPath || dirPath === "") {
999
+ if (!snapshot.rootDirectoryUrl)
1000
+ continue;
1001
+ dirUrl = snapshot.rootDirectoryUrl;
1002
+ }
1003
+ else {
1004
+ const dirEntry = snapshot.directories.get(dirPath);
1005
+ if (!dirEntry)
1006
+ continue;
1007
+ dirUrl = dirEntry.url;
1008
+ }
1009
+ // Get directory handle
1010
+ const dirHandle = await this.repo.find((0, utils_1.getPlainUrl)(dirUrl));
1011
+ // Get current heads for changeAt
1012
+ const snapshotEntry = snapshot.directories.get(dirPath);
1013
+ const heads = snapshotEntry?.head;
1014
+ // Build a map of file names to their current versioned URLs
1015
+ const fileUrlUpdates = new Map();
1016
+ for (const filePath of filePaths) {
1017
+ const fileEntry = snapshot.files.get(filePath);
1018
+ if (!fileEntry)
1019
+ continue;
1020
+ // Get current handle for this file
1021
+ const fileHandle = await this.repo.find((0, utils_1.getPlainUrl)(fileEntry.url));
1022
+ // Get versioned URL with current heads
1023
+ const currentVersionedUrl = this.getVersionedUrl(fileHandle);
1024
+ // Update snapshot entry
1025
+ snapshot.files.set(filePath, {
1026
+ ...fileEntry,
1027
+ url: currentVersionedUrl,
1028
+ head: fileHandle.heads(),
1029
+ });
1030
+ // Store for directory update
1031
+ const fileName = filePath.split("/").pop() || "";
1032
+ fileUrlUpdates.set(fileName, currentVersionedUrl);
1033
+ }
1034
+ // Update all file entries in the directory document
1035
+ let didChange = false;
1036
+ if (heads) {
1037
+ dirHandle.changeAt(heads, (doc) => {
1038
+ for (const [fileName, newUrl] of fileUrlUpdates) {
1039
+ const existingIndex = doc.docs.findIndex((entry) => entry.name === fileName && entry.type === "file");
1040
+ if (existingIndex !== -1 && doc.docs[existingIndex].url !== newUrl) {
1041
+ doc.docs[existingIndex].url = newUrl;
1042
+ didChange = true;
1043
+ }
1044
+ }
1045
+ });
1046
+ }
1047
+ else {
1048
+ dirHandle.change((doc) => {
1049
+ for (const [fileName, newUrl] of fileUrlUpdates) {
1050
+ const existingIndex = doc.docs.findIndex((entry) => entry.name === fileName && entry.type === "file");
1051
+ if (existingIndex !== -1 && doc.docs[existingIndex].url !== newUrl) {
1052
+ doc.docs[existingIndex].url = newUrl;
1053
+ didChange = true;
1054
+ }
1055
+ }
1056
+ });
1057
+ }
1058
+ // Track directory and update heads
1059
+ if (didChange) {
1060
+ this.handlesByPath.set(dirPath, dirHandle);
1061
+ if (snapshotEntry) {
1062
+ snapshotEntry.head = dirHandle.heads();
1063
+ }
1064
+ }
1065
+ }
1066
+ catch (error) {
1067
+ output_1.out.taskLine(`Warning: Failed to update file URLs in directory ${dirPath}`, true);
1068
+ }
1069
+ }
1070
+ }
1071
+ /**
1072
+ * Update subdirectory URLs in parent directories with current heads.
1073
+ * Processes leaf-first (deepest directories first).
1074
+ */
1075
+ async updateSubdirectoryUrls(snapshot) {
1076
+ // Get all directory paths and sort leaf-first (deepest first)
1077
+ const directoryPaths = Array.from(snapshot.directories.keys()).sort((a, b) => {
1078
+ const depthA = a ? a.split("/").length : 0;
1079
+ const depthB = b ? b.split("/").length : 0;
1080
+ // Deepest first
1081
+ if (depthA !== depthB) {
1082
+ return depthB - depthA;
1083
+ }
1084
+ // Alphabetically by path
1085
+ return a.localeCompare(b);
1086
+ });
1087
+ // Update each directory's URL in its parent
1088
+ for (const dirPath of directoryPaths) {
1089
+ // Skip root directory (has no parent)
1090
+ if (!dirPath || dirPath === "") {
1091
+ continue;
1092
+ }
1093
+ const dirEntry = snapshot.directories.get(dirPath);
1094
+ if (!dirEntry)
1095
+ continue;
1096
+ try {
1097
+ // Get current handle for this directory (use plain URL to get mutable handle)
1098
+ const dirHandle = await this.repo.find((0, utils_1.getPlainUrl)(dirEntry.url));
1099
+ // Get versioned URL with CURRENT heads (after all children populated)
1100
+ const currentVersionedUrl = this.getVersionedUrl(dirHandle);
1101
+ // Update snapshot entry with current heads and versioned URL
1102
+ snapshot.directories.set(dirPath, {
1103
+ ...dirEntry,
1104
+ url: currentVersionedUrl,
1105
+ head: dirHandle.heads(),
1106
+ });
1107
+ // Get parent path
1108
+ const pathParts = dirPath.split("/");
1109
+ const dirName = pathParts.pop() || "";
1110
+ const parentPath = pathParts.join("/");
1111
+ // Get parent directory handle
1112
+ let parentDirUrl;
1113
+ if (!parentPath || parentPath === "") {
1114
+ // Parent is root
1115
+ if (!snapshot.rootDirectoryUrl)
1116
+ continue;
1117
+ parentDirUrl = snapshot.rootDirectoryUrl;
1118
+ }
1119
+ else {
1120
+ const parentEntry = snapshot.directories.get(parentPath);
1121
+ if (!parentEntry)
1122
+ continue;
1123
+ parentDirUrl = parentEntry.url;
1124
+ }
1125
+ // Update the directory entry in the parent with the new versioned URL
1126
+ const parentHandle = await this.repo.find((0, utils_1.getPlainUrl)(parentDirUrl));
1127
+ // Get parent's current heads for changeAt
1128
+ const parentSnapshotEntry = parentPath === ""
1129
+ ? snapshot.directories.get("")
1130
+ : snapshot.directories.get(parentPath);
1131
+ const parentHeads = parentSnapshotEntry?.head;
1132
+ let didChange = false;
1133
+ if (parentHeads) {
1134
+ parentHandle.changeAt(parentHeads, (doc) => {
1135
+ const existingIndex = doc.docs.findIndex((entry) => entry.name === dirName && entry.type === "folder");
1136
+ if (existingIndex !== -1) {
1137
+ // Update the URL with current versioned URL
1138
+ doc.docs[existingIndex].url = currentVersionedUrl;
1139
+ didChange = true;
1140
+ }
1141
+ });
1142
+ }
1143
+ else {
1144
+ parentHandle.change((doc) => {
1145
+ const existingIndex = doc.docs.findIndex((entry) => entry.name === dirName && entry.type === "folder");
1146
+ if (existingIndex !== -1) {
1147
+ // Update the URL with current versioned URL
1148
+ doc.docs[existingIndex].url = currentVersionedUrl;
1149
+ didChange = true;
1150
+ }
1151
+ });
1152
+ }
1153
+ // Track parent for sync and update its heads in snapshot
1154
+ if (didChange) {
1155
+ this.handlesByPath.set(parentPath, parentHandle);
1156
+ if (parentSnapshotEntry) {
1157
+ parentSnapshotEntry.head = parentHandle.heads();
1158
+ }
1159
+ }
1160
+ }
1161
+ catch (error) {
1162
+ output_1.out.taskLine(`Warning: Failed to update directory URL for ${dirPath}`, true);
1163
+ }
1164
+ }
1165
+ }
906
1166
  }
907
1167
  exports.SyncEngine = SyncEngine;
908
1168
  //# sourceMappingURL=sync-engine.js.map