pushwork 1.1.4 → 2.0.0-a.sub.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (100)
  1. package/CLAUDE.md +9 -5
  2. package/dist/cli.js +48 -55
  3. package/dist/cli.js.map +1 -1
  4. package/dist/commands.d.ts +5 -1
  5. package/dist/commands.d.ts.map +1 -1
  6. package/dist/commands.js +262 -263
  7. package/dist/commands.js.map +1 -1
  8. package/dist/core/change-detection.d.ts +1 -1
  9. package/dist/core/change-detection.d.ts.map +1 -1
  10. package/dist/core/change-detection.js +66 -103
  11. package/dist/core/change-detection.js.map +1 -1
  12. package/dist/core/config.d.ts +1 -1
  13. package/dist/core/config.d.ts.map +1 -1
  14. package/dist/core/config.js +14 -57
  15. package/dist/core/config.js.map +1 -1
  16. package/dist/core/index.d.ts +5 -5
  17. package/dist/core/index.d.ts.map +1 -1
  18. package/dist/core/index.js +5 -21
  19. package/dist/core/index.js.map +1 -1
  20. package/dist/core/move-detection.d.ts +2 -2
  21. package/dist/core/move-detection.d.ts.map +1 -1
  22. package/dist/core/move-detection.js +9 -13
  23. package/dist/core/move-detection.js.map +1 -1
  24. package/dist/core/snapshot.d.ts +1 -1
  25. package/dist/core/snapshot.d.ts.map +1 -1
  26. package/dist/core/snapshot.js +9 -46
  27. package/dist/core/snapshot.js.map +1 -1
  28. package/dist/core/sync-engine.d.ts +8 -2
  29. package/dist/core/sync-engine.d.ts.map +1 -1
  30. package/dist/core/sync-engine.js +171 -175
  31. package/dist/core/sync-engine.js.map +1 -1
  32. package/dist/index.d.ts +4 -4
  33. package/dist/index.d.ts.map +1 -1
  34. package/dist/index.js +4 -20
  35. package/dist/index.js.map +1 -1
  36. package/dist/types/config.d.ts +7 -6
  37. package/dist/types/config.d.ts.map +1 -1
  38. package/dist/types/config.js +1 -5
  39. package/dist/types/config.js.map +1 -1
  40. package/dist/types/documents.js +4 -7
  41. package/dist/types/documents.js.map +1 -1
  42. package/dist/types/index.d.ts +3 -3
  43. package/dist/types/index.d.ts.map +1 -1
  44. package/dist/types/index.js +3 -19
  45. package/dist/types/index.js.map +1 -1
  46. package/dist/types/snapshot.js +1 -2
  47. package/dist/utils/content.js +4 -8
  48. package/dist/utils/content.js.map +1 -1
  49. package/dist/utils/directory.js +5 -9
  50. package/dist/utils/directory.js.map +1 -1
  51. package/dist/utils/fs.d.ts +1 -1
  52. package/dist/utils/fs.d.ts.map +1 -1
  53. package/dist/utils/fs.js +34 -84
  54. package/dist/utils/fs.js.map +1 -1
  55. package/dist/utils/index.d.ts +4 -4
  56. package/dist/utils/index.d.ts.map +1 -1
  57. package/dist/utils/index.js +4 -20
  58. package/dist/utils/index.js.map +1 -1
  59. package/dist/utils/mime-types.js +5 -43
  60. package/dist/utils/mime-types.js.map +1 -1
  61. package/dist/utils/network-sync.d.ts +13 -8
  62. package/dist/utils/network-sync.d.ts.map +1 -1
  63. package/dist/utils/network-sync.js +65 -137
  64. package/dist/utils/network-sync.js.map +1 -1
  65. package/dist/utils/node-polyfills.d.ts +9 -0
  66. package/dist/utils/node-polyfills.d.ts.map +1 -0
  67. package/dist/utils/node-polyfills.js +9 -0
  68. package/dist/utils/node-polyfills.js.map +1 -0
  69. package/dist/utils/output.js +32 -39
  70. package/dist/utils/output.js.map +1 -1
  71. package/dist/utils/repo-factory.d.ts +8 -2
  72. package/dist/utils/repo-factory.d.ts.map +1 -1
  73. package/dist/utils/repo-factory.js +38 -47
  74. package/dist/utils/repo-factory.js.map +1 -1
  75. package/dist/utils/string-similarity.js +1 -5
  76. package/dist/utils/string-similarity.js.map +1 -1
  77. package/dist/utils/text-diff.js +5 -43
  78. package/dist/utils/text-diff.js.map +1 -1
  79. package/dist/utils/trace.js +6 -11
  80. package/dist/utils/trace.js.map +1 -1
  81. package/package.json +7 -5
  82. package/src/cli.ts +25 -34
  83. package/src/commands.ts +75 -11
  84. package/src/core/change-detection.ts +4 -4
  85. package/src/core/config.ts +2 -12
  86. package/src/core/index.ts +5 -5
  87. package/src/core/move-detection.ts +4 -4
  88. package/src/core/snapshot.ts +3 -3
  89. package/src/core/sync-engine.ts +82 -50
  90. package/src/index.ts +4 -4
  91. package/src/types/config.ts +8 -8
  92. package/src/types/index.ts +3 -3
  93. package/src/utils/directory.ts +1 -1
  94. package/src/utils/fs.ts +6 -4
  95. package/src/utils/index.ts +4 -4
  96. package/src/utils/network-sync.ts +62 -115
  97. package/src/utils/node-polyfills.ts +8 -0
  98. package/src/utils/repo-factory.ts +55 -10
  99. package/src/utils/trace.ts +1 -1
  100. package/tsconfig.json +2 -1
@@ -1,50 +1,14 @@
1
- "use strict";
2
- var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
3
- if (k2 === undefined) k2 = k;
4
- var desc = Object.getOwnPropertyDescriptor(m, k);
5
- if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
6
- desc = { enumerable: true, get: function() { return m[k]; } };
7
- }
8
- Object.defineProperty(o, k2, desc);
9
- }) : (function(o, m, k, k2) {
10
- if (k2 === undefined) k2 = k;
11
- o[k2] = m[k];
12
- }));
13
- var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
14
- Object.defineProperty(o, "default", { enumerable: true, value: v });
15
- }) : function(o, v) {
16
- o["default"] = v;
17
- });
18
- var __importStar = (this && this.__importStar) || (function () {
19
- var ownKeys = function(o) {
20
- ownKeys = Object.getOwnPropertyNames || function (o) {
21
- var ar = [];
22
- for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
23
- return ar;
24
- };
25
- return ownKeys(o);
26
- };
27
- return function (mod) {
28
- if (mod && mod.__esModule) return mod;
29
- var result = {};
30
- if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
31
- __setModuleDefault(result, mod);
32
- return result;
33
- };
34
- })();
35
- Object.defineProperty(exports, "__esModule", { value: true });
36
- exports.SyncEngine = void 0;
37
- const automerge_repo_1 = require("@automerge/automerge-repo");
38
- const A = __importStar(require("@automerge/automerge"));
39
- const types_1 = require("../types");
40
- const utils_1 = require("../utils");
41
- const content_1 = require("../utils/content");
42
- const network_sync_1 = require("../utils/network-sync");
43
- const snapshot_1 = require("./snapshot");
44
- const change_detection_1 = require("./change-detection");
45
- const move_detection_1 = require("./move-detection");
46
- const output_1 = require("../utils/output");
47
- const path = __importStar(require("path"));
1
+ import { parseAutomergeUrl, stringifyAutomergeUrl, } from "@automerge/automerge-repo";
2
+ import * as A from "@automerge/automerge";
3
+ import { ChangeType, FileType, } from "../types/index.js";
4
+ import { writeFileContent, removePath, getFileExtension, getEnhancedMimeType, formatRelativePath, findFileInDirectoryHierarchy, joinAndNormalizePath, getPlainUrl, updateTextContent, readDocContent, } from "../utils/index.js";
5
+ import { isContentEqual, contentHash } from "../utils/content.js";
6
+ import { waitForSync, waitForBidirectionalSync } from "../utils/network-sync.js";
7
+ import { SnapshotManager } from "./snapshot.js";
8
+ import { ChangeDetector } from "./change-detection.js";
9
+ import { MoveDetector } from "./move-detection.js";
10
+ import { out } from "../utils/output.js";
11
+ import * as path from "path";
48
12
  const isDebug = !!process.env.DEBUG;
49
13
  function debug(...args) {
50
14
  if (isDebug)
@@ -69,7 +33,7 @@ const BIDIRECTIONAL_SYNC_TIMEOUT_MS = 5000; // Timeout for bidirectional sync st
69
33
  /**
70
34
  * Bidirectional sync engine implementing two-phase sync
71
35
  */
72
- class SyncEngine {
36
+ export class SyncEngine {
73
37
  constructor(repo, rootPath, config) {
74
38
  this.repo = repo;
75
39
  this.rootPath = rootPath;
@@ -77,9 +41,9 @@ class SyncEngine {
77
41
  // Path depth determines sync order (deepest first)
78
42
  this.handlesByPath = new Map();
79
43
  this.config = config;
80
- this.snapshotManager = new snapshot_1.SnapshotManager(rootPath);
81
- this.changeDetector = new change_detection_1.ChangeDetector(repo, rootPath, config.exclude_patterns, config.artifact_directories || []);
82
- this.moveDetector = new move_detection_1.MoveDetector(config.sync.move_detection_threshold);
44
+ this.snapshotManager = new SnapshotManager(rootPath);
45
+ this.changeDetector = new ChangeDetector(repo, rootPath, config.exclude_patterns, config.artifact_directories || []);
46
+ this.moveDetector = new MoveDetector(config.sync.move_detection_threshold);
83
47
  }
84
48
  /**
85
49
  * Determine if content should be treated as text for Automerge text operations
@@ -95,9 +59,9 @@ class SyncEngine {
95
59
  * This ensures clients can fetch the exact version of the document.
96
60
  */
97
61
  getVersionedUrl(handle) {
98
- const { documentId } = (0, automerge_repo_1.parseAutomergeUrl)(handle.url);
62
+ const { documentId } = parseAutomergeUrl(handle.url);
99
63
  const heads = handle.heads();
100
- return (0, automerge_repo_1.stringifyAutomergeUrl)({ documentId, heads });
64
+ return stringifyAutomergeUrl({ documentId, heads });
101
65
  }
102
66
  /**
103
67
  * Determine if a file path is inside an artifact directory.
@@ -109,7 +73,7 @@ class SyncEngine {
109
73
  return artifactDirs.some(dir => filePath === dir || filePath.startsWith(dir + "/"));
110
74
  }
111
75
  /**
112
- * Get the appropriate URL for a directory entry.
76
+ * Get the appropriate URL for a file's directory entry.
113
77
  * Artifact paths get versioned URLs (with heads) for exact version fetching.
114
78
  * Non-artifact paths get plain URLs for collaborative editing.
115
79
  */
@@ -117,7 +81,15 @@ class SyncEngine {
117
81
  if (this.isArtifactPath(filePath)) {
118
82
  return this.getVersionedUrl(handle);
119
83
  }
120
- return (0, utils_1.getPlainUrl)(handle.url);
84
+ return getPlainUrl(handle.url);
85
+ }
86
+ /**
87
+ * Get the appropriate URL for a subdirectory's directory entry.
88
+ * Always uses plain URLs — versioned URLs on directories can cause
89
+ * issues where consumers see a version without the docs array.
90
+ */
91
+ getDirEntryUrl(handle) {
92
+ return getPlainUrl(handle.url);
121
93
  }
122
94
  /**
123
95
  * Set the root directory URL in the snapshot
@@ -153,7 +125,7 @@ class SyncEngine {
153
125
  return;
154
126
  // Clear the root directory document's entries
155
127
  if (snapshot.rootDirectoryUrl) {
156
- const rootHandle = await this.repo.find((0, utils_1.getPlainUrl)(snapshot.rootDirectoryUrl));
128
+ const rootHandle = await this.repo.find(getPlainUrl(snapshot.rootDirectoryUrl));
157
129
  rootHandle.change((doc) => {
158
130
  doc.docs.splice(0, doc.docs.length);
159
131
  });
@@ -189,11 +161,8 @@ class SyncEngine {
189
161
  result.directoriesChanged += commitResult.directoriesChanged;
190
162
  result.errors.push(...commitResult.errors);
191
163
  result.warnings.push(...commitResult.warnings);
192
- // Touch root directory if any changes were made
193
- const hasChanges = result.filesChanged > 0 || result.directoriesChanged > 0;
194
- if (hasChanges) {
195
- await this.touchRootDirectory(snapshot);
196
- }
164
+ // Always touch root directory after commit
165
+ await this.touchRootDirectory(snapshot);
197
166
  // Save updated snapshot
198
167
  await this.snapshotManager.save(snapshot);
199
168
  result.success = result.errors.length === 0;
@@ -216,15 +185,15 @@ class SyncEngine {
216
185
  * Returns new handles that should be retried for sync.
217
186
  */
218
187
  async recreateFailedDocuments(failedHandles, snapshot) {
219
- const failedUrls = new Set(failedHandles.map(h => (0, utils_1.getPlainUrl)(h.url)));
188
+ const failedUrls = new Set(failedHandles.map(h => getPlainUrl(h.url)));
220
189
  const newHandles = [];
221
190
  // Find which paths correspond to the failed handles
222
191
  for (const [filePath, entry] of snapshot.files.entries()) {
223
- const plainUrl = (0, utils_1.getPlainUrl)(entry.url);
192
+ const plainUrl = getPlainUrl(entry.url);
224
193
  if (!failedUrls.has(plainUrl))
225
194
  continue;
226
195
  debug(`recreate: recreating document for ${filePath} (${plainUrl})`);
227
- output_1.out.taskLine(`Recreating document for ${filePath}`);
196
+ out.taskLine(`Recreating document for ${filePath}`);
228
197
  try {
229
198
  // Read the current content from the old handle
230
199
  const oldHandle = await this.repo.find(plainUrl);
@@ -233,7 +202,7 @@ class SyncEngine {
233
202
  debug(`recreate: could not read doc for ${filePath}, skipping`);
234
203
  continue;
235
204
  }
236
- const content = (0, utils_1.readDocContent)(doc.content);
205
+ const content = readDocContent(doc.content);
237
206
  if (content === null) {
238
207
  debug(`recreate: null content for ${filePath}, skipping`);
239
208
  continue;
@@ -241,8 +210,8 @@ class SyncEngine {
241
210
  // Create a fresh document
242
211
  const fakeChange = {
243
212
  path: filePath,
244
- changeType: types_1.ChangeType.LOCAL_ONLY,
245
- fileType: this.isTextContent(content) ? types_1.FileType.TEXT : types_1.FileType.BINARY,
213
+ changeType: ChangeType.LOCAL_ONLY,
214
+ fileType: this.isTextContent(content) ? FileType.TEXT : FileType.BINARY,
246
215
  localContent: content,
247
216
  remoteContent: null,
248
217
  };
@@ -255,7 +224,7 @@ class SyncEngine {
255
224
  ...entry,
256
225
  url: entryUrl,
257
226
  head: newHandle.heads(),
258
- ...(this.isArtifactPath(filePath) ? { contentHash: (0, content_1.contentHash)(content) } : {}),
227
+ ...(this.isArtifactPath(filePath) ? { contentHash: contentHash(content) } : {}),
259
228
  });
260
229
  // Update parent directory entry to point to new document
261
230
  const pathParts = filePath.split("/");
@@ -271,7 +240,7 @@ class SyncEngine {
271
240
  continue;
272
241
  dirUrl = dirEntry.url;
273
242
  }
274
- const dirHandle = await this.repo.find((0, utils_1.getPlainUrl)(dirUrl));
243
+ const dirHandle = await this.repo.find(getPlainUrl(dirUrl));
275
244
  dirHandle.change((d) => {
276
245
  const idx = d.docs.findIndex(e => e.name === fileName && e.type === "file");
277
246
  if (idx !== -1) {
@@ -287,18 +256,18 @@ class SyncEngine {
287
256
  }
288
257
  catch (error) {
289
258
  debug(`recreate: failed for ${filePath}: ${error}`);
290
- output_1.out.taskLine(`Failed to recreate ${filePath}: ${error}`, true);
259
+ out.taskLine(`Failed to recreate ${filePath}: ${error}`, true);
291
260
  }
292
261
  }
293
262
  // Also check directory documents
294
263
  for (const [dirPath, entry] of snapshot.directories.entries()) {
295
- const plainUrl = (0, utils_1.getPlainUrl)(entry.url);
264
+ const plainUrl = getPlainUrl(entry.url);
296
265
  if (!failedUrls.has(plainUrl))
297
266
  continue;
298
267
  // Directory docs can't be easily recreated (they reference children).
299
268
  // Just log a warning — the child recreation above should handle most cases.
300
269
  debug(`recreate: directory ${dirPath || "(root)"} failed to sync, cannot recreate`);
301
- output_1.out.taskLine(`Warning: directory ${dirPath || "(root)"} failed to sync`, true);
270
+ out.taskLine(`Warning: directory ${dirPath || "(root)"} failed to sync`, true);
302
271
  }
303
272
  return newHandles;
304
273
  }
@@ -324,12 +293,12 @@ class SyncEngine {
324
293
  // Wait for initial sync to receive any pending remote changes
325
294
  if (this.config.sync_enabled && snapshot.rootDirectoryUrl) {
326
295
  debug("sync: waiting for root document to be ready");
327
- output_1.out.update("Waiting for root document from server");
296
+ out.update("Waiting for root document from server");
328
297
  // Wait for the root document to be fetched from the network.
329
298
  // repo.find() rejects with "unavailable" if the server doesn't
330
299
  // have the document yet, so we retry with backoff.
331
300
  // This is critical for clone scenarios.
332
- const plainRootUrl = (0, utils_1.getPlainUrl)(snapshot.rootDirectoryUrl);
301
+ const plainRootUrl = getPlainUrl(snapshot.rootDirectoryUrl);
333
302
  const maxAttempts = 6;
334
303
  for (let attempt = 1; attempt <= maxAttempts; attempt++) {
335
304
  try {
@@ -343,32 +312,32 @@ class SyncEngine {
343
312
  if (isUnavailable && attempt < maxAttempts) {
344
313
  const delay = Math.min(1000 * Math.pow(2, attempt - 1), 10000);
345
314
  debug(`sync: root document not available (attempt ${attempt}/${maxAttempts}), retrying in ${delay}ms`);
346
- output_1.out.update(`Waiting for root document (attempt ${attempt}/${maxAttempts})`);
315
+ out.update(`Waiting for root document (attempt ${attempt}/${maxAttempts})`);
347
316
  await new Promise(r => setTimeout(r, delay));
348
317
  }
349
318
  else {
350
319
  debug(`sync: root document unavailable after ${maxAttempts} attempts: ${error}`);
351
- output_1.out.taskLine(`Root document unavailable: ${error}`, true);
320
+ out.taskLine(`Root document unavailable: ${error}`, true);
352
321
  break;
353
322
  }
354
323
  }
355
324
  }
356
325
  debug("sync: waiting for initial bidirectional sync");
357
- output_1.out.update("Waiting for initial sync from server");
326
+ out.update("Waiting for initial sync from server");
358
327
  try {
359
- await (0, network_sync_1.waitForBidirectionalSync)(this.repo, snapshot.rootDirectoryUrl, this.config.sync_server_storage_id, {
328
+ await waitForBidirectionalSync(this.repo, snapshot.rootDirectoryUrl, {
360
329
  timeoutMs: 5000, // Increased timeout for initial sync
361
330
  pollIntervalMs: 100,
362
331
  stableChecksRequired: 3,
363
332
  });
364
333
  }
365
334
  catch (error) {
366
- output_1.out.taskLine(`Initial sync: ${error}`, true);
335
+ out.taskLine(`Initial sync: ${error}`, true);
367
336
  }
368
337
  }
369
338
  // Detect all changes
370
339
  debug("sync: detecting changes");
371
- output_1.out.update("Detecting local and remote changes");
340
+ out.update("Detecting local and remote changes");
372
341
  // Capture pre-push snapshot file paths to detect deletions after push
373
342
  const prePushFilePaths = new Set(snapshot.files.keys());
374
343
  const changes = await this.changeDetector.detectChanges(snapshot);
@@ -396,17 +365,17 @@ class SyncEngine {
396
365
  const allHandles = Array.from(this.handlesByPath.values());
397
366
  const handlePaths = Array.from(this.handlesByPath.keys());
398
367
  debug(`sync: waiting for ${allHandles.length} handles to sync to server: ${handlePaths.slice(0, 10).map(p => p || "(root)").join(", ")}${handlePaths.length > 10 ? ` ...and ${handlePaths.length - 10} more` : ""}`);
399
- output_1.out.update(`Uploading ${allHandles.length} documents to sync server`);
400
- const { failed } = await (0, network_sync_1.waitForSync)(allHandles, this.config.sync_server_storage_id);
368
+ out.update(`Uploading ${allHandles.length} documents to sync server`);
369
+ const { failed } = await waitForSync(allHandles);
401
370
  // Recreate failed documents and retry once
402
371
  if (failed.length > 0) {
403
372
  debug(`sync: ${failed.length} documents failed, recreating`);
404
- output_1.out.update(`Recreating ${failed.length} failed documents`);
373
+ out.update(`Recreating ${failed.length} failed documents`);
405
374
  const retryHandles = await this.recreateFailedDocuments(failed, snapshot);
406
375
  if (retryHandles.length > 0) {
407
376
  debug(`sync: retrying ${retryHandles.length} recreated handles`);
408
- output_1.out.update(`Retrying ${retryHandles.length} recreated documents`);
409
- const retry = await (0, network_sync_1.waitForSync)(retryHandles, this.config.sync_server_storage_id);
377
+ out.update(`Retrying ${retryHandles.length} recreated documents`);
378
+ const retry = await waitForSync(retryHandles);
410
379
  if (retry.failed.length > 0) {
411
380
  const msg = `${retry.failed.length} documents failed to sync to server after recreation`;
412
381
  debug(`sync: ${msg}`);
@@ -425,29 +394,18 @@ class SyncEngine {
425
394
  // Use tracked handles for post-push check (cheaper than full tree scan)
426
395
  const changedHandles = Array.from(this.handlesByPath.values());
427
396
  debug(`sync: waiting for bidirectional sync to stabilize (${changedHandles.length} tracked handles)`);
428
- output_1.out.update("Waiting for bidirectional sync to stabilize");
429
- await (0, network_sync_1.waitForBidirectionalSync)(this.repo, snapshot.rootDirectoryUrl, this.config.sync_server_storage_id, {
397
+ out.update("Waiting for bidirectional sync to stabilize");
398
+ await waitForBidirectionalSync(this.repo, snapshot.rootDirectoryUrl, {
430
399
  timeoutMs: BIDIRECTIONAL_SYNC_TIMEOUT_MS,
431
400
  pollIntervalMs: 100,
432
401
  stableChecksRequired: 3,
433
402
  handles: changedHandles.length > 0 ? changedHandles : undefined,
434
403
  });
435
- // Touch root directory AFTER all docs are synced and stable.
436
- // This signals consumers (e.g. Patchwork) that new content is
437
- // available. Because file docs are already on the server,
438
- // consumers can immediately fetch them when they see the root change.
439
- const hasPhase1Changes = phase1Result.filesChanged > 0 || phase1Result.directoriesChanged > 0;
440
- if (hasPhase1Changes && snapshot.rootDirectoryUrl) {
441
- await this.touchRootDirectory(snapshot);
442
- const rootHandle = await this.repo.find(snapshot.rootDirectoryUrl);
443
- debug("sync: syncing root directory touch to server");
444
- output_1.out.update("Syncing root directory update");
445
- await (0, network_sync_1.waitForSync)([rootHandle], this.config.sync_server_storage_id);
446
- }
404
+ // Root directory touch + sync moved to end of sync() so it always runs
447
405
  }
448
406
  catch (error) {
449
407
  debug(`sync: network sync error: ${error}`);
450
- output_1.out.taskLine(`Network sync failed: ${error}`, true);
408
+ out.taskLine(`Network sync failed: ${error}`, true);
451
409
  result.errors.push({
452
410
  path: "sync",
453
411
  operation: "network-sync",
@@ -469,11 +427,11 @@ class SyncEngine {
469
427
  }
470
428
  debug("sync: re-detecting changes after network sync");
471
429
  const freshChanges = await this.changeDetector.detectChanges(snapshot, deletedPaths);
472
- const freshRemoteChanges = freshChanges.filter(c => c.changeType === types_1.ChangeType.REMOTE_ONLY ||
473
- c.changeType === types_1.ChangeType.BOTH_CHANGED);
430
+ const freshRemoteChanges = freshChanges.filter(c => c.changeType === ChangeType.REMOTE_ONLY ||
431
+ c.changeType === ChangeType.BOTH_CHANGED);
474
432
  debug(`sync: phase 2 - pulling ${freshRemoteChanges.length} remote changes`);
475
433
  if (freshRemoteChanges.length > 0) {
476
- output_1.out.update(`Pulling ${freshRemoteChanges.length} remote changes`);
434
+ out.update(`Pulling ${freshRemoteChanges.length} remote changes`);
477
435
  }
478
436
  // Phase 2: Pull remote changes to local using fresh detection
479
437
  const phase2Result = await this.pullRemoteChanges(freshRemoteChanges, snapshot);
@@ -492,7 +450,7 @@ class SyncEngine {
492
450
  // can't find the entries to splice out).
493
451
  for (const [filePath, snapshotEntry] of snapshot.files.entries()) {
494
452
  try {
495
- const handle = await this.repo.find((0, utils_1.getPlainUrl)(snapshotEntry.url));
453
+ const handle = await this.repo.find(getPlainUrl(snapshotEntry.url));
496
454
  const currentHeads = handle.heads();
497
455
  if (!A.equals(currentHeads, snapshotEntry.head)) {
498
456
  // Update snapshot with current heads after pulling changes
@@ -509,7 +467,7 @@ class SyncEngine {
509
467
  // Update directory document heads
510
468
  for (const [dirPath, snapshotEntry] of snapshot.directories.entries()) {
511
469
  try {
512
- const handle = await this.repo.find((0, utils_1.getPlainUrl)(snapshotEntry.url));
470
+ const handle = await this.repo.find(getPlainUrl(snapshotEntry.url));
513
471
  const currentHeads = handle.heads();
514
472
  if (!A.equals(currentHeads, snapshotEntry.head)) {
515
473
  // Update snapshot with current heads after pulling changes
@@ -523,7 +481,40 @@ class SyncEngine {
523
481
  // Handle might not exist if directory was deleted
524
482
  }
525
483
  }
526
- // Save updated snapshot if not dry run
484
+ // Small pause before touching root to let everything settle
485
+ await new Promise(r => setTimeout(r, 100));
486
+ // Always touch root directory after sync completes
487
+ await this.touchRootDirectory(snapshot);
488
+ if (this.config.sync_enabled && snapshot.rootDirectoryUrl) {
489
+ const rootHandle = await this.repo.find(snapshot.rootDirectoryUrl);
490
+ debug("sync: syncing root directory touch to server");
491
+ out.update("Syncing root directory update");
492
+ await waitForSync([rootHandle]);
493
+ // Wait for the touch to fully stabilize on the server
494
+ debug("sync: waiting for root touch to stabilize");
495
+ await waitForBidirectionalSync(this.repo, snapshot.rootDirectoryUrl, {
496
+ timeoutMs: 5000,
497
+ pollIntervalMs: 100,
498
+ stableChecksRequired: 3,
499
+ handles: [rootHandle],
500
+ });
501
+ // Flush repo to ensure everything is persisted
502
+ await this.repo.flush();
503
+ // Small grace period to ensure server has flushed
504
+ await new Promise(r => setTimeout(r, 100));
505
+ }
506
+ // Update root directory snapshot heads after touch
507
+ const rootSnapshotEntry = snapshot.directories.get("");
508
+ if (rootSnapshotEntry && snapshot.rootDirectoryUrl) {
509
+ try {
510
+ const rootHandle = await this.repo.find(getPlainUrl(snapshot.rootDirectoryUrl));
511
+ rootSnapshotEntry.head = rootHandle.heads();
512
+ }
513
+ catch (error) {
514
+ debug(`sync: failed to update root snapshot heads after touch: ${error}`);
515
+ }
516
+ }
517
+ // Save updated snapshot
527
518
  await this.snapshotManager.save(snapshot);
528
519
  result.success = result.errors.length === 0;
529
520
  return result;
@@ -557,13 +548,13 @@ class SyncEngine {
557
548
  // Process moves first - all detected moves are applied
558
549
  if (moves.length > 0) {
559
550
  debug(`push: processing ${moves.length} moves`);
560
- output_1.out.update(`Processing ${moves.length} move${moves.length > 1 ? "s" : ""}`);
551
+ out.update(`Processing ${moves.length} move${moves.length > 1 ? "s" : ""}`);
561
552
  }
562
553
  for (let i = 0; i < moves.length; i++) {
563
554
  const move = moves[i];
564
555
  try {
565
556
  debug(`push: move ${i + 1}/${moves.length}: ${move.fromPath} -> ${move.toPath}`);
566
- output_1.out.taskLine(`Moving ${move.fromPath} -> ${move.toPath}`);
557
+ out.taskLine(`Moving ${move.fromPath} -> ${move.toPath}`);
567
558
  await this.applyMoveToRemote(move, snapshot);
568
559
  result.filesChanged++;
569
560
  }
@@ -578,8 +569,8 @@ class SyncEngine {
578
569
  }
579
570
  }
580
571
  // Filter to local changes only
581
- const localChanges = changes.filter(c => c.changeType === types_1.ChangeType.LOCAL_ONLY ||
582
- c.changeType === types_1.ChangeType.BOTH_CHANGED);
572
+ const localChanges = changes.filter(c => c.changeType === ChangeType.LOCAL_ONLY ||
573
+ c.changeType === ChangeType.BOTH_CHANGED);
583
574
  if (localChanges.length === 0) {
584
575
  debug("push: no local changes to push");
585
576
  return result;
@@ -588,7 +579,7 @@ class SyncEngine {
588
579
  const modifiedFiles = localChanges.filter(c => snapshot.files.has(c.path) && c.localContent !== null);
589
580
  const deletedFiles = localChanges.filter(c => c.localContent === null && snapshot.files.has(c.path));
590
581
  debug(`push: ${localChanges.length} local changes (${newFiles.length} new, ${modifiedFiles.length} modified, ${deletedFiles.length} deleted)`);
591
- output_1.out.update(`Pushing ${localChanges.length} local changes (${newFiles.length} new, ${modifiedFiles.length} modified, ${deletedFiles.length} deleted)`);
582
+ out.update(`Pushing ${localChanges.length} local changes (${newFiles.length} new, ${modifiedFiles.length} modified, ${deletedFiles.length} deleted)`);
592
583
  // Group changes by parent directory path
593
584
  const changesByDir = new Map();
594
585
  for (const change of localChanges) {
@@ -647,7 +638,7 @@ class SyncEngine {
647
638
  if (change.localContent === null && snapshotEntry) {
648
639
  // Delete file
649
640
  debug(`push: [${filesProcessed}/${totalFiles}] delete ${change.path}`);
650
- output_1.out.update(`Pushing local changes [${filesProcessed}/${totalFiles}] deleting ${change.path}`);
641
+ out.update(`Pushing local changes [${filesProcessed}/${totalFiles}] deleting ${change.path}`);
651
642
  await this.deleteRemoteFile(snapshotEntry.url, snapshot, change.path);
652
643
  deletedNames.push(fileName);
653
644
  this.snapshotManager.removeFileEntry(snapshot, change.path);
@@ -656,19 +647,19 @@ class SyncEngine {
656
647
  else if (!snapshotEntry) {
657
648
  // New file
658
649
  debug(`push: [${filesProcessed}/${totalFiles}] create ${change.path} (${change.fileType})`);
659
- output_1.out.update(`Pushing local changes [${filesProcessed}/${totalFiles}] creating ${change.path}`);
650
+ out.update(`Pushing local changes [${filesProcessed}/${totalFiles}] creating ${change.path}`);
660
651
  const handle = await this.createRemoteFile(change);
661
652
  if (handle) {
662
653
  const entryUrl = this.getEntryUrl(handle, change.path);
663
654
  newEntries.push({ name: fileName, url: entryUrl });
664
655
  this.snapshotManager.updateFileEntry(snapshot, change.path, {
665
- path: (0, utils_1.joinAndNormalizePath)(this.rootPath, change.path),
656
+ path: joinAndNormalizePath(this.rootPath, change.path),
666
657
  url: entryUrl,
667
658
  head: handle.heads(),
668
- extension: (0, utils_1.getFileExtension)(change.path),
669
- mimeType: (0, utils_1.getEnhancedMimeType)(change.path),
659
+ extension: getFileExtension(change.path),
660
+ mimeType: getEnhancedMimeType(change.path),
670
661
  ...(this.isArtifactPath(change.path) && change.localContent
671
- ? { contentHash: (0, content_1.contentHash)(change.localContent) }
662
+ ? { contentHash: contentHash(change.localContent) }
672
663
  : {}),
673
664
  });
674
665
  result.filesChanged++;
@@ -681,12 +672,12 @@ class SyncEngine {
681
672
  ? `${change.localContent.length} chars`
682
673
  : `${change.localContent.length} bytes`;
683
674
  debug(`push: [${filesProcessed}/${totalFiles}] update ${change.path} (${contentSize})`);
684
- output_1.out.update(`Pushing local changes [${filesProcessed}/${totalFiles}] updating ${change.path}`);
675
+ out.update(`Pushing local changes [${filesProcessed}/${totalFiles}] updating ${change.path}`);
685
676
  await this.updateRemoteFile(snapshotEntry.url, change.localContent, snapshot, change.path);
686
677
  // Get current entry URL (updateRemoteFile updates snapshot)
687
678
  const updatedFileEntry = snapshot.files.get(change.path);
688
679
  if (updatedFileEntry) {
689
- const fileHandle = await this.repo.find((0, utils_1.getPlainUrl)(updatedFileEntry.url));
680
+ const fileHandle = await this.repo.find(getPlainUrl(updatedFileEntry.url));
690
681
  updatedEntries.push({
691
682
  name: fileName,
692
683
  url: this.getEntryUrl(fileHandle, change.path),
@@ -697,7 +688,7 @@ class SyncEngine {
697
688
  }
698
689
  catch (error) {
699
690
  debug(`push: error processing ${change.path}: ${error}`);
700
- output_1.out.taskLine(`Error pushing ${change.path}: ${error}`, true);
691
+ out.taskLine(`Error pushing ${change.path}: ${error}`, true);
701
692
  result.errors.push({
702
693
  path: change.path,
703
694
  operation: "local-to-remote",
@@ -716,10 +707,10 @@ class SyncEngine {
716
707
  if (parentOfModified === dirPath) {
717
708
  const dirEntry = snapshot.directories.get(modifiedDir);
718
709
  if (dirEntry) {
719
- const childHandle = await this.repo.find((0, utils_1.getPlainUrl)(dirEntry.url));
710
+ const childHandle = await this.repo.find(getPlainUrl(dirEntry.url));
720
711
  subdirUpdates.push({
721
712
  name: childName,
722
- url: this.getEntryUrl(childHandle, modifiedDir),
713
+ url: this.getDirEntryUrl(childHandle),
723
714
  });
724
715
  }
725
716
  }
@@ -751,8 +742,8 @@ class SyncEngine {
751
742
  warnings: [],
752
743
  };
753
744
  // Process remote changes
754
- const remoteChanges = changes.filter(c => c.changeType === types_1.ChangeType.REMOTE_ONLY ||
755
- c.changeType === types_1.ChangeType.BOTH_CHANGED);
745
+ const remoteChanges = changes.filter(c => c.changeType === ChangeType.REMOTE_ONLY ||
746
+ c.changeType === ChangeType.BOTH_CHANGED);
756
747
  // Sort changes by dependency order (parents before children)
757
748
  const sortedChanges = this.sortChangesByDependency(remoteChanges);
758
749
  for (const change of sortedChanges) {
@@ -775,19 +766,19 @@ class SyncEngine {
775
766
  * Apply remote change to local filesystem
776
767
  */
777
768
  async applyRemoteChangeToLocal(change, snapshot) {
778
- const localPath = (0, utils_1.joinAndNormalizePath)(this.rootPath, change.path);
769
+ const localPath = joinAndNormalizePath(this.rootPath, change.path);
779
770
  if (!change.remoteHead) {
780
771
  throw new Error(`No remote head found for remote change to ${change.path}`);
781
772
  }
782
773
  // Check for null (empty string/Uint8Array are valid content)
783
774
  if (change.remoteContent === null) {
784
775
  // File was deleted remotely
785
- await (0, utils_1.removePath)(localPath);
776
+ await removePath(localPath);
786
777
  this.snapshotManager.removeFileEntry(snapshot, change.path);
787
778
  return;
788
779
  }
789
780
  // Create or update local file
790
- await (0, utils_1.writeFileContent)(localPath, change.remoteContent);
781
+ await writeFileContent(localPath, change.remoteContent);
791
782
  // Update or create snapshot entry for this file
792
783
  const snapshotEntry = snapshot.files.get(change.path);
793
784
  if (snapshotEntry) {
@@ -804,7 +795,7 @@ class SyncEngine {
804
795
  // We need to find the remote file's URL from the directory hierarchy
805
796
  if (snapshot.rootDirectoryUrl) {
806
797
  try {
807
- const fileEntry = await (0, utils_1.findFileInDirectoryHierarchy)(this.repo, snapshot.rootDirectoryUrl, change.path);
798
+ const fileEntry = await findFileInDirectoryHierarchy(this.repo, snapshot.rootDirectoryUrl, change.path);
808
799
  if (fileEntry) {
809
800
  const fileHandle = await this.repo.find(fileEntry.url);
810
801
  const entryUrl = this.getEntryUrl(fileHandle, change.path);
@@ -812,14 +803,14 @@ class SyncEngine {
812
803
  path: localPath,
813
804
  url: entryUrl,
814
805
  head: change.remoteHead,
815
- extension: (0, utils_1.getFileExtension)(change.path),
816
- mimeType: (0, utils_1.getEnhancedMimeType)(change.path),
806
+ extension: getFileExtension(change.path),
807
+ mimeType: getEnhancedMimeType(change.path),
817
808
  });
818
809
  }
819
810
  }
820
811
  catch (error) {
821
812
  // Failed to update snapshot - file may have been deleted
822
- output_1.out.taskLine(`Warning: Failed to update snapshot for remote file ${change.path}`, true);
813
+ out.taskLine(`Warning: Failed to update snapshot for remote file ${change.path}`, true);
823
814
  }
824
815
  }
825
816
  }
@@ -849,11 +840,11 @@ class SyncEngine {
849
840
  // Artifact files use RawString — no diffing needed, just create a fresh doc
850
841
  const content = move.newContent !== undefined
851
842
  ? move.newContent
852
- : (0, utils_1.readDocContent)((await (await this.repo.find((0, utils_1.getPlainUrl)(fromEntry.url))).doc())?.content);
843
+ : readDocContent((await (await this.repo.find(getPlainUrl(fromEntry.url))).doc())?.content);
853
844
  const fakeChange = {
854
845
  path: move.toPath,
855
- changeType: types_1.ChangeType.LOCAL_ONLY,
856
- fileType: content != null && typeof content === "string" ? types_1.FileType.TEXT : types_1.FileType.BINARY,
846
+ changeType: ChangeType.LOCAL_ONLY,
847
+ fileType: content != null && typeof content === "string" ? FileType.TEXT : FileType.BINARY,
857
848
  localContent: content,
858
849
  remoteContent: null,
859
850
  };
@@ -865,7 +856,7 @@ class SyncEngine {
865
856
  }
866
857
  else {
867
858
  // Use plain URL for mutable handle
868
- const handle = await this.repo.find((0, utils_1.getPlainUrl)(fromEntry.url));
859
+ const handle = await this.repo.find(getPlainUrl(fromEntry.url));
869
860
  const heads = fromEntry.head;
870
861
  // Update both name and content (if content changed during move)
871
862
  changeWithOptionalHeads(handle, heads, (doc) => {
@@ -873,7 +864,7 @@ class SyncEngine {
873
864
  // If new content is provided, update it (handles move + modification case)
874
865
  if (move.newContent !== undefined) {
875
866
  if (typeof move.newContent === "string") {
876
- (0, utils_1.updateTextContent)(doc, ["content"], move.newContent);
867
+ updateTextContent(doc, ["content"], move.newContent);
877
868
  }
878
869
  else {
879
870
  doc.content = move.newContent;
@@ -891,17 +882,17 @@ class SyncEngine {
891
882
  this.snapshotManager.removeFileEntry(snapshot, move.fromPath);
892
883
  this.snapshotManager.updateFileEntry(snapshot, move.toPath, {
893
884
  ...fromEntry,
894
- path: (0, utils_1.joinAndNormalizePath)(this.rootPath, move.toPath),
885
+ path: joinAndNormalizePath(this.rootPath, move.toPath),
895
886
  url: entryUrl,
896
887
  head: finalHeads,
897
888
  ...(this.isArtifactPath(move.toPath) && move.newContent != null
898
- ? { contentHash: (0, content_1.contentHash)(move.newContent) }
889
+ ? { contentHash: contentHash(move.newContent) }
899
890
  : {}),
900
891
  });
901
892
  }
902
893
  catch (e) {
903
894
  // Failed to update file name - file may have been deleted
904
- output_1.out.taskLine(`Warning: Failed to rename ${move.fromPath} to ${move.toPath}`, true);
895
+ out.taskLine(`Warning: Failed to rename ${move.fromPath} to ${move.toPath}`, true);
905
896
  }
906
897
  }
907
898
  /**
@@ -917,8 +908,8 @@ class SyncEngine {
917
908
  const fileDoc = {
918
909
  "@patchwork": { type: "file" },
919
910
  name: change.path.split("/").pop() || "",
920
- extension: (0, utils_1.getFileExtension)(change.path),
921
- mimeType: (0, utils_1.getEnhancedMimeType)(change.path),
911
+ extension: getFileExtension(change.path),
912
+ mimeType: getEnhancedMimeType(change.path),
922
913
  content: isText && isArtifact
923
914
  ? new A.RawString(change.localContent)
924
915
  : isText
@@ -932,7 +923,7 @@ class SyncEngine {
932
923
  // For non-artifact text files, splice in the content so it's stored as collaborative text
933
924
  if (isText && !isArtifact && typeof change.localContent === "string") {
934
925
  handle.change((doc) => {
935
- (0, utils_1.updateTextContent)(doc, ["content"], change.localContent);
926
+ updateTextContent(doc, ["content"], change.localContent);
936
927
  });
937
928
  }
938
929
  // Always track newly created files for network sync
@@ -945,7 +936,7 @@ class SyncEngine {
945
936
  */
946
937
  async updateRemoteFile(url, content, snapshot, filePath) {
947
938
  // Use plain URL for mutable handle
948
- const handle = await this.repo.find((0, utils_1.getPlainUrl)(url));
939
+ const handle = await this.repo.find(getPlainUrl(url));
949
940
  // Check if content actually changed before tracking for sync
950
941
  const doc = await handle.doc();
951
942
  const rawContent = doc?.content;
@@ -957,14 +948,14 @@ class SyncEngine {
957
948
  !doc ||
958
949
  (rawContent != null && A.isImmutableString(rawContent))) {
959
950
  if (!isArtifact) {
960
- output_1.out.taskLine(`Replacing ${!doc ? 'unavailable' : 'immutable string'} document for ${filePath}`, true);
951
+ out.taskLine(`Replacing ${!doc ? 'unavailable' : 'immutable string'} document for ${filePath}`, true);
961
952
  }
962
953
  const fakeChange = {
963
954
  path: filePath,
964
- changeType: types_1.ChangeType.LOCAL_ONLY,
955
+ changeType: ChangeType.LOCAL_ONLY,
965
956
  fileType: this.isTextContent(content)
966
- ? types_1.FileType.TEXT
967
- : types_1.FileType.BINARY,
957
+ ? FileType.TEXT
958
+ : FileType.BINARY,
968
959
  localContent: content,
969
960
  remoteContent: null,
970
961
  };
@@ -972,20 +963,20 @@ class SyncEngine {
972
963
  if (newHandle) {
973
964
  const entryUrl = this.getEntryUrl(newHandle, filePath);
974
965
  this.snapshotManager.updateFileEntry(snapshot, filePath, {
975
- path: (0, utils_1.joinAndNormalizePath)(this.rootPath, filePath),
966
+ path: joinAndNormalizePath(this.rootPath, filePath),
976
967
  url: entryUrl,
977
968
  head: newHandle.heads(),
978
- extension: (0, utils_1.getFileExtension)(filePath),
979
- mimeType: (0, utils_1.getEnhancedMimeType)(filePath),
969
+ extension: getFileExtension(filePath),
970
+ mimeType: getEnhancedMimeType(filePath),
980
971
  ...(this.isArtifactPath(filePath)
981
- ? { contentHash: (0, content_1.contentHash)(content) }
972
+ ? { contentHash: contentHash(content) }
982
973
  : {}),
983
974
  });
984
975
  }
985
976
  return;
986
977
  }
987
- const currentContent = (0, utils_1.readDocContent)(rawContent);
988
- const contentChanged = !(0, content_1.isContentEqual)(content, currentContent);
978
+ const currentContent = readDocContent(rawContent);
979
+ const contentChanged = !isContentEqual(content, currentContent);
989
980
  // Update snapshot heads even when content is identical
990
981
  const snapshotEntry = snapshot.files.get(filePath);
991
982
  if (snapshotEntry) {
@@ -1006,7 +997,7 @@ class SyncEngine {
1006
997
  }
1007
998
  handle.changeAt(heads, (doc) => {
1008
999
  if (typeof content === "string") {
1009
- (0, utils_1.updateTextContent)(doc, ["content"], content);
1000
+ updateTextContent(doc, ["content"], content);
1010
1001
  }
1011
1002
  else {
1012
1003
  doc.content = content;
@@ -1043,7 +1034,7 @@ class SyncEngine {
1043
1034
  // Get or create the parent directory document
1044
1035
  const parentDirUrl = await this.ensureDirectoryDocument(snapshot, directoryPath);
1045
1036
  // Use plain URL for mutable handle
1046
- const dirHandle = await this.repo.find((0, utils_1.getPlainUrl)(parentDirUrl));
1037
+ const dirHandle = await this.repo.find(getPlainUrl(parentDirUrl));
1047
1038
  let didChange = false;
1048
1039
  const snapshotEntry = snapshot.directories.get(directoryPath);
1049
1040
  const heads = snapshotEntry?.head;
@@ -1098,10 +1089,10 @@ class SyncEngine {
1098
1089
  // Track discovered directory for sync
1099
1090
  this.handlesByPath.set(directoryPath, childDirHandle);
1100
1091
  // Get appropriate URL for directory entry
1101
- const entryUrl = this.getEntryUrl(childDirHandle, directoryPath);
1092
+ const entryUrl = this.getDirEntryUrl(childDirHandle);
1102
1093
  // Update snapshot with discovered directory
1103
1094
  this.snapshotManager.updateDirectoryEntry(snapshot, directoryPath, {
1104
- path: (0, utils_1.joinAndNormalizePath)(this.rootPath, directoryPath),
1095
+ path: joinAndNormalizePath(this.rootPath, directoryPath),
1105
1096
  url: entryUrl,
1106
1097
  head: childDirHandle.heads(),
1107
1098
  entries: [],
@@ -1126,10 +1117,10 @@ class SyncEngine {
1126
1117
  };
1127
1118
  const dirHandle = this.repo.create(dirDoc);
1128
1119
  // Get appropriate URL for directory entry
1129
- const dirEntryUrl = this.getEntryUrl(dirHandle, directoryPath);
1120
+ const dirEntryUrl = this.getDirEntryUrl(dirHandle);
1130
1121
  // Add this directory to its parent
1131
1122
  // Use plain URL for mutable handle
1132
- const parentHandle = await this.repo.find((0, utils_1.getPlainUrl)(parentDirUrl));
1123
+ const parentHandle = await this.repo.find(getPlainUrl(parentDirUrl));
1133
1124
  let didChange = false;
1134
1125
  parentHandle.change((doc) => {
1135
1126
  // Double-check that entry doesn't exist (race condition protection)
@@ -1154,7 +1145,7 @@ class SyncEngine {
1154
1145
  }
1155
1146
  // Update snapshot with new directory
1156
1147
  this.snapshotManager.updateDirectoryEntry(snapshot, directoryPath, {
1157
- path: (0, utils_1.joinAndNormalizePath)(this.rootPath, directoryPath),
1148
+ path: joinAndNormalizePath(this.rootPath, directoryPath),
1158
1149
  url: dirEntryUrl,
1159
1150
  head: dirHandle.heads(),
1160
1151
  entries: [],
@@ -1185,7 +1176,7 @@ class SyncEngine {
1185
1176
  }
1186
1177
  try {
1187
1178
  // Use plain URL for mutable handle
1188
- const dirHandle = await this.repo.find((0, utils_1.getPlainUrl)(parentDirUrl));
1179
+ const dirHandle = await this.repo.find(getPlainUrl(parentDirUrl));
1189
1180
  // Track this handle for network sync waiting
1190
1181
  this.handlesByPath.set(directoryPath, dirHandle);
1191
1182
  const snapshotEntry = snapshot.directories.get(directoryPath);
@@ -1196,7 +1187,7 @@ class SyncEngine {
1196
1187
  if (indexToRemove !== -1) {
1197
1188
  doc.docs.splice(indexToRemove, 1);
1198
1189
  didChange = true;
1199
- output_1.out.taskLine(`Removed ${fileName} from ${(0, utils_1.formatRelativePath)(directoryPath) || "root"}`);
1190
+ out.taskLine(`Removed ${fileName} from ${formatRelativePath(directoryPath) || "root"}`);
1200
1191
  }
1201
1192
  });
1202
1193
  if (didChange && snapshotEntry) {
@@ -1224,7 +1215,7 @@ class SyncEngine {
1224
1215
  return;
1225
1216
  dirUrl = dirEntry.url;
1226
1217
  }
1227
- const dirHandle = await this.repo.find((0, utils_1.getPlainUrl)(dirUrl));
1218
+ const dirHandle = await this.repo.find(getPlainUrl(dirUrl));
1228
1219
  const snapshotEntry = snapshot.directories.get(dirPath);
1229
1220
  const heads = snapshotEntry?.head;
1230
1221
  // Determine directory name
@@ -1240,7 +1231,7 @@ class SyncEngine {
1240
1231
  const idx = doc.docs.findIndex(entry => entry.name === name && entry.type === "file");
1241
1232
  if (idx !== -1) {
1242
1233
  doc.docs.splice(idx, 1);
1243
- output_1.out.taskLine(`Removed ${name} from ${(0, utils_1.formatRelativePath)(dirPath) || "root"}`);
1234
+ out.taskLine(`Removed ${name} from ${formatRelativePath(dirPath) || "root"}`);
1244
1235
  }
1245
1236
  }
1246
1237
  // Update URLs for modified files
@@ -1328,11 +1319,11 @@ class SyncEngine {
1328
1319
  * Generate human-readable summary of changes
1329
1320
  */
1330
1321
  generateChangeSummary(changes, moves) {
1331
- const localChanges = changes.filter(c => c.changeType === types_1.ChangeType.LOCAL_ONLY ||
1332
- c.changeType === types_1.ChangeType.BOTH_CHANGED).length;
1333
- const remoteChanges = changes.filter(c => c.changeType === types_1.ChangeType.REMOTE_ONLY ||
1334
- c.changeType === types_1.ChangeType.BOTH_CHANGED).length;
1335
- const conflicts = changes.filter(c => c.changeType === types_1.ChangeType.BOTH_CHANGED).length;
1322
+ const localChanges = changes.filter(c => c.changeType === ChangeType.LOCAL_ONLY ||
1323
+ c.changeType === ChangeType.BOTH_CHANGED).length;
1324
+ const remoteChanges = changes.filter(c => c.changeType === ChangeType.REMOTE_ONLY ||
1325
+ c.changeType === ChangeType.BOTH_CHANGED).length;
1326
+ const conflicts = changes.filter(c => c.changeType === ChangeType.BOTH_CHANGED).length;
1336
1327
  const parts = [];
1337
1328
  if (localChanges > 0) {
1338
1329
  parts.push(`${localChanges} local change${localChanges > 1 ? "s" : ""}`);
@@ -1360,24 +1351,29 @@ class SyncEngine {
1360
1351
  }
1361
1352
  try {
1362
1353
  const rootHandle = await this.repo.find(snapshot.rootDirectoryUrl);
1363
- const snapshotEntry = snapshot.directories.get("");
1364
- const heads = snapshotEntry?.head;
1365
1354
  const timestamp = Date.now();
1366
- const version = require("../../package.json").version;
1367
- changeWithOptionalHeads(rootHandle, heads, (doc) => {
1355
+ let version;
1356
+ try {
1357
+ version = require("../../package.json").version;
1358
+ }
1359
+ catch {
1360
+ version = "unknown";
1361
+ }
1362
+ debug(`touchRootDirectory: setting lastSyncAt=${timestamp} with=pushwork@${version}`);
1363
+ rootHandle.change((doc) => {
1368
1364
  doc.lastSyncAt = timestamp;
1369
1365
  doc.with = `pushwork@${version}`;
1370
1366
  });
1371
1367
  // Track root directory for network sync
1372
1368
  this.handlesByPath.set("", rootHandle);
1369
+ const snapshotEntry = snapshot.directories.get("");
1373
1370
  if (snapshotEntry) {
1374
1371
  snapshotEntry.head = rootHandle.heads();
1375
1372
  }
1376
1373
  }
1377
1374
  catch (error) {
1378
- // Failed to update root directory timestamp
1375
+ debug(`touchRootDirectory: failed: ${error}`);
1379
1376
  }
1380
1377
  }
1381
1378
  }
1382
- exports.SyncEngine = SyncEngine;
1383
1379
  //# sourceMappingURL=sync-engine.js.map