pushwork 2.0.0-preview.4 → 2.0.0-preview.6

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (122)
  1. package/dist/branches.d.ts +2 -1
  2. package/dist/branches.d.ts.map +1 -1
  3. package/dist/branches.js +23 -4
  4. package/dist/branches.js.map +1 -1
  5. package/dist/checkpoints.d.ts +41 -0
  6. package/dist/checkpoints.d.ts.map +1 -0
  7. package/dist/checkpoints.js +210 -0
  8. package/dist/checkpoints.js.map +1 -0
  9. package/dist/stash.d.ts +2 -0
  10. package/dist/stash.d.ts.map +1 -1
  11. package/dist/stash.js +1 -0
  12. package/dist/stash.js.map +1 -1
  13. package/package.json +5 -5
  14. package/dist/cli/commands.d.ts +0 -71
  15. package/dist/cli/commands.d.ts.map +0 -1
  16. package/dist/cli/commands.js +0 -794
  17. package/dist/cli/commands.js.map +0 -1
  18. package/dist/cli/index.d.ts +0 -2
  19. package/dist/cli/index.d.ts.map +0 -1
  20. package/dist/cli/index.js +0 -19
  21. package/dist/cli/index.js.map +0 -1
  22. package/dist/commands.d.ts +0 -58
  23. package/dist/commands.d.ts.map +0 -1
  24. package/dist/commands.js +0 -975
  25. package/dist/commands.js.map +0 -1
  26. package/dist/config/index.d.ts +0 -71
  27. package/dist/config/index.d.ts.map +0 -1
  28. package/dist/config/index.js +0 -314
  29. package/dist/config/index.js.map +0 -1
  30. package/dist/core/change-detection.d.ts +0 -80
  31. package/dist/core/change-detection.d.ts.map +0 -1
  32. package/dist/core/change-detection.js +0 -560
  33. package/dist/core/change-detection.js.map +0 -1
  34. package/dist/core/config.d.ts +0 -81
  35. package/dist/core/config.d.ts.map +0 -1
  36. package/dist/core/config.js +0 -304
  37. package/dist/core/config.js.map +0 -1
  38. package/dist/core/index.d.ts +0 -6
  39. package/dist/core/index.d.ts.map +0 -1
  40. package/dist/core/index.js +0 -22
  41. package/dist/core/index.js.map +0 -1
  42. package/dist/core/move-detection.d.ts +0 -34
  43. package/dist/core/move-detection.d.ts.map +0 -1
  44. package/dist/core/move-detection.js +0 -128
  45. package/dist/core/move-detection.js.map +0 -1
  46. package/dist/core/snapshot.d.ts +0 -105
  47. package/dist/core/snapshot.d.ts.map +0 -1
  48. package/dist/core/snapshot.js +0 -254
  49. package/dist/core/snapshot.js.map +0 -1
  50. package/dist/core/sync-engine.d.ts +0 -177
  51. package/dist/core/sync-engine.d.ts.map +0 -1
  52. package/dist/core/sync-engine.js +0 -1471
  53. package/dist/core/sync-engine.js.map +0 -1
  54. package/dist/types/config.d.ts +0 -102
  55. package/dist/types/config.d.ts.map +0 -1
  56. package/dist/types/config.js +0 -10
  57. package/dist/types/config.js.map +0 -1
  58. package/dist/types/documents.d.ts +0 -88
  59. package/dist/types/documents.d.ts.map +0 -1
  60. package/dist/types/documents.js +0 -23
  61. package/dist/types/documents.js.map +0 -1
  62. package/dist/types/index.d.ts +0 -4
  63. package/dist/types/index.d.ts.map +0 -1
  64. package/dist/types/index.js +0 -20
  65. package/dist/types/index.js.map +0 -1
  66. package/dist/types/snapshot.d.ts +0 -64
  67. package/dist/types/snapshot.d.ts.map +0 -1
  68. package/dist/types/snapshot.js +0 -3
  69. package/dist/types/snapshot.js.map +0 -1
  70. package/dist/utils/content-similarity.d.ts +0 -53
  71. package/dist/utils/content-similarity.d.ts.map +0 -1
  72. package/dist/utils/content-similarity.js +0 -155
  73. package/dist/utils/content-similarity.js.map +0 -1
  74. package/dist/utils/content.d.ts +0 -10
  75. package/dist/utils/content.d.ts.map +0 -1
  76. package/dist/utils/content.js +0 -35
  77. package/dist/utils/content.js.map +0 -1
  78. package/dist/utils/directory.d.ts +0 -24
  79. package/dist/utils/directory.d.ts.map +0 -1
  80. package/dist/utils/directory.js +0 -56
  81. package/dist/utils/directory.js.map +0 -1
  82. package/dist/utils/fs.d.ts +0 -74
  83. package/dist/utils/fs.d.ts.map +0 -1
  84. package/dist/utils/fs.js +0 -298
  85. package/dist/utils/fs.js.map +0 -1
  86. package/dist/utils/index.d.ts +0 -5
  87. package/dist/utils/index.d.ts.map +0 -1
  88. package/dist/utils/index.js +0 -21
  89. package/dist/utils/index.js.map +0 -1
  90. package/dist/utils/mime-types.d.ts +0 -13
  91. package/dist/utils/mime-types.d.ts.map +0 -1
  92. package/dist/utils/mime-types.js +0 -247
  93. package/dist/utils/mime-types.js.map +0 -1
  94. package/dist/utils/network-sync.d.ts +0 -30
  95. package/dist/utils/network-sync.d.ts.map +0 -1
  96. package/dist/utils/network-sync.js +0 -391
  97. package/dist/utils/network-sync.js.map +0 -1
  98. package/dist/utils/node-polyfills.d.ts +0 -9
  99. package/dist/utils/node-polyfills.d.ts.map +0 -1
  100. package/dist/utils/node-polyfills.js +0 -9
  101. package/dist/utils/node-polyfills.js.map +0 -1
  102. package/dist/utils/output.d.ts +0 -129
  103. package/dist/utils/output.d.ts.map +0 -1
  104. package/dist/utils/output.js +0 -375
  105. package/dist/utils/output.js.map +0 -1
  106. package/dist/utils/repo-factory.d.ts +0 -15
  107. package/dist/utils/repo-factory.d.ts.map +0 -1
  108. package/dist/utils/repo-factory.js +0 -156
  109. package/dist/utils/repo-factory.js.map +0 -1
  110. package/dist/utils/string-similarity.d.ts +0 -14
  111. package/dist/utils/string-similarity.d.ts.map +0 -1
  112. package/dist/utils/string-similarity.js +0 -43
  113. package/dist/utils/string-similarity.js.map +0 -1
  114. package/dist/utils/text-diff.d.ts +0 -37
  115. package/dist/utils/text-diff.d.ts.map +0 -1
  116. package/dist/utils/text-diff.js +0 -131
  117. package/dist/utils/text-diff.js.map +0 -1
  118. package/dist/utils/trace.d.ts +0 -19
  119. package/dist/utils/trace.d.ts.map +0 -1
  120. package/dist/utils/trace.js +0 -68
  121. package/dist/utils/trace.js.map +0 -1
  122. package/patches/@automerge__automerge-repo@2.6.0-subduction.15.patch +0 -26
@@ -1,1471 +0,0 @@
1
- "use strict";
2
- var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
3
- if (k2 === undefined) k2 = k;
4
- var desc = Object.getOwnPropertyDescriptor(m, k);
5
- if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
6
- desc = { enumerable: true, get: function() { return m[k]; } };
7
- }
8
- Object.defineProperty(o, k2, desc);
9
- }) : (function(o, m, k, k2) {
10
- if (k2 === undefined) k2 = k;
11
- o[k2] = m[k];
12
- }));
13
- var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
14
- Object.defineProperty(o, "default", { enumerable: true, value: v });
15
- }) : function(o, v) {
16
- o["default"] = v;
17
- });
18
- var __importStar = (this && this.__importStar) || (function () {
19
- var ownKeys = function(o) {
20
- ownKeys = Object.getOwnPropertyNames || function (o) {
21
- var ar = [];
22
- for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
23
- return ar;
24
- };
25
- return ownKeys(o);
26
- };
27
- return function (mod) {
28
- if (mod && mod.__esModule) return mod;
29
- var result = {};
30
- if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
31
- __setModuleDefault(result, mod);
32
- return result;
33
- };
34
- })();
35
- Object.defineProperty(exports, "__esModule", { value: true });
36
- exports.SyncEngine = void 0;
37
- exports.nukeAndRebuildDocs = nukeAndRebuildDocs;
38
- const automerge_repo_1 = require("@automerge/automerge-repo");
39
- const A = __importStar(require("@automerge/automerge"));
40
- const types_1 = require("../types");
41
- const utils_1 = require("../utils");
42
- const content_1 = require("../utils/content");
43
- const network_sync_1 = require("../utils/network-sync");
44
- const snapshot_1 = require("./snapshot");
45
- const change_detection_1 = require("./change-detection");
46
- const move_detection_1 = require("./move-detection");
47
- const output_1 = require("../utils/output");
48
- const path = __importStar(require("path"));
49
- const isDebug = !!process.env.DEBUG;
50
- function debug(...args) {
51
- if (isDebug)
52
- console.error("[pushwork:engine]", ...args);
53
- }
54
- /**
55
- * Apply a change to a document handle, using changeAt when heads are available
56
- * to branch from a known version, otherwise falling back to change.
57
- */
58
- function changeWithOptionalHeads(handle, heads, callback) {
59
- if (heads && heads.length > 0) {
60
- handle.changeAt(heads, callback);
61
- }
62
- else {
63
- handle.change(callback);
64
- }
65
- }
66
- /**
67
- * Nuke an artifact directory's docs array and rebuild it from scratch.
68
- * Entries must be spread into plain objects — pushing Automerge proxy objects
69
- * back after splicing them out throws "Cannot create a reference to an
70
- * existing document object".
71
- */
72
- function nukeAndRebuildDocs(doc, dirPath, newEntries, updatedEntries, deletedNames, subdirUpdates) {
73
- const deletedSet = new Set(deletedNames);
74
- const updatedMap = new Map(updatedEntries.map(e => [e.name, e.url]));
75
- const newMap = new Map(newEntries.map(e => [e.name, e.url]));
76
- const subdirMap = new Map(subdirUpdates.map(e => [e.name, e.url]));
77
- const kept = [];
78
- for (const entry of doc.docs) {
79
- if (entry.type === "file" && deletedSet.has(entry.name)) {
80
- output_1.out.taskLine(`Removed ${entry.name} from ${(0, utils_1.formatRelativePath)(dirPath) || "root"}`);
81
- continue;
82
- }
83
- if (entry.type === "file" && updatedMap.has(entry.name)) {
84
- kept.push({ ...entry, url: updatedMap.get(entry.name) });
85
- continue;
86
- }
87
- if (entry.type === "file" && newMap.has(entry.name)) {
88
- // Existing entry being re-added (e.g. from immutable string replacement)
89
- kept.push({ ...entry, url: newMap.get(entry.name) });
90
- newMap.delete(entry.name);
91
- continue;
92
- }
93
- if (entry.type === "folder" && subdirMap.has(entry.name)) {
94
- kept.push({ ...entry, url: subdirMap.get(entry.name) });
95
- continue;
96
- }
97
- kept.push({ ...entry });
98
- }
99
- // Add genuinely new file entries
100
- for (const [name, url] of newMap) {
101
- kept.push({ name, type: "file", url });
102
- }
103
- // Nuke and rebuild
104
- doc.docs.splice(0, doc.docs.length);
105
- for (const entry of kept) {
106
- doc.docs.push(entry);
107
- }
108
- }
109
- /**
110
- * Sync configuration constants
111
- */
112
- const BIDIRECTIONAL_SYNC_TIMEOUT_MS = 5000; // Timeout for bidirectional sync stability check
113
- /**
114
- * Bidirectional sync engine implementing two-phase sync
115
- */
116
- class SyncEngine {
117
- constructor(repo, rootPath, config) {
118
- this.repo = repo;
119
- this.rootPath = rootPath;
120
- // Map from path to handle for leaf-first sync ordering
121
- // Path depth determines sync order (deepest first)
122
- this.handlesByPath = new Map();
123
- this.config = config;
124
- this.snapshotManager = new snapshot_1.SnapshotManager(rootPath);
125
- this.changeDetector = new change_detection_1.ChangeDetector(repo, rootPath, config.exclude_patterns, config.artifact_directories || []);
126
- this.moveDetector = new move_detection_1.MoveDetector(config.sync.move_detection_threshold);
127
- }
128
- /**
129
- * Determine if content should be treated as text for Automerge text operations
130
- * Note: This method checks the runtime type. File type detection happens
131
- * during reading with isEnhancedTextFile() which now has better dev file support.
132
- */
133
- isTextContent(content) {
134
- // Simply check the actual type of the content
135
- return typeof content === "string";
136
- }
137
- /**
138
- * Get a versioned URL from a handle (includes current heads).
139
- * This ensures clients can fetch the exact version of the document.
140
- */
141
- getVersionedUrl(handle) {
142
- const { documentId } = (0, automerge_repo_1.parseAutomergeUrl)(handle.url);
143
- const heads = handle.heads();
144
- return (0, automerge_repo_1.stringifyAutomergeUrl)({ documentId, heads });
145
- }
146
- /**
147
- * Determine if a file path is inside an artifact directory.
148
- * Artifact files are stored as immutable strings (RawString) and
149
- * referenced with versioned URLs in directory entries.
150
- */
151
- isArtifactPath(filePath) {
152
- const artifactDirs = this.config.artifact_directories || [];
153
- return artifactDirs.some(dir => filePath === dir || filePath.startsWith(dir + "/"));
154
- }
155
- /**
156
- * Get the appropriate URL for a file's directory entry.
157
- * Artifact paths get versioned URLs (with heads) for exact version fetching.
158
- * Non-artifact paths get plain URLs for collaborative editing.
159
- */
160
- getEntryUrl(handle, filePath) {
161
- if (this.isArtifactPath(filePath)) {
162
- return this.getVersionedUrl(handle);
163
- }
164
- return (0, utils_1.getPlainUrl)(handle.url);
165
- }
166
- /**
167
- * Get the appropriate URL for a subdirectory's directory entry.
168
- * Artifact directories get versioned URLs (with heads) so consumers can
169
- * fetch the exact snapshotted version, matching how artifact files work.
170
- * Non-artifact directories get plain URLs for collaborative editing.
171
- */
172
- getDirEntryUrl(handle, dirPath) {
173
- if (this.isArtifactPath(dirPath)) {
174
- return this.getVersionedUrl(handle);
175
- }
176
- return (0, utils_1.getPlainUrl)(handle.url);
177
- }
178
- /**
179
- * Set the root directory URL in the snapshot
180
- */
181
- async getRootDirectoryUrl() {
182
- const snapshot = await this.snapshotManager.load();
183
- return snapshot?.rootDirectoryUrl;
184
- }
185
- async setRootDirectoryUrl(url) {
186
- let snapshot = await this.snapshotManager.load();
187
- if (!snapshot) {
188
- snapshot = this.snapshotManager.createEmpty();
189
- }
190
- snapshot.rootDirectoryUrl = url;
191
- await this.snapshotManager.save(snapshot);
192
- }
193
- /**
194
- * Reset the snapshot, clearing all tracked files and directories.
195
- * Preserves the rootDirectoryUrl so sync can still operate.
196
- * Used by --force to re-sync every file.
197
- */
198
- async resetSnapshot() {
199
- let snapshot = await this.snapshotManager.load();
200
- if (!snapshot)
201
- return;
202
- this.snapshotManager.clear(snapshot);
203
- await this.snapshotManager.save(snapshot);
204
- }
205
- /**
206
- * Nuclear reset: clear the snapshot AND wipe the root directory document's
207
- * entries so that every file and subdirectory gets brand-new Automerge
208
- * documents. The root directory document itself is preserved.
209
- */
210
- async nuclearReset() {
211
- let snapshot = await this.snapshotManager.load();
212
- if (!snapshot)
213
- return;
214
- // Clear the root directory document's entries
215
- if (snapshot.rootDirectoryUrl) {
216
- const rootHandle = await this.repo.find((0, utils_1.getPlainUrl)(snapshot.rootDirectoryUrl));
217
- rootHandle.change((doc) => {
218
- doc.docs.splice(0, doc.docs.length);
219
- });
220
- }
221
- // Clear all tracked files and directories from snapshot
222
- this.snapshotManager.clear(snapshot);
223
- await this.snapshotManager.save(snapshot);
224
- }
225
- /**
226
- * Commit local changes only (no network sync)
227
- */
228
- async commitLocal() {
229
- const result = {
230
- success: false,
231
- filesChanged: 0,
232
- directoriesChanged: 0,
233
- errors: [],
234
- warnings: [],
235
- };
236
- try {
237
- // Load current snapshot
238
- let snapshot = await this.snapshotManager.load();
239
- if (!snapshot) {
240
- snapshot = this.snapshotManager.createEmpty();
241
- }
242
- // Detect all changes
243
- const changes = await this.changeDetector.detectChanges(snapshot);
244
- // Detect moves
245
- const { moves, remainingChanges } = await this.moveDetector.detectMoves(changes, snapshot);
246
- // Apply local changes only (no network sync)
247
- const commitResult = await this.pushLocalChanges(remainingChanges, moves, snapshot);
248
- result.filesChanged += commitResult.filesChanged;
249
- result.directoriesChanged += commitResult.directoriesChanged;
250
- result.errors.push(...commitResult.errors);
251
- result.warnings.push(...commitResult.warnings);
252
- // Always touch root directory after commit
253
- await this.touchRootDirectory(snapshot);
254
- // Save updated snapshot
255
- await this.snapshotManager.save(snapshot);
256
- result.success = result.errors.length === 0;
257
- return result;
258
- }
259
- catch (error) {
260
- result.errors.push({
261
- path: this.rootPath,
262
- operation: "commitLocal",
263
- error: error instanceof Error ? error : new Error(String(error)),
264
- recoverable: true,
265
- });
266
- result.success = false;
267
- return result;
268
- }
269
- }
270
- /**
271
- * Recreate documents that failed to sync. Creates new Automerge documents
272
- * with the same content and updates all references (snapshot, parent directory).
273
- * Returns new handles that should be retried for sync.
274
- */
275
- async recreateFailedDocuments(failedHandles, snapshot) {
276
- const failedUrls = new Set(failedHandles.map(h => (0, utils_1.getPlainUrl)(h.url)));
277
- const newHandles = [];
278
- // Find which paths correspond to the failed handles
279
- for (const [filePath, entry] of snapshot.files.entries()) {
280
- const plainUrl = (0, utils_1.getPlainUrl)(entry.url);
281
- if (!failedUrls.has(plainUrl))
282
- continue;
283
- debug(`recreate: recreating document for ${filePath} (${plainUrl})`);
284
- output_1.out.taskLine(`Recreating document for ${filePath}`);
285
- try {
286
- // Read the current content from the old handle
287
- const oldHandle = await this.repo.find(plainUrl);
288
- const doc = await oldHandle.doc();
289
- if (!doc) {
290
- debug(`recreate: could not read doc for ${filePath}, skipping`);
291
- continue;
292
- }
293
- const content = (0, utils_1.readDocContent)(doc.content);
294
- if (content === null) {
295
- debug(`recreate: null content for ${filePath}, skipping`);
296
- continue;
297
- }
298
- // Create a fresh document
299
- const fakeChange = {
300
- path: filePath,
301
- changeType: types_1.ChangeType.LOCAL_ONLY,
302
- fileType: this.isTextContent(content) ? types_1.FileType.TEXT : types_1.FileType.BINARY,
303
- localContent: content,
304
- remoteContent: null,
305
- };
306
- const newHandle = await this.createRemoteFile(fakeChange);
307
- if (!newHandle)
308
- continue;
309
- const entryUrl = this.getEntryUrl(newHandle, filePath);
310
- // Update snapshot entry
311
- this.snapshotManager.updateFileEntry(snapshot, filePath, {
312
- ...entry,
313
- url: entryUrl,
314
- head: newHandle.heads(),
315
- ...(this.isArtifactPath(filePath) ? { contentHash: (0, content_1.contentHash)(content) } : {}),
316
- });
317
- // Update parent directory entry to point to new document
318
- const pathParts = filePath.split("/");
319
- const fileName = pathParts.pop() || "";
320
- const dirPath = pathParts.join("/");
321
- let dirUrl;
322
- if (!dirPath || dirPath === "") {
323
- dirUrl = snapshot.rootDirectoryUrl;
324
- }
325
- else {
326
- const dirEntry = snapshot.directories.get(dirPath);
327
- if (!dirEntry)
328
- continue;
329
- dirUrl = dirEntry.url;
330
- }
331
- const dirHandle = await this.repo.find((0, utils_1.getPlainUrl)(dirUrl));
332
- dirHandle.change((d) => {
333
- const idx = d.docs.findIndex(e => e.name === fileName && e.type === "file");
334
- if (idx !== -1) {
335
- d.docs[idx].url = entryUrl;
336
- }
337
- });
338
- // Track new handles
339
- this.handlesByPath.set(filePath, newHandle);
340
- this.handlesByPath.set(dirPath, dirHandle);
341
- newHandles.push(newHandle);
342
- newHandles.push(dirHandle);
343
- debug(`recreate: created new doc for ${filePath} -> ${newHandle.url}`);
344
- }
345
- catch (error) {
346
- debug(`recreate: failed for ${filePath}: ${error}`);
347
- output_1.out.taskLine(`Failed to recreate ${filePath}: ${error}`, true);
348
- }
349
- }
350
- // Also check directory documents
351
- for (const [dirPath, entry] of snapshot.directories.entries()) {
352
- const plainUrl = (0, utils_1.getPlainUrl)(entry.url);
353
- if (!failedUrls.has(plainUrl))
354
- continue;
355
- // Directory docs can't be easily recreated (they reference children).
356
- // Just log a warning — the child recreation above should handle most cases.
357
- debug(`recreate: directory ${dirPath || "(root)"} failed to sync, cannot recreate`);
358
- output_1.out.taskLine(`Warning: directory ${dirPath || "(root)"} failed to sync`, true);
359
- }
360
- return newHandles;
361
- }
362
- /**
363
- * Run full bidirectional sync
364
- */
365
- async sync(options) {
366
- const result = {
367
- success: false,
368
- filesChanged: 0,
369
- directoriesChanged: 0,
370
- errors: [],
371
- warnings: [],
372
- timings: {},
373
- };
374
- // Reset tracked handles for sync
375
- this.handlesByPath = new Map();
376
- try {
377
- // Load current snapshot
378
- const snapshot = (await this.snapshotManager.load()) ||
379
- this.snapshotManager.createEmpty();
380
- debug(`sync: rootDirectoryUrl=${snapshot.rootDirectoryUrl}, files=${snapshot.files.size}, dirs=${snapshot.directories.size}`);
381
- // Wait for initial sync to receive any pending remote changes
382
- if (this.config.sync_enabled && snapshot.rootDirectoryUrl) {
383
- debug("sync: waiting for root document to be ready");
384
- output_1.out.update("Waiting for root document from server");
385
- // Wait for the root document to be fetched from the network.
386
- // repo.find() rejects with "unavailable" if the server doesn't
387
- // have the document yet, so we retry with backoff.
388
- // This is critical for clone scenarios.
389
- const plainRootUrl = (0, utils_1.getPlainUrl)(snapshot.rootDirectoryUrl);
390
- const maxAttempts = 6;
391
- for (let attempt = 1; attempt <= maxAttempts; attempt++) {
392
- try {
393
- const rootHandle = await this.repo.find(plainRootUrl);
394
- rootHandle.doc(); // throws if not ready
395
- debug(`sync: root document ready (attempt ${attempt})`);
396
- break;
397
- }
398
- catch (error) {
399
- const isUnavailable = String(error).includes("unavailable") || String(error).includes("not ready");
400
- if (isUnavailable && attempt < maxAttempts) {
401
- const delay = Math.min(1000 * Math.pow(2, attempt - 1), 10000);
402
- debug(`sync: root document not available (attempt ${attempt}/${maxAttempts}), retrying in ${delay}ms`);
403
- output_1.out.update(`Waiting for root document (attempt ${attempt}/${maxAttempts})`);
404
- await new Promise(r => setTimeout(r, delay));
405
- }
406
- else {
407
- debug(`sync: root document unavailable after ${maxAttempts} attempts: ${error}`);
408
- output_1.out.taskLine(`Root document unavailable: ${error}`, true);
409
- break;
410
- }
411
- }
412
- }
413
- debug("sync: waiting for initial bidirectional sync");
414
- output_1.out.update("Waiting for initial sync from server");
415
- try {
416
- await (0, network_sync_1.waitForBidirectionalSync)(this.repo, snapshot.rootDirectoryUrl, {
417
- timeoutMs: 5000, // Increased timeout for initial sync
418
- pollIntervalMs: 100,
419
- stableChecksRequired: 3,
420
- });
421
- }
422
- catch (error) {
423
- output_1.out.taskLine(`Initial sync: ${error}`, true);
424
- }
425
- }
426
- // Detect all changes
427
- debug("sync: detecting changes");
428
- output_1.out.update("Detecting local and remote changes");
429
- // Capture pre-push snapshot file paths to detect deletions after push
430
- const prePushFilePaths = new Set(snapshot.files.keys());
431
- const changes = await this.changeDetector.detectChanges(snapshot);
432
- // Detect moves
433
- const { moves, remainingChanges } = await this.moveDetector.detectMoves(changes, snapshot);
434
- debug(`sync: detected ${changes.length} changes, ${moves.length} moves, ${remainingChanges.length} remaining`);
435
- // Phase 1: Push local changes to remote
436
- debug("sync: phase 1 - pushing local changes");
437
- const phase1Result = await this.pushLocalChanges(remainingChanges, moves, snapshot);
438
- result.filesChanged += phase1Result.filesChanged;
439
- result.directoriesChanged += phase1Result.directoriesChanged;
440
- result.errors.push(...phase1Result.errors);
441
- result.warnings.push(...phase1Result.warnings);
442
- debug(`sync: phase 1 complete - ${phase1Result.filesChanged} files, ${phase1Result.directoriesChanged} dirs changed`);
443
- // Wait for network sync (important for clone scenarios)
444
- if (this.config.sync_enabled) {
445
- const sub = options?.sub ?? false;
446
- // In Subduction mode, pass no StorageId so waitForSync
447
- // falls back to head-stability polling. In WebSocket mode,
448
- // pass the StorageId for precise getSyncInfo-based verification.
449
- const storageId = sub ? undefined : this.config.sync_server_storage_id;
450
- try {
451
- // Ensure root directory handle is tracked for sync
452
- if (snapshot.rootDirectoryUrl) {
453
- const rootHandle = await this.repo.find(snapshot.rootDirectoryUrl);
454
- this.handlesByPath.set("", rootHandle);
455
- }
456
- // Single waitForSync with ALL tracked handles at once
457
- if (this.handlesByPath.size > 0) {
458
- const allHandles = Array.from(this.handlesByPath.values());
459
- const handlePaths = Array.from(this.handlesByPath.keys());
460
- debug(`sync: waiting for ${allHandles.length} handles to sync to server: ${handlePaths.slice(0, 10).map(p => p || "(root)").join(", ")}${handlePaths.length > 10 ? ` ...and ${handlePaths.length - 10} more` : ""}`);
461
- output_1.out.update(`Uploading ${allHandles.length} documents to sync server`);
462
- const { failed } = await (0, network_sync_1.waitForSync)(allHandles, storageId);
463
- // Recreate failed documents and retry once.
464
- // Skip in Subduction mode — SubductionSource has its
465
- // own heal-sync retry logic.
466
- if (failed.length > 0 && !sub) {
467
- debug(`sync: ${failed.length} documents failed, recreating`);
468
- output_1.out.update(`Recreating ${failed.length} failed documents`);
469
- const retryHandles = await this.recreateFailedDocuments(failed, snapshot);
470
- if (retryHandles.length > 0) {
471
- debug(`sync: retrying ${retryHandles.length} recreated handles`);
472
- output_1.out.update(`Retrying ${retryHandles.length} recreated documents`);
473
- const retry = await (0, network_sync_1.waitForSync)(retryHandles, storageId);
474
- if (retry.failed.length > 0) {
475
- const msg = `${retry.failed.length} documents failed to sync to server after recreation`;
476
- debug(`sync: ${msg}`);
477
- result.errors.push({
478
- path: "sync",
479
- operation: "upload",
480
- error: new Error(msg),
481
- recoverable: true,
482
- });
483
- }
484
- }
485
- }
486
- else if (failed.length > 0 && sub) {
487
- const msg = `${failed.length} document${failed.length === 1 ? '' : 's'} did not converge during sync (Subduction will retry in the background; re-run sync to confirm)`;
488
- debug(`sync: ${msg}`);
489
- output_1.out.taskLine(msg, true);
490
- result.warnings.push(msg);
491
- }
492
- debug("sync: all handles synced to server");
493
- }
494
- // Wait for bidirectional sync to stabilize
495
- // Use tracked handles for post-push check (cheaper than full tree scan)
496
- const changedHandles = Array.from(this.handlesByPath.values());
497
- debug(`sync: waiting for bidirectional sync to stabilize (${changedHandles.length} tracked handles)`);
498
- output_1.out.update("Waiting for bidirectional sync to stabilize");
499
- await (0, network_sync_1.waitForBidirectionalSync)(this.repo, snapshot.rootDirectoryUrl, {
500
- timeoutMs: BIDIRECTIONAL_SYNC_TIMEOUT_MS,
501
- pollIntervalMs: 100,
502
- stableChecksRequired: 3,
503
- handles: changedHandles.length > 0 ? changedHandles : undefined,
504
- });
505
- // Touch root directory AFTER all docs are synced and stable.
506
- // This signals consumers (e.g. Patchwork) that new content is
507
- // available. Because file docs are already on the server,
508
- // consumers can immediately fetch them when they see the root change.
509
- const hasPhase1Changes = phase1Result.filesChanged > 0 || phase1Result.directoriesChanged > 0;
510
- if (hasPhase1Changes && snapshot.rootDirectoryUrl) {
511
- await this.touchRootDirectory(snapshot);
512
- const rootHandle = await this.repo.find(snapshot.rootDirectoryUrl);
513
- debug("sync: syncing root directory touch to server");
514
- output_1.out.update("Syncing root directory update");
515
- const rootSync = await (0, network_sync_1.waitForSync)([rootHandle], storageId);
516
- if (rootSync.failed.length > 0) {
517
- const msg = "Root directory update did not converge to server; consumers may not see recent changes until next sync";
518
- debug(`sync: ${msg}`);
519
- result.warnings.push(msg);
520
- }
521
- }
522
- }
523
- catch (error) {
524
- debug(`sync: network sync error: ${error}`);
525
- output_1.out.taskLine(`Network sync failed: ${error}`, true);
526
- result.errors.push({
527
- path: "sync",
528
- operation: "network-sync",
529
- error: error instanceof Error ? error : new Error(String(error)),
530
- recoverable: true,
531
- });
532
- }
533
- }
534
- // Re-detect changes after network sync for fresh state
535
- // Compute paths deleted during push so they aren't resurrected during pull
536
- const deletedPaths = new Set();
537
- for (const p of prePushFilePaths) {
538
- if (!snapshot.files.has(p)) {
539
- deletedPaths.add(p);
540
- }
541
- }
542
- if (deletedPaths.size > 0) {
543
- debug(`sync: excluding ${deletedPaths.size} deleted paths from re-detection`);
544
- }
545
- debug("sync: re-detecting changes after network sync");
546
- const freshChanges = await this.changeDetector.detectChanges(snapshot, deletedPaths);
547
- const freshRemoteChanges = freshChanges.filter(c => c.changeType === types_1.ChangeType.REMOTE_ONLY ||
548
- c.changeType === types_1.ChangeType.BOTH_CHANGED);
549
- debug(`sync: phase 2 - pulling ${freshRemoteChanges.length} remote changes`);
550
- if (freshRemoteChanges.length > 0) {
551
- output_1.out.update(`Pulling ${freshRemoteChanges.length} remote changes`);
552
- }
553
- // Phase 2: Pull remote changes to local using fresh detection
554
- const phase2Result = await this.pullRemoteChanges(freshRemoteChanges, snapshot);
555
- result.filesChanged += phase2Result.filesChanged;
556
- result.directoriesChanged += phase2Result.directoriesChanged;
557
- result.errors.push(...phase2Result.errors);
558
- result.warnings.push(...phase2Result.warnings);
559
- // Update snapshot heads after pulling remote changes
560
- // IMPORTANT: Use getPlainUrl() to strip version/heads from URLs.
561
- // Artifact entries store versioned URLs (with heads baked in).
562
- // repo.find(versionedUrl) returns a view handle whose .heads()
563
- // returns the VERSION heads, not the current document heads.
564
- // Using the versioned URL here would overwrite correct heads with
565
- // stale ones, causing changeAt() to fork from the wrong point
566
- // on the next sync (e.g. an empty directory state where deletions
567
- // can't find the entries to splice out).
568
- for (const [filePath, snapshotEntry] of snapshot.files.entries()) {
569
- try {
570
- const handle = await this.repo.find((0, utils_1.getPlainUrl)(snapshotEntry.url));
571
- const currentHeads = handle.heads();
572
- if (!A.equals(currentHeads, snapshotEntry.head)) {
573
- // Update snapshot with current heads after pulling changes
574
- snapshot.files.set(filePath, {
575
- ...snapshotEntry,
576
- head: currentHeads,
577
- });
578
- }
579
- }
580
- catch (error) {
581
- // Handle might not exist if file was deleted
582
- }
583
- }
584
- // Update directory document heads
585
- for (const [dirPath, snapshotEntry] of snapshot.directories.entries()) {
586
- try {
587
- const handle = await this.repo.find((0, utils_1.getPlainUrl)(snapshotEntry.url));
588
- const currentHeads = handle.heads();
589
- if (!A.equals(currentHeads, snapshotEntry.head)) {
590
- // Update snapshot with current heads after pulling changes
591
- snapshot.directories.set(dirPath, {
592
- ...snapshotEntry,
593
- head: currentHeads,
594
- });
595
- }
596
- }
597
- catch (error) {
598
- // Handle might not exist if directory was deleted
599
- }
600
- }
601
- // Save updated snapshot if not dry run
602
- await this.snapshotManager.save(snapshot);
603
- result.success = result.errors.length === 0;
604
- return result;
605
- }
606
- catch (error) {
607
- result.errors.push({
608
- path: "sync",
609
- operation: "full-sync",
610
- error: error,
611
- recoverable: false,
612
- });
613
- return result;
614
- }
615
- }
616
- /**
617
- * Phase 1: Push local changes to Automerge documents.
618
- *
619
- * Works depth-first: processes the deepest files first, creates/updates all
620
- * file docs at each level, then batch-updates the parent directory document
621
- * in a single change. Propagates subdirectory URL updates as we walk up
622
- * toward the root. This eliminates the need for a separate URL update pass.
623
- */
624
- async pushLocalChanges(changes, moves, snapshot) {
625
- const result = {
626
- success: true,
627
- filesChanged: 0,
628
- directoriesChanged: 0,
629
- errors: [],
630
- warnings: [],
631
- };
632
- // Process moves first - all detected moves are applied
633
- if (moves.length > 0) {
634
- debug(`push: processing ${moves.length} moves`);
635
- output_1.out.update(`Processing ${moves.length} move${moves.length > 1 ? "s" : ""}`);
636
- }
637
- for (let i = 0; i < moves.length; i++) {
638
- const move = moves[i];
639
- try {
640
- debug(`push: move ${i + 1}/${moves.length}: ${move.fromPath} -> ${move.toPath}`);
641
- output_1.out.taskLine(`Moving ${move.fromPath} -> ${move.toPath}`);
642
- await this.applyMoveToRemote(move, snapshot);
643
- result.filesChanged++;
644
- }
645
- catch (error) {
646
- debug(`push: move failed for ${move.fromPath}: ${error}`);
647
- result.errors.push({
648
- path: move.fromPath,
649
- operation: "move",
650
- error: error,
651
- recoverable: true,
652
- });
653
- }
654
- }
655
- // Filter to local changes only
656
- const localChanges = changes.filter(c => c.changeType === types_1.ChangeType.LOCAL_ONLY ||
657
- c.changeType === types_1.ChangeType.BOTH_CHANGED);
658
- if (localChanges.length === 0) {
659
- debug("push: no local changes to push");
660
- return result;
661
- }
662
- const newFiles = localChanges.filter(c => !snapshot.files.has(c.path) && c.localContent !== null);
663
- const modifiedFiles = localChanges.filter(c => snapshot.files.has(c.path) && c.localContent !== null);
664
- const deletedFiles = localChanges.filter(c => c.localContent === null && snapshot.files.has(c.path));
665
- debug(`push: ${localChanges.length} local changes (${newFiles.length} new, ${modifiedFiles.length} modified, ${deletedFiles.length} deleted)`);
666
- output_1.out.update(`Pushing ${localChanges.length} local changes (${newFiles.length} new, ${modifiedFiles.length} modified, ${deletedFiles.length} deleted)`);
667
- // Group changes by parent directory path
668
- const changesByDir = new Map();
669
- for (const change of localChanges) {
670
- const pathParts = change.path.split("/");
671
- pathParts.pop(); // remove filename
672
- const dirPath = pathParts.join("/");
673
- if (!changesByDir.has(dirPath)) {
674
- changesByDir.set(dirPath, []);
675
- }
676
- changesByDir.get(dirPath).push(change);
677
- }
678
- // Collect all directory paths that need processing:
679
- // directories with file changes + all ancestors up to root
680
- const allDirsToProcess = new Set();
681
- for (const dirPath of changesByDir.keys()) {
682
- allDirsToProcess.add(dirPath);
683
- // Add ancestors so subdirectory URL updates propagate to root
684
- let current = dirPath;
685
- while (current) {
686
- const parts = current.split("/");
687
- parts.pop();
688
- current = parts.join("/");
689
- allDirsToProcess.add(current);
690
- }
691
- }
692
- // Sort deepest-first
693
- const sortedDirPaths = Array.from(allDirsToProcess).sort((a, b) => {
694
- const depthA = a ? a.split("/").length : 0;
695
- const depthB = b ? b.split("/").length : 0;
696
- return depthB - depthA;
697
- });
698
- debug(`push: processing ${sortedDirPaths.length} directories (deepest first)`);
699
- // Track which directories were modified (for subdirectory URL propagation)
700
- const modifiedDirs = new Set();
701
- let filesProcessed = 0;
702
- const totalFiles = localChanges.length;
703
- for (const dirPath of sortedDirPaths) {
704
- const dirChanges = changesByDir.get(dirPath) || [];
705
- const dirLabel = dirPath || "(root)";
706
- if (dirChanges.length > 0) {
707
- debug(`push: directory "${dirLabel}": ${dirChanges.length} file changes`);
708
- }
709
- // Ensure directory document exists
710
- if (snapshot.rootDirectoryUrl) {
711
- await this.ensureDirectoryDocument(snapshot, dirPath);
712
- }
713
- // Process all file changes in this directory
714
- const newEntries = [];
715
- const updatedEntries = [];
716
- const deletedNames = [];
717
- for (const change of dirChanges) {
718
- const fileName = change.path.split("/").pop() || "";
719
- const snapshotEntry = snapshot.files.get(change.path);
720
- filesProcessed++;
721
- try {
722
- if (change.localContent === null && snapshotEntry) {
723
- // Delete file
724
- debug(`push: [${filesProcessed}/${totalFiles}] delete ${change.path}`);
725
- output_1.out.update(`Pushing local changes [${filesProcessed}/${totalFiles}] deleting ${change.path}`);
726
- await this.deleteRemoteFile(snapshotEntry.url, snapshot, change.path);
727
- deletedNames.push(fileName);
728
- this.snapshotManager.removeFileEntry(snapshot, change.path);
729
- result.filesChanged++;
730
- }
731
- else if (!snapshotEntry) {
732
- // New file
733
- debug(`push: [${filesProcessed}/${totalFiles}] create ${change.path} (${change.fileType})`);
734
- output_1.out.update(`Pushing local changes [${filesProcessed}/${totalFiles}] creating ${change.path}`);
735
- const handle = await this.createRemoteFile(change);
736
- if (handle) {
737
- const entryUrl = this.getEntryUrl(handle, change.path);
738
- newEntries.push({ name: fileName, url: entryUrl });
739
- this.snapshotManager.updateFileEntry(snapshot, change.path, {
740
- path: (0, utils_1.joinAndNormalizePath)(this.rootPath, change.path),
741
- url: entryUrl,
742
- head: handle.heads(),
743
- extension: (0, utils_1.getFileExtension)(change.path),
744
- mimeType: (0, utils_1.getEnhancedMimeType)(change.path),
745
- ...(this.isArtifactPath(change.path) && change.localContent
746
- ? { contentHash: (0, content_1.contentHash)(change.localContent) }
747
- : {}),
748
- });
749
- result.filesChanged++;
750
- debug(`push: created ${change.path} -> ${handle.url}`);
751
- }
752
- }
753
- else {
754
- // Update existing file
755
- const contentSize = typeof change.localContent === "string"
756
- ? `${change.localContent.length} chars`
757
- : `${change.localContent.length} bytes`;
758
- debug(`push: [${filesProcessed}/${totalFiles}] update ${change.path} (${contentSize})`);
759
- output_1.out.update(`Pushing local changes [${filesProcessed}/${totalFiles}] updating ${change.path}`);
760
- await this.updateRemoteFile(snapshotEntry.url, change.localContent, snapshot, change.path);
761
- // Get current entry URL (updateRemoteFile updates snapshot)
762
- const updatedFileEntry = snapshot.files.get(change.path);
763
- if (updatedFileEntry) {
764
- const fileHandle = await this.repo.find((0, utils_1.getPlainUrl)(updatedFileEntry.url));
765
- updatedEntries.push({
766
- name: fileName,
767
- url: this.getEntryUrl(fileHandle, change.path),
768
- });
769
- }
770
- result.filesChanged++;
771
- }
772
- }
773
- catch (error) {
774
- debug(`push: error processing ${change.path}: ${error}`);
775
- output_1.out.taskLine(`Error pushing ${change.path}: ${error}`, true);
776
- result.errors.push({
777
- path: change.path,
778
- operation: "local-to-remote",
779
- error: error,
780
- recoverable: true,
781
- });
782
- }
783
- }
784
- // Collect subdirectory URL updates for child dirs already processed
785
- const subdirUpdates = [];
786
- for (const modifiedDir of modifiedDirs) {
787
- // Check if modifiedDir is a direct child of dirPath
788
- const parts = modifiedDir.split("/");
789
- const childName = parts.pop() || "";
790
- const parentOfModified = parts.join("/");
791
- if (parentOfModified === dirPath) {
792
- const dirEntry = snapshot.directories.get(modifiedDir);
793
- if (dirEntry) {
794
- const childHandle = await this.repo.find((0, utils_1.getPlainUrl)(dirEntry.url));
795
- subdirUpdates.push({
796
- name: childName,
797
- url: this.getDirEntryUrl(childHandle, modifiedDir),
798
- });
799
- }
800
- }
801
- }
802
- // Batch-update the directory document in a single change
803
- const hasChanges = newEntries.length > 0 ||
804
- updatedEntries.length > 0 ||
805
- deletedNames.length > 0 ||
806
- subdirUpdates.length > 0;
807
- if (hasChanges && snapshot.rootDirectoryUrl) {
808
- debug(`push: batch-updating directory "${dirLabel}" (+${newEntries.length} new, ~${updatedEntries.length} updated, -${deletedNames.length} deleted, ${subdirUpdates.length} subdir URL updates)`);
809
- await this.batchUpdateDirectory(snapshot, dirPath, newEntries, updatedEntries, deletedNames, subdirUpdates);
810
- modifiedDirs.add(dirPath);
811
- result.directoriesChanged++;
812
- }
813
- }
814
- debug(`push: complete - ${result.filesChanged} files, ${result.directoriesChanged} dirs changed, ${result.errors.length} errors`);
815
- return result;
816
- }
817
- /**
818
- * Phase 2: Pull remote changes to local filesystem
819
- */
820
- async pullRemoteChanges(changes, snapshot) {
821
- const result = {
822
- success: true,
823
- filesChanged: 0,
824
- directoriesChanged: 0,
825
- errors: [],
826
- warnings: [],
827
- };
828
- // Process remote changes
829
- const remoteChanges = changes.filter(c => c.changeType === types_1.ChangeType.REMOTE_ONLY ||
830
- c.changeType === types_1.ChangeType.BOTH_CHANGED);
831
- // Sort changes by dependency order (parents before children)
832
- const sortedChanges = this.sortChangesByDependency(remoteChanges);
833
- for (const change of sortedChanges) {
834
- try {
835
- await this.applyRemoteChangeToLocal(change, snapshot);
836
- result.filesChanged++;
837
- }
838
- catch (error) {
839
- result.errors.push({
840
- path: change.path,
841
- operation: "remote-to-local",
842
- error: error,
843
- recoverable: true,
844
- });
845
- }
846
- }
847
- return result;
848
- }
849
- /**
850
- * Apply remote change to local filesystem
851
- */
852
- async applyRemoteChangeToLocal(change, snapshot) {
853
- const localPath = (0, utils_1.joinAndNormalizePath)(this.rootPath, change.path);
854
- if (!change.remoteHead) {
855
- throw new Error(`No remote head found for remote change to ${change.path}`);
856
- }
857
- // Check for null (empty string/Uint8Array are valid content)
858
- if (change.remoteContent === null) {
859
- // File was deleted remotely
860
- await (0, utils_1.removePath)(localPath);
861
- this.snapshotManager.removeFileEntry(snapshot, change.path);
862
- return;
863
- }
864
- // Create or update local file
865
- await (0, utils_1.writeFileContent)(localPath, change.remoteContent);
866
- // Update or create snapshot entry for this file
867
- const snapshotEntry = snapshot.files.get(change.path);
868
- if (snapshotEntry) {
869
- // Update existing entry
870
- snapshotEntry.head = change.remoteHead;
871
- // If the remote document was replaced (new URL), update the snapshot URL
872
- if (change.remoteUrl) {
873
- const fileHandle = await this.repo.find(change.remoteUrl);
874
- snapshotEntry.url = this.getEntryUrl(fileHandle, change.path);
875
- }
876
- }
877
- else {
878
- // Create new snapshot entry for newly discovered remote file
879
- // We need to find the remote file's URL from the directory hierarchy
880
- if (snapshot.rootDirectoryUrl) {
881
- try {
882
- const fileEntry = await (0, utils_1.findFileInDirectoryHierarchy)(this.repo, snapshot.rootDirectoryUrl, change.path);
883
- if (fileEntry) {
884
- const fileHandle = await this.repo.find(fileEntry.url);
885
- const entryUrl = this.getEntryUrl(fileHandle, change.path);
886
- this.snapshotManager.updateFileEntry(snapshot, change.path, {
887
- path: localPath,
888
- url: entryUrl,
889
- head: change.remoteHead,
890
- extension: (0, utils_1.getFileExtension)(change.path),
891
- mimeType: (0, utils_1.getEnhancedMimeType)(change.path),
892
- });
893
- }
894
- }
895
- catch (error) {
896
- // Failed to update snapshot - file may have been deleted
897
- output_1.out.taskLine(`Warning: Failed to update snapshot for remote file ${change.path}`, true);
898
- }
899
- }
900
- }
901
- }
902
- /**
903
- * Apply move to remote documents
904
- */
905
- async applyMoveToRemote(move, snapshot) {
906
- const fromEntry = snapshot.files.get(move.fromPath);
907
- if (!fromEntry)
908
- return;
909
- // Parse paths
910
- const toParts = move.toPath.split("/");
911
- const toFileName = toParts.pop() || "";
912
- const toDirPath = toParts.join("/");
913
- // 1) Remove file entry from old directory document
914
- if (move.fromPath !== move.toPath) {
915
- await this.removeFileFromDirectory(snapshot, move.fromPath);
916
- }
917
- // 2) Ensure destination directory document exists
918
- await this.ensureDirectoryDocument(snapshot, toDirPath);
919
- // 3) Update the FileDocument name and content to match new location/state
920
- try {
921
- let entryUrl;
922
- let finalHeads;
923
- if (this.isArtifactPath(move.toPath)) {
924
- // Artifact files use RawString — no diffing needed, just create a fresh doc
925
- const content = move.newContent !== undefined
926
- ? move.newContent
927
- : (0, utils_1.readDocContent)((await (await this.repo.find((0, utils_1.getPlainUrl)(fromEntry.url))).doc())?.content);
928
- const fakeChange = {
929
- path: move.toPath,
930
- changeType: types_1.ChangeType.LOCAL_ONLY,
931
- fileType: content != null && typeof content === "string" ? types_1.FileType.TEXT : types_1.FileType.BINARY,
932
- localContent: content,
933
- remoteContent: null,
934
- };
935
- const newHandle = await this.createRemoteFile(fakeChange);
936
- if (!newHandle)
937
- return;
938
- entryUrl = this.getEntryUrl(newHandle, move.toPath);
939
- finalHeads = newHandle.heads();
940
- }
941
- else {
942
- // Use plain URL for mutable handle
943
- const handle = await this.repo.find((0, utils_1.getPlainUrl)(fromEntry.url));
944
- const heads = fromEntry.head;
945
- // Update both name and content (if content changed during move)
946
- changeWithOptionalHeads(handle, heads, (doc) => {
947
- doc.name = toFileName;
948
- // If new content is provided, update it (handles move + modification case)
949
- if (move.newContent !== undefined) {
950
- if (typeof move.newContent === "string") {
951
- (0, utils_1.updateTextContent)(doc, ["content"], move.newContent);
952
- }
953
- else {
954
- doc.content = move.newContent;
955
- }
956
- }
957
- });
958
- entryUrl = this.getEntryUrl(handle, move.toPath);
959
- finalHeads = handle.heads();
960
- // Track file handle for network sync
961
- this.handlesByPath.set(move.toPath, handle);
962
- }
963
- // 4) Add file entry to destination directory
964
- await this.addFileToDirectory(snapshot, move.toPath, entryUrl);
965
- // 5) Update snapshot entries
966
- this.snapshotManager.removeFileEntry(snapshot, move.fromPath);
967
- this.snapshotManager.updateFileEntry(snapshot, move.toPath, {
968
- ...fromEntry,
969
- path: (0, utils_1.joinAndNormalizePath)(this.rootPath, move.toPath),
970
- url: entryUrl,
971
- head: finalHeads,
972
- ...(this.isArtifactPath(move.toPath) && move.newContent != null
973
- ? { contentHash: (0, content_1.contentHash)(move.newContent) }
974
- : {}),
975
- });
976
- }
977
- catch (e) {
978
- // Failed to update file name - file may have been deleted
979
- output_1.out.taskLine(`Warning: Failed to rename ${move.fromPath} to ${move.toPath}`, true);
980
- }
981
- }
982
- /**
983
- * Create new remote file document
984
- */
985
- async createRemoteFile(change) {
986
- if (change.localContent === null)
987
- return null;
988
- const isText = this.isTextContent(change.localContent);
989
- const isArtifact = this.isArtifactPath(change.path);
990
- // For artifact files, store text as RawString (immutable snapshot).
991
- // For regular files, store as collaborative text (empty string + splice).
992
- const fileDoc = {
993
- "@patchwork": { type: "file" },
994
- name: change.path.split("/").pop() || "",
995
- extension: (0, utils_1.getFileExtension)(change.path),
996
- mimeType: (0, utils_1.getEnhancedMimeType)(change.path),
997
- content: isText && isArtifact
998
- ? new A.RawString(change.localContent)
999
- : isText
1000
- ? ""
1001
- : change.localContent,
1002
- metadata: {
1003
- permissions: 0o644,
1004
- },
1005
- };
1006
- const handle = this.repo.create(fileDoc);
1007
- // For non-artifact text files, splice in the content so it's stored as collaborative text
1008
- if (isText && !isArtifact && typeof change.localContent === "string") {
1009
- handle.change((doc) => {
1010
- (0, utils_1.updateTextContent)(doc, ["content"], change.localContent);
1011
- });
1012
- }
1013
- // Always track newly created files for network sync
1014
- // (they always represent a change that needs to sync)
1015
- this.handlesByPath.set(change.path, handle);
1016
- return handle;
1017
- }
1018
- /**
1019
- * Update existing remote file document
1020
- */
1021
- async updateRemoteFile(url, content, snapshot, filePath) {
1022
- // Use plain URL for mutable handle
1023
- const handle = await this.repo.find((0, utils_1.getPlainUrl)(url));
1024
- // Check if content actually changed before tracking for sync
1025
- const doc = await handle.doc();
1026
- const rawContent = doc?.content;
1027
- // For artifact paths, always replace with a new document containing RawString.
1028
- // For non-artifact paths with immutable strings, replace with mutable text.
1029
- // In both cases we create a new document and update the snapshot URL.
1030
- const isArtifact = this.isArtifactPath(filePath);
1031
- if (isArtifact ||
1032
- !doc ||
1033
- (rawContent != null && A.isImmutableString(rawContent))) {
1034
- if (!isArtifact) {
1035
- output_1.out.taskLine(`Replacing ${!doc ? 'unavailable' : 'immutable string'} document for ${filePath}`, true);
1036
- }
1037
- const fakeChange = {
1038
- path: filePath,
1039
- changeType: types_1.ChangeType.LOCAL_ONLY,
1040
- fileType: this.isTextContent(content)
1041
- ? types_1.FileType.TEXT
1042
- : types_1.FileType.BINARY,
1043
- localContent: content,
1044
- remoteContent: null,
1045
- };
1046
- const newHandle = await this.createRemoteFile(fakeChange);
1047
- if (newHandle) {
1048
- const entryUrl = this.getEntryUrl(newHandle, filePath);
1049
- this.snapshotManager.updateFileEntry(snapshot, filePath, {
1050
- path: (0, utils_1.joinAndNormalizePath)(this.rootPath, filePath),
1051
- url: entryUrl,
1052
- head: newHandle.heads(),
1053
- extension: (0, utils_1.getFileExtension)(filePath),
1054
- mimeType: (0, utils_1.getEnhancedMimeType)(filePath),
1055
- ...(this.isArtifactPath(filePath)
1056
- ? { contentHash: (0, content_1.contentHash)(content) }
1057
- : {}),
1058
- });
1059
- }
1060
- return;
1061
- }
1062
- const currentContent = (0, utils_1.readDocContent)(rawContent);
1063
- const contentChanged = !(0, content_1.isContentEqual)(content, currentContent);
1064
- // Update snapshot heads even when content is identical
1065
- const snapshotEntry = snapshot.files.get(filePath);
1066
- if (snapshotEntry) {
1067
- // Update snapshot with current document heads
1068
- snapshot.files.set(filePath, {
1069
- ...snapshotEntry,
1070
- head: handle.heads(),
1071
- });
1072
- }
1073
- if (!contentChanged) {
1074
- // Content is identical, but we've updated the snapshot heads above
1075
- // This prevents fresh change detection from seeing stale heads
1076
- return;
1077
- }
1078
- const heads = snapshotEntry?.head;
1079
- if (!heads) {
1080
- throw new Error(`No heads found for ${url}`);
1081
- }
1082
- handle.changeAt(heads, (doc) => {
1083
- if (typeof content === "string") {
1084
- (0, utils_1.updateTextContent)(doc, ["content"], content);
1085
- }
1086
- else {
1087
- doc.content = content;
1088
- }
1089
- });
1090
- // Update snapshot with new heads after content change
1091
- if (snapshotEntry) {
1092
- snapshot.files.set(filePath, {
1093
- ...snapshotEntry,
1094
- head: handle.heads(),
1095
- });
1096
- }
1097
- // Only track files that actually changed content
1098
- this.handlesByPath.set(filePath, handle);
1099
- }
1100
- /**
1101
- * Delete remote file document
1102
- */
1103
- async deleteRemoteFile(_url, _snapshot, _filePath) {
1104
- // In Automerge, we don't actually delete documents.
1105
- // The file entry is removed from its parent directory, making the
1106
- // document orphaned. Clearing content via splice is expensive for
1107
- // large text files (every character is a CRDT op), so we skip it.
1108
- }
1109
- /**
1110
- * Add file entry to appropriate directory document (maintains hierarchy)
1111
- */
1112
- async addFileToDirectory(snapshot, filePath, fileUrl) {
1113
- if (!snapshot.rootDirectoryUrl)
1114
- return;
1115
- const pathParts = filePath.split("/");
1116
- const fileName = pathParts.pop() || "";
1117
- const directoryPath = pathParts.join("/");
1118
- // Get or create the parent directory document
1119
- const parentDirUrl = await this.ensureDirectoryDocument(snapshot, directoryPath);
1120
- // Use plain URL for mutable handle
1121
- const dirHandle = await this.repo.find((0, utils_1.getPlainUrl)(parentDirUrl));
1122
- let didChange = false;
1123
- const snapshotEntry = snapshot.directories.get(directoryPath);
1124
- const heads = snapshotEntry?.head;
1125
- changeWithOptionalHeads(dirHandle, heads, (doc) => {
1126
- const existingIndex = doc.docs.findIndex(entry => entry.name === fileName && entry.type === "file");
1127
- if (existingIndex === -1) {
1128
- doc.docs.push({
1129
- name: fileName,
1130
- type: "file",
1131
- url: fileUrl,
1132
- });
1133
- didChange = true;
1134
- }
1135
- });
1136
- // Always track the directory (even if unchanged) for proper leaf-first sync ordering
1137
- this.handlesByPath.set(directoryPath, dirHandle);
1138
- if (didChange && snapshotEntry) {
1139
- snapshotEntry.head = dirHandle.heads();
1140
- }
1141
- }
1142
- /**
1143
- * Ensure directory document exists for the given path, creating hierarchy as needed
1144
- * First checks for existing shared directories before creating new ones
1145
- */
1146
- async ensureDirectoryDocument(snapshot, directoryPath) {
1147
- // Root directory case
1148
- if (!directoryPath || directoryPath === "") {
1149
- return snapshot.rootDirectoryUrl;
1150
- }
1151
- // Check if we already have this directory in snapshot
1152
- const existingDir = snapshot.directories.get(directoryPath);
1153
- if (existingDir) {
1154
- return existingDir.url;
1155
- }
1156
- // Split path into parent and current directory name
1157
- const pathParts = directoryPath.split("/");
1158
- const currentDirName = pathParts.pop() || "";
1159
- const parentPath = pathParts.join("/");
1160
- // Ensure parent directory exists first (recursive)
1161
- const parentDirUrl = await this.ensureDirectoryDocument(snapshot, parentPath);
1162
- // DISCOVERY: Check if directory already exists in parent on server
1163
- try {
1164
- const parentHandle = await this.repo.find(parentDirUrl);
1165
- const parentDoc = await parentHandle.doc();
1166
- if (parentDoc) {
1167
- const existingDirEntry = parentDoc.docs.find((entry) => entry.name === currentDirName && entry.type === "folder");
1168
- if (existingDirEntry) {
1169
- // Resolve the actual directory handle and use its current heads
1170
- // Directory entries in parent docs may not carry valid heads
1171
- try {
1172
- const childDirHandle = await this.repo.find(existingDirEntry.url);
1173
- // Track discovered directory for sync
1174
- this.handlesByPath.set(directoryPath, childDirHandle);
1175
- // Get appropriate URL for directory entry
1176
- const entryUrl = this.getDirEntryUrl(childDirHandle, directoryPath);
1177
- // Update snapshot with discovered directory
1178
- this.snapshotManager.updateDirectoryEntry(snapshot, directoryPath, {
1179
- path: (0, utils_1.joinAndNormalizePath)(this.rootPath, directoryPath),
1180
- url: entryUrl,
1181
- head: childDirHandle.heads(),
1182
- entries: [],
1183
- });
1184
- return entryUrl;
1185
- }
1186
- catch (resolveErr) {
1187
- // Failed to resolve directory - fall through to create a fresh directory document
1188
- }
1189
- }
1190
- }
1191
- }
1192
- catch (error) {
1193
- // Failed to check for existing directory - will create new one
1194
- }
1195
- // CREATE: Directory doesn't exist, create new one
1196
- const dirDoc = {
1197
- "@patchwork": { type: "folder" },
1198
- name: currentDirName,
1199
- title: currentDirName,
1200
- docs: [],
1201
- };
1202
- const dirHandle = this.repo.create(dirDoc);
1203
- // Get appropriate URL for directory entry
1204
- const dirEntryUrl = this.getDirEntryUrl(dirHandle, directoryPath);
1205
- // Add this directory to its parent
1206
- // Use plain URL for mutable handle
1207
- const parentHandle = await this.repo.find((0, utils_1.getPlainUrl)(parentDirUrl));
1208
- let didChange = false;
1209
- parentHandle.change((doc) => {
1210
- // Double-check that entry doesn't exist (race condition protection)
1211
- const existingIndex = doc.docs.findIndex((entry) => entry.name === currentDirName && entry.type === "folder");
1212
- if (existingIndex === -1) {
1213
- doc.docs.push({
1214
- name: currentDirName,
1215
- type: "folder",
1216
- url: dirEntryUrl,
1217
- });
1218
- didChange = true;
1219
- }
1220
- });
1221
- // Track directory handles for sync
1222
- this.handlesByPath.set(directoryPath, dirHandle);
1223
- if (didChange) {
1224
- this.handlesByPath.set(parentPath, parentHandle);
1225
- const parentSnapshotEntry = snapshot.directories.get(parentPath);
1226
- if (parentSnapshotEntry) {
1227
- parentSnapshotEntry.head = parentHandle.heads();
1228
- }
1229
- }
1230
- // Update snapshot with new directory
1231
- this.snapshotManager.updateDirectoryEntry(snapshot, directoryPath, {
1232
- path: (0, utils_1.joinAndNormalizePath)(this.rootPath, directoryPath),
1233
- url: dirEntryUrl,
1234
- head: dirHandle.heads(),
1235
- entries: [],
1236
- });
1237
- return dirEntryUrl;
1238
- }
1239
- /**
1240
- * Remove file entry from directory document
1241
- */
1242
- async removeFileFromDirectory(snapshot, filePath) {
1243
- if (!snapshot.rootDirectoryUrl)
1244
- return;
1245
- const pathParts = filePath.split("/");
1246
- const fileName = pathParts.pop() || "";
1247
- const directoryPath = pathParts.join("/");
1248
- // Get the parent directory URL
1249
- let parentDirUrl;
1250
- if (!directoryPath || directoryPath === "") {
1251
- parentDirUrl = snapshot.rootDirectoryUrl;
1252
- }
1253
- else {
1254
- const existingDir = snapshot.directories.get(directoryPath);
1255
- if (!existingDir) {
1256
- // Directory not found - file may already be removed
1257
- return;
1258
- }
1259
- parentDirUrl = existingDir.url;
1260
- }
1261
- try {
1262
- // Use plain URL for mutable handle
1263
- const dirHandle = await this.repo.find((0, utils_1.getPlainUrl)(parentDirUrl));
1264
- // Track this handle for network sync waiting
1265
- this.handlesByPath.set(directoryPath, dirHandle);
1266
- const snapshotEntry = snapshot.directories.get(directoryPath);
1267
- const heads = snapshotEntry?.head;
1268
- let didChange = false;
1269
- changeWithOptionalHeads(dirHandle, heads, (doc) => {
1270
- const indexToRemove = doc.docs.findIndex(entry => entry.name === fileName && entry.type === "file");
1271
- if (indexToRemove !== -1) {
1272
- doc.docs.splice(indexToRemove, 1);
1273
- didChange = true;
1274
- output_1.out.taskLine(`Removed ${fileName} from ${(0, utils_1.formatRelativePath)(directoryPath) || "root"}`);
1275
- }
1276
- });
1277
- if (didChange && snapshotEntry) {
1278
- snapshotEntry.head = dirHandle.heads();
1279
- }
1280
- }
1281
- catch (error) {
1282
- throw error;
1283
- }
1284
- }
1285
- /**
1286
- * Batch-update a directory document in a single change: add new file entries,
1287
- * update URLs for modified files, remove deleted entries, and update
1288
- * subdirectory URLs. This replaces the separate per-file directory mutations
1289
- * and the post-hoc URL update pass.
1290
- */
1291
- async batchUpdateDirectory(snapshot, dirPath, newEntries, updatedEntries, deletedNames, subdirUpdates) {
1292
- let dirUrl;
1293
- if (!dirPath || dirPath === "") {
1294
- dirUrl = snapshot.rootDirectoryUrl;
1295
- }
1296
- else {
1297
- const dirEntry = snapshot.directories.get(dirPath);
1298
- if (!dirEntry)
1299
- return;
1300
- dirUrl = dirEntry.url;
1301
- }
1302
- const dirHandle = await this.repo.find((0, utils_1.getPlainUrl)(dirUrl));
1303
- const snapshotEntry = snapshot.directories.get(dirPath);
1304
- const heads = snapshotEntry?.head;
1305
- // Determine directory name
1306
- const dirName = dirPath ? dirPath.split("/").pop() || "" : path.basename(this.rootPath);
1307
- if (this.isArtifactPath(dirPath)) {
1308
- // Artifact directories are always nuked: rebuild docs array from scratch
1309
- // using a plain change() to avoid changeAt forking from stale heads.
1310
- dirHandle.change((doc) => {
1311
- if (!doc.name)
1312
- doc.name = dirName;
1313
- if (!doc.title)
1314
- doc.title = dirName;
1315
- nukeAndRebuildDocs(doc, dirPath, newEntries, updatedEntries, deletedNames, subdirUpdates);
1316
- });
1317
- }
1318
- else {
1319
- changeWithOptionalHeads(dirHandle, heads, (doc) => {
1320
- // Ensure name and title fields are set
1321
- if (!doc.name)
1322
- doc.name = dirName;
1323
- if (!doc.title)
1324
- doc.title = dirName;
1325
- // Remove deleted file entries
1326
- for (const name of deletedNames) {
1327
- const idx = doc.docs.findIndex(entry => entry.name === name && entry.type === "file");
1328
- if (idx !== -1) {
1329
- doc.docs.splice(idx, 1);
1330
- output_1.out.taskLine(`Removed ${name} from ${(0, utils_1.formatRelativePath)(dirPath) || "root"}`);
1331
- }
1332
- }
1333
- // Update URLs for modified files
1334
- for (const { name, url } of updatedEntries) {
1335
- const idx = doc.docs.findIndex(entry => entry.name === name && entry.type === "file");
1336
- if (idx !== -1) {
1337
- doc.docs[idx].url = url;
1338
- }
1339
- }
1340
- // Add new file entries
1341
- for (const { name, url } of newEntries) {
1342
- const existing = doc.docs.findIndex(entry => entry.name === name && entry.type === "file");
1343
- if (existing === -1) {
1344
- doc.docs.push({ name, type: "file", url });
1345
- }
1346
- else {
1347
- // Entry already exists (e.g. from immutable string replacement)
1348
- doc.docs[existing].url = url;
1349
- }
1350
- }
1351
- // Update subdirectory URLs with current heads
1352
- for (const { name, url } of subdirUpdates) {
1353
- const idx = doc.docs.findIndex(entry => entry.name === name && entry.type === "folder");
1354
- if (idx !== -1) {
1355
- doc.docs[idx].url = url;
1356
- }
1357
- }
1358
- });
1359
- }
1360
- // Track directory handle and update snapshot heads
1361
- this.handlesByPath.set(dirPath, dirHandle);
1362
- if (snapshotEntry) {
1363
- snapshotEntry.head = dirHandle.heads();
1364
- }
1365
- }
1366
- /**
1367
- * Sort changes by dependency order
1368
- */
1369
- sortChangesByDependency(changes) {
1370
- // Sort by path depth (shallower paths first)
1371
- return changes.sort((a, b) => {
1372
- const depthA = a.path.split("/").length;
1373
- const depthB = b.path.split("/").length;
1374
- return depthA - depthB;
1375
- });
1376
- }
1377
- /**
1378
- * Get sync status
1379
- */
1380
- async getStatus() {
1381
- const snapshot = await this.snapshotManager.load();
1382
- if (!snapshot) {
1383
- return {
1384
- snapshot: null,
1385
- hasChanges: false,
1386
- changeCount: 0,
1387
- lastSync: null,
1388
- };
1389
- }
1390
- const changes = await this.changeDetector.detectChanges(snapshot);
1391
- return {
1392
- snapshot,
1393
- hasChanges: changes.length > 0,
1394
- changeCount: changes.length,
1395
- lastSync: new Date(snapshot.timestamp),
1396
- };
1397
- }
1398
- /**
1399
- * Preview changes without applying them
1400
- */
1401
- async previewChanges() {
1402
- const snapshot = await this.snapshotManager.load();
1403
- if (!snapshot) {
1404
- return {
1405
- changes: [],
1406
- moves: [],
1407
- summary: "No snapshot found - run init first",
1408
- };
1409
- }
1410
- const changes = await this.changeDetector.detectChanges(snapshot);
1411
- const { moves } = await this.moveDetector.detectMoves(changes, snapshot);
1412
- const summary = this.generateChangeSummary(changes, moves);
1413
- return { changes, moves, summary };
1414
- }
1415
- /**
1416
- * Generate human-readable summary of changes
1417
- */
1418
- generateChangeSummary(changes, moves) {
1419
- const localChanges = changes.filter(c => c.changeType === types_1.ChangeType.LOCAL_ONLY ||
1420
- c.changeType === types_1.ChangeType.BOTH_CHANGED).length;
1421
- const remoteChanges = changes.filter(c => c.changeType === types_1.ChangeType.REMOTE_ONLY ||
1422
- c.changeType === types_1.ChangeType.BOTH_CHANGED).length;
1423
- const conflicts = changes.filter(c => c.changeType === types_1.ChangeType.BOTH_CHANGED).length;
1424
- const parts = [];
1425
- if (localChanges > 0) {
1426
- parts.push(`${localChanges} local change${localChanges > 1 ? "s" : ""}`);
1427
- }
1428
- if (remoteChanges > 0) {
1429
- parts.push(`${remoteChanges} remote change${remoteChanges > 1 ? "s" : ""}`);
1430
- }
1431
- if (moves.length > 0) {
1432
- parts.push(`${moves.length} potential move${moves.length > 1 ? "s" : ""}`);
1433
- }
1434
- if (conflicts > 0) {
1435
- parts.push(`${conflicts} conflict${conflicts > 1 ? "s" : ""}`);
1436
- }
1437
- if (parts.length === 0) {
1438
- return "No changes detected";
1439
- }
1440
- return parts.join(", ");
1441
- }
1442
- /**
1443
- * Update the lastSyncAt timestamp on the root directory document
1444
- */
1445
- async touchRootDirectory(snapshot) {
1446
- if (!snapshot.rootDirectoryUrl) {
1447
- return;
1448
- }
1449
- try {
1450
- const rootHandle = await this.repo.find(snapshot.rootDirectoryUrl);
1451
- const snapshotEntry = snapshot.directories.get("");
1452
- const heads = snapshotEntry?.head;
1453
- const timestamp = Date.now();
1454
- const version = require("../../package.json").version;
1455
- changeWithOptionalHeads(rootHandle, heads, (doc) => {
1456
- doc.lastSyncAt = timestamp;
1457
- doc.with = `pushwork@${version}`;
1458
- });
1459
- // Track root directory for network sync
1460
- this.handlesByPath.set("", rootHandle);
1461
- if (snapshotEntry) {
1462
- snapshotEntry.head = rootHandle.heads();
1463
- }
1464
- }
1465
- catch (error) {
1466
- // Failed to update root directory timestamp
1467
- }
1468
- }
1469
- }
1470
- exports.SyncEngine = SyncEngine;
1471
- //# sourceMappingURL=sync-engine.js.map