pushwork 2.0.0-a.sub.0 → 2.0.0-preview

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (251)
  1. package/dist/branches.d.ts +19 -0
  2. package/dist/branches.d.ts.map +1 -0
  3. package/dist/branches.js +111 -0
  4. package/dist/branches.js.map +1 -0
  5. package/dist/cli.d.ts +1 -1
  6. package/dist/cli.d.ts.map +1 -1
  7. package/dist/cli.js +238 -272
  8. package/dist/cli.js.map +1 -1
  9. package/dist/config.d.ts +17 -0
  10. package/dist/config.d.ts.map +1 -0
  11. package/dist/config.js +84 -0
  12. package/dist/config.js.map +1 -0
  13. package/dist/fs-tree.d.ts +6 -0
  14. package/dist/fs-tree.d.ts.map +1 -0
  15. package/dist/fs-tree.js +99 -0
  16. package/dist/fs-tree.js.map +1 -0
  17. package/dist/ignore.d.ts +6 -0
  18. package/dist/ignore.d.ts.map +1 -0
  19. package/dist/ignore.js +74 -0
  20. package/dist/ignore.js.map +1 -0
  21. package/dist/index.d.ts +8 -4
  22. package/dist/index.d.ts.map +1 -1
  23. package/dist/index.js +34 -4
  24. package/dist/index.js.map +1 -1
  25. package/dist/log.d.ts +3 -0
  26. package/dist/log.d.ts.map +1 -0
  27. package/dist/log.js +14 -0
  28. package/dist/log.js.map +1 -0
  29. package/dist/pushwork.d.ts +115 -0
  30. package/dist/pushwork.d.ts.map +1 -0
  31. package/dist/pushwork.js +918 -0
  32. package/dist/pushwork.js.map +1 -0
  33. package/dist/repo.d.ts +14 -0
  34. package/dist/repo.d.ts.map +1 -0
  35. package/dist/repo.js +60 -0
  36. package/dist/repo.js.map +1 -0
  37. package/dist/shapes/custom.d.ts +3 -0
  38. package/dist/shapes/custom.d.ts.map +1 -0
  39. package/dist/shapes/custom.js +57 -0
  40. package/dist/shapes/custom.js.map +1 -0
  41. package/dist/shapes/file.d.ts +20 -0
  42. package/dist/shapes/file.d.ts.map +1 -0
  43. package/dist/shapes/file.js +140 -0
  44. package/dist/shapes/file.js.map +1 -0
  45. package/dist/shapes/index.d.ts +10 -0
  46. package/dist/shapes/index.d.ts.map +1 -0
  47. package/dist/shapes/index.js +35 -0
  48. package/dist/shapes/index.js.map +1 -0
  49. package/dist/shapes/patchwork-folder.d.ts +3 -0
  50. package/dist/shapes/patchwork-folder.d.ts.map +1 -0
  51. package/dist/shapes/patchwork-folder.js +160 -0
  52. package/dist/shapes/patchwork-folder.js.map +1 -0
  53. package/dist/shapes/types.d.ts +37 -0
  54. package/dist/shapes/types.d.ts.map +1 -0
  55. package/dist/shapes/types.js +52 -0
  56. package/dist/shapes/types.js.map +1 -0
  57. package/dist/shapes/vfs.d.ts +3 -0
  58. package/dist/shapes/vfs.d.ts.map +1 -0
  59. package/dist/shapes/vfs.js +88 -0
  60. package/dist/shapes/vfs.js.map +1 -0
  61. package/dist/stash.d.ts +23 -0
  62. package/dist/stash.d.ts.map +1 -0
  63. package/dist/stash.js +118 -0
  64. package/dist/stash.js.map +1 -0
  65. package/flake.lock +128 -0
  66. package/flake.nix +66 -0
  67. package/package.json +15 -48
  68. package/patches/@automerge__automerge-repo@2.6.0-subduction.15.patch +26 -0
  69. package/pnpm-workspace.yaml +5 -0
  70. package/src/branches.ts +93 -0
  71. package/src/cli.ts +258 -408
  72. package/src/config.ts +64 -0
  73. package/src/fs-tree.ts +70 -0
  74. package/src/ignore.ts +33 -0
  75. package/src/index.ts +38 -4
  76. package/src/log.ts +8 -0
  77. package/src/pushwork.ts +1055 -0
  78. package/src/repo.ts +76 -0
  79. package/src/shapes/custom.ts +29 -0
  80. package/src/shapes/file.ts +115 -0
  81. package/src/shapes/index.ts +19 -0
  82. package/src/shapes/patchwork-folder.ts +156 -0
  83. package/src/shapes/types.ts +79 -0
  84. package/src/shapes/vfs.ts +93 -0
  85. package/src/stash.ts +106 -0
  86. package/test/integration/branches.test.ts +389 -0
  87. package/test/integration/pushwork.test.ts +547 -0
  88. package/test/setup.ts +29 -0
  89. package/test/unit/doc-shape.test.ts +612 -0
  90. package/tsconfig.json +2 -3
  91. package/vitest.config.ts +14 -0
  92. package/ARCHITECTURE-ACCORDING-TO-CLAUDE.md +0 -248
  93. package/CLAUDE.md +0 -141
  94. package/README.md +0 -221
  95. package/babel.config.js +0 -5
  96. package/dist/cli/commands.d.ts +0 -71
  97. package/dist/cli/commands.d.ts.map +0 -1
  98. package/dist/cli/commands.js +0 -794
  99. package/dist/cli/commands.js.map +0 -1
  100. package/dist/cli/index.d.ts +0 -2
  101. package/dist/cli/index.d.ts.map +0 -1
  102. package/dist/cli/index.js +0 -19
  103. package/dist/cli/index.js.map +0 -1
  104. package/dist/commands.d.ts +0 -61
  105. package/dist/commands.d.ts.map +0 -1
  106. package/dist/commands.js +0 -861
  107. package/dist/commands.js.map +0 -1
  108. package/dist/config/index.d.ts +0 -71
  109. package/dist/config/index.d.ts.map +0 -1
  110. package/dist/config/index.js +0 -314
  111. package/dist/config/index.js.map +0 -1
  112. package/dist/core/change-detection.d.ts +0 -80
  113. package/dist/core/change-detection.d.ts.map +0 -1
  114. package/dist/core/change-detection.js +0 -523
  115. package/dist/core/change-detection.js.map +0 -1
  116. package/dist/core/config.d.ts +0 -81
  117. package/dist/core/config.d.ts.map +0 -1
  118. package/dist/core/config.js +0 -258
  119. package/dist/core/config.js.map +0 -1
  120. package/dist/core/index.d.ts +0 -6
  121. package/dist/core/index.d.ts.map +0 -1
  122. package/dist/core/index.js +0 -6
  123. package/dist/core/index.js.map +0 -1
  124. package/dist/core/move-detection.d.ts +0 -34
  125. package/dist/core/move-detection.d.ts.map +0 -1
  126. package/dist/core/move-detection.js +0 -121
  127. package/dist/core/move-detection.js.map +0 -1
  128. package/dist/core/snapshot.d.ts +0 -105
  129. package/dist/core/snapshot.d.ts.map +0 -1
  130. package/dist/core/snapshot.js +0 -217
  131. package/dist/core/snapshot.js.map +0 -1
  132. package/dist/core/sync-engine.d.ts +0 -151
  133. package/dist/core/sync-engine.d.ts.map +0 -1
  134. package/dist/core/sync-engine.js +0 -1346
  135. package/dist/core/sync-engine.js.map +0 -1
  136. package/dist/types/config.d.ts +0 -99
  137. package/dist/types/config.d.ts.map +0 -1
  138. package/dist/types/config.js +0 -5
  139. package/dist/types/config.js.map +0 -1
  140. package/dist/types/documents.d.ts +0 -88
  141. package/dist/types/documents.d.ts.map +0 -1
  142. package/dist/types/documents.js +0 -20
  143. package/dist/types/documents.js.map +0 -1
  144. package/dist/types/index.d.ts +0 -4
  145. package/dist/types/index.d.ts.map +0 -1
  146. package/dist/types/index.js +0 -4
  147. package/dist/types/index.js.map +0 -1
  148. package/dist/types/snapshot.d.ts +0 -64
  149. package/dist/types/snapshot.d.ts.map +0 -1
  150. package/dist/types/snapshot.js +0 -2
  151. package/dist/types/snapshot.js.map +0 -1
  152. package/dist/utils/content-similarity.d.ts +0 -53
  153. package/dist/utils/content-similarity.d.ts.map +0 -1
  154. package/dist/utils/content-similarity.js +0 -155
  155. package/dist/utils/content-similarity.js.map +0 -1
  156. package/dist/utils/content.d.ts +0 -10
  157. package/dist/utils/content.d.ts.map +0 -1
  158. package/dist/utils/content.js +0 -31
  159. package/dist/utils/content.js.map +0 -1
  160. package/dist/utils/directory.d.ts +0 -24
  161. package/dist/utils/directory.d.ts.map +0 -1
  162. package/dist/utils/directory.js +0 -52
  163. package/dist/utils/directory.js.map +0 -1
  164. package/dist/utils/fs.d.ts +0 -74
  165. package/dist/utils/fs.d.ts.map +0 -1
  166. package/dist/utils/fs.js +0 -248
  167. package/dist/utils/fs.js.map +0 -1
  168. package/dist/utils/index.d.ts +0 -5
  169. package/dist/utils/index.d.ts.map +0 -1
  170. package/dist/utils/index.js +0 -5
  171. package/dist/utils/index.js.map +0 -1
  172. package/dist/utils/mime-types.d.ts +0 -13
  173. package/dist/utils/mime-types.d.ts.map +0 -1
  174. package/dist/utils/mime-types.js +0 -209
  175. package/dist/utils/mime-types.js.map +0 -1
  176. package/dist/utils/network-sync.d.ts +0 -36
  177. package/dist/utils/network-sync.d.ts.map +0 -1
  178. package/dist/utils/network-sync.js +0 -250
  179. package/dist/utils/network-sync.js.map +0 -1
  180. package/dist/utils/node-polyfills.d.ts +0 -9
  181. package/dist/utils/node-polyfills.d.ts.map +0 -1
  182. package/dist/utils/node-polyfills.js +0 -9
  183. package/dist/utils/node-polyfills.js.map +0 -1
  184. package/dist/utils/output.d.ts +0 -129
  185. package/dist/utils/output.d.ts.map +0 -1
  186. package/dist/utils/output.js +0 -368
  187. package/dist/utils/output.js.map +0 -1
  188. package/dist/utils/repo-factory.d.ts +0 -13
  189. package/dist/utils/repo-factory.d.ts.map +0 -1
  190. package/dist/utils/repo-factory.js +0 -46
  191. package/dist/utils/repo-factory.js.map +0 -1
  192. package/dist/utils/string-similarity.d.ts +0 -14
  193. package/dist/utils/string-similarity.d.ts.map +0 -1
  194. package/dist/utils/string-similarity.js +0 -39
  195. package/dist/utils/string-similarity.js.map +0 -1
  196. package/dist/utils/text-diff.d.ts +0 -37
  197. package/dist/utils/text-diff.d.ts.map +0 -1
  198. package/dist/utils/text-diff.js +0 -93
  199. package/dist/utils/text-diff.js.map +0 -1
  200. package/dist/utils/trace.d.ts +0 -19
  201. package/dist/utils/trace.d.ts.map +0 -1
  202. package/dist/utils/trace.js +0 -63
  203. package/dist/utils/trace.js.map +0 -1
  204. package/src/commands.ts +0 -1134
  205. package/src/core/change-detection.ts +0 -712
  206. package/src/core/config.ts +0 -313
  207. package/src/core/index.ts +0 -5
  208. package/src/core/move-detection.ts +0 -169
  209. package/src/core/snapshot.ts +0 -275
  210. package/src/core/sync-engine.ts +0 -1758
  211. package/src/types/config.ts +0 -111
  212. package/src/types/documents.ts +0 -91
  213. package/src/types/index.ts +0 -3
  214. package/src/types/snapshot.ts +0 -67
  215. package/src/utils/content.ts +0 -34
  216. package/src/utils/directory.ts +0 -73
  217. package/src/utils/fs.ts +0 -297
  218. package/src/utils/index.ts +0 -4
  219. package/src/utils/mime-types.ts +0 -244
  220. package/src/utils/network-sync.ts +0 -319
  221. package/src/utils/node-polyfills.ts +0 -8
  222. package/src/utils/output.ts +0 -450
  223. package/src/utils/repo-factory.ts +0 -73
  224. package/src/utils/string-similarity.ts +0 -54
  225. package/src/utils/text-diff.ts +0 -101
  226. package/src/utils/trace.ts +0 -70
  227. package/test/integration/README.md +0 -328
  228. package/test/integration/clone-test.sh +0 -310
  229. package/test/integration/conflict-resolution-test.sh +0 -309
  230. package/test/integration/debug-both-nested.sh +0 -74
  231. package/test/integration/debug-concurrent-nested.sh +0 -87
  232. package/test/integration/debug-nested.sh +0 -73
  233. package/test/integration/deletion-behavior-test.sh +0 -487
  234. package/test/integration/deletion-sync-test-simple.sh +0 -193
  235. package/test/integration/deletion-sync-test.sh +0 -297
  236. package/test/integration/exclude-patterns.test.ts +0 -144
  237. package/test/integration/full-integration-test.sh +0 -363
  238. package/test/integration/fuzzer.test.ts +0 -818
  239. package/test/integration/in-memory-sync.test.ts +0 -830
  240. package/test/integration/init-sync.test.ts +0 -89
  241. package/test/integration/manual-sync-test.sh +0 -84
  242. package/test/integration/sync-deletion.test.ts +0 -280
  243. package/test/integration/sync-flow.test.ts +0 -291
  244. package/test/jest.setup.ts +0 -34
  245. package/test/run-tests.sh +0 -225
  246. package/test/unit/deletion-behavior.test.ts +0 -249
  247. package/test/unit/enhanced-mime-detection.test.ts +0 -244
  248. package/test/unit/snapshot.test.ts +0 -404
  249. package/test/unit/sync-convergence.test.ts +0 -298
  250. package/test/unit/sync-timing.test.ts +0 -134
  251. package/test/unit/utils.test.ts +0 -366
@@ -1,1346 +0,0 @@
1
- import { parseAutomergeUrl, stringifyAutomergeUrl, } from "@automerge/automerge-repo";
2
- import * as A from "@automerge/automerge";
3
- import { ChangeType, FileType, } from "../types/index.js";
4
- import { writeFileContent, removePath, getFileExtension, getEnhancedMimeType, formatRelativePath, findFileInDirectoryHierarchy, joinAndNormalizePath, getPlainUrl, updateTextContent, readDocContent, } from "../utils/index.js";
5
- import { isContentEqual, contentHash } from "../utils/content.js";
6
- import { waitForSync, waitForBidirectionalSync } from "../utils/network-sync.js";
7
- import { SnapshotManager } from "./snapshot.js";
8
- import { ChangeDetector } from "./change-detection.js";
9
- import { MoveDetector } from "./move-detection.js";
10
- import { out } from "../utils/output.js";
11
- import * as path from "path";
12
- const isDebug = !!process.env.DEBUG;
13
- function debug(...args) {
14
- if (isDebug)
15
- console.error("[pushwork:engine]", ...args);
16
- }
17
- /**
18
- * Apply a change to a document handle, using changeAt when heads are available
19
- * to branch from a known version, otherwise falling back to change.
20
- */
21
- function changeWithOptionalHeads(handle, heads, callback) {
22
- if (heads && heads.length > 0) {
23
- handle.changeAt(heads, callback);
24
- }
25
- else {
26
- handle.change(callback);
27
- }
28
- }
29
- /**
30
- * Sync configuration constants
31
- */
32
- const BIDIRECTIONAL_SYNC_TIMEOUT_MS = 5000; // Timeout for bidirectional sync stability check
33
- /**
34
- * Bidirectional sync engine implementing two-phase sync
35
- */
36
- export class SyncEngine {
37
- constructor(repo, rootPath, config) {
38
- this.repo = repo;
39
- this.rootPath = rootPath;
40
- // Map from path to handle for leaf-first sync ordering
41
- // Path depth determines sync order (deepest first)
42
- this.handlesByPath = new Map();
43
- this.config = config;
44
- this.snapshotManager = new SnapshotManager(rootPath);
45
- this.changeDetector = new ChangeDetector(repo, rootPath, config.exclude_patterns, config.artifact_directories || []);
46
- this.moveDetector = new MoveDetector(config.sync.move_detection_threshold);
47
- }
48
- /**
49
- * Determine if content should be treated as text for Automerge text operations
50
- * Note: This method checks the runtime type. File type detection happens
51
- * during reading with isEnhancedTextFile() which now has better dev file support.
52
- */
53
- isTextContent(content) {
54
- // Simply check the actual type of the content
55
- return typeof content === "string";
56
- }
57
- /**
58
- * Get a versioned URL from a handle (includes current heads).
59
- * This ensures clients can fetch the exact version of the document.
60
- */
61
- getVersionedUrl(handle) {
62
- const { documentId } = parseAutomergeUrl(handle.url);
63
- const heads = handle.heads();
64
- return stringifyAutomergeUrl({ documentId, heads });
65
- }
66
- /**
67
- * Determine if a file path is inside an artifact directory.
68
- * Artifact files are stored as immutable strings (RawString) and
69
- * referenced with versioned URLs in directory entries.
70
- */
71
- isArtifactPath(filePath) {
72
- const artifactDirs = this.config.artifact_directories || [];
73
- return artifactDirs.some(dir => filePath === dir || filePath.startsWith(dir + "/"));
74
- }
75
- /**
76
- * Get the appropriate URL for a directory entry.
77
- * Artifact paths get versioned URLs (with heads) for exact version fetching.
78
- * Non-artifact paths get plain URLs for collaborative editing.
79
- */
80
- getEntryUrl(handle, filePath) {
81
- if (this.isArtifactPath(filePath)) {
82
- return this.getVersionedUrl(handle);
83
- }
84
- return getPlainUrl(handle.url);
85
- }
86
- /**
87
- * Set the root directory URL in the snapshot
88
- */
89
- async setRootDirectoryUrl(url) {
90
- let snapshot = await this.snapshotManager.load();
91
- if (!snapshot) {
92
- snapshot = this.snapshotManager.createEmpty();
93
- }
94
- snapshot.rootDirectoryUrl = url;
95
- await this.snapshotManager.save(snapshot);
96
- }
97
- /**
98
- * Reset the snapshot, clearing all tracked files and directories.
99
- * Preserves the rootDirectoryUrl so sync can still operate.
100
- * Used by --force to re-sync every file.
101
- */
102
- async resetSnapshot() {
103
- let snapshot = await this.snapshotManager.load();
104
- if (!snapshot)
105
- return;
106
- this.snapshotManager.clear(snapshot);
107
- await this.snapshotManager.save(snapshot);
108
- }
109
- /**
110
- * Nuclear reset: clear the snapshot AND wipe the root directory document's
111
- * entries so that every file and subdirectory gets brand-new Automerge
112
- * documents. The root directory document itself is preserved.
113
- */
114
- async nuclearReset() {
115
- let snapshot = await this.snapshotManager.load();
116
- if (!snapshot)
117
- return;
118
- // Clear the root directory document's entries
119
- if (snapshot.rootDirectoryUrl) {
120
- const rootHandle = await this.repo.find(getPlainUrl(snapshot.rootDirectoryUrl));
121
- rootHandle.change((doc) => {
122
- doc.docs.splice(0, doc.docs.length);
123
- });
124
- }
125
- // Clear all tracked files and directories from snapshot
126
- this.snapshotManager.clear(snapshot);
127
- await this.snapshotManager.save(snapshot);
128
- }
129
- /**
130
- * Commit local changes only (no network sync)
131
- */
132
- async commitLocal() {
133
- const result = {
134
- success: false,
135
- filesChanged: 0,
136
- directoriesChanged: 0,
137
- errors: [],
138
- warnings: [],
139
- };
140
- try {
141
- // Load current snapshot
142
- let snapshot = await this.snapshotManager.load();
143
- if (!snapshot) {
144
- snapshot = this.snapshotManager.createEmpty();
145
- }
146
- // Detect all changes
147
- const changes = await this.changeDetector.detectChanges(snapshot);
148
- // Detect moves
149
- const { moves, remainingChanges } = await this.moveDetector.detectMoves(changes, snapshot);
150
- // Apply local changes only (no network sync)
151
- const commitResult = await this.pushLocalChanges(remainingChanges, moves, snapshot);
152
- result.filesChanged += commitResult.filesChanged;
153
- result.directoriesChanged += commitResult.directoriesChanged;
154
- result.errors.push(...commitResult.errors);
155
- result.warnings.push(...commitResult.warnings);
156
- // Touch root directory if any changes were made
157
- const hasChanges = result.filesChanged > 0 || result.directoriesChanged > 0;
158
- if (hasChanges) {
159
- await this.touchRootDirectory(snapshot);
160
- }
161
- // Save updated snapshot
162
- await this.snapshotManager.save(snapshot);
163
- result.success = result.errors.length === 0;
164
- return result;
165
- }
166
- catch (error) {
167
- result.errors.push({
168
- path: this.rootPath,
169
- operation: "commitLocal",
170
- error: error instanceof Error ? error : new Error(String(error)),
171
- recoverable: true,
172
- });
173
- result.success = false;
174
- return result;
175
- }
176
- }
177
- /**
178
- * Recreate documents that failed to sync. Creates new Automerge documents
179
- * with the same content and updates all references (snapshot, parent directory).
180
- * Returns new handles that should be retried for sync.
181
- */
182
- async recreateFailedDocuments(failedHandles, snapshot) {
183
- const failedUrls = new Set(failedHandles.map(h => getPlainUrl(h.url)));
184
- const newHandles = [];
185
- // Find which paths correspond to the failed handles
186
- for (const [filePath, entry] of snapshot.files.entries()) {
187
- const plainUrl = getPlainUrl(entry.url);
188
- if (!failedUrls.has(plainUrl))
189
- continue;
190
- debug(`recreate: recreating document for ${filePath} (${plainUrl})`);
191
- out.taskLine(`Recreating document for ${filePath}`);
192
- try {
193
- // Read the current content from the old handle
194
- const oldHandle = await this.repo.find(plainUrl);
195
- const doc = await oldHandle.doc();
196
- if (!doc) {
197
- debug(`recreate: could not read doc for ${filePath}, skipping`);
198
- continue;
199
- }
200
- const content = readDocContent(doc.content);
201
- if (content === null) {
202
- debug(`recreate: null content for ${filePath}, skipping`);
203
- continue;
204
- }
205
- // Create a fresh document
206
- const fakeChange = {
207
- path: filePath,
208
- changeType: ChangeType.LOCAL_ONLY,
209
- fileType: this.isTextContent(content) ? FileType.TEXT : FileType.BINARY,
210
- localContent: content,
211
- remoteContent: null,
212
- };
213
- const newHandle = await this.createRemoteFile(fakeChange);
214
- if (!newHandle)
215
- continue;
216
- const entryUrl = this.getEntryUrl(newHandle, filePath);
217
- // Update snapshot entry
218
- this.snapshotManager.updateFileEntry(snapshot, filePath, {
219
- ...entry,
220
- url: entryUrl,
221
- head: newHandle.heads(),
222
- ...(this.isArtifactPath(filePath) ? { contentHash: contentHash(content) } : {}),
223
- });
224
- // Update parent directory entry to point to new document
225
- const pathParts = filePath.split("/");
226
- const fileName = pathParts.pop() || "";
227
- const dirPath = pathParts.join("/");
228
- let dirUrl;
229
- if (!dirPath || dirPath === "") {
230
- dirUrl = snapshot.rootDirectoryUrl;
231
- }
232
- else {
233
- const dirEntry = snapshot.directories.get(dirPath);
234
- if (!dirEntry)
235
- continue;
236
- dirUrl = dirEntry.url;
237
- }
238
- const dirHandle = await this.repo.find(getPlainUrl(dirUrl));
239
- dirHandle.change((d) => {
240
- const idx = d.docs.findIndex(e => e.name === fileName && e.type === "file");
241
- if (idx !== -1) {
242
- d.docs[idx].url = entryUrl;
243
- }
244
- });
245
- // Track new handles
246
- this.handlesByPath.set(filePath, newHandle);
247
- this.handlesByPath.set(dirPath, dirHandle);
248
- newHandles.push(newHandle);
249
- newHandles.push(dirHandle);
250
- debug(`recreate: created new doc for ${filePath} -> ${newHandle.url}`);
251
- }
252
- catch (error) {
253
- debug(`recreate: failed for ${filePath}: ${error}`);
254
- out.taskLine(`Failed to recreate ${filePath}: ${error}`, true);
255
- }
256
- }
257
- // Also check directory documents
258
- for (const [dirPath, entry] of snapshot.directories.entries()) {
259
- const plainUrl = getPlainUrl(entry.url);
260
- if (!failedUrls.has(plainUrl))
261
- continue;
262
- // Directory docs can't be easily recreated (they reference children).
263
- // Just log a warning — the child recreation above should handle most cases.
264
- debug(`recreate: directory ${dirPath || "(root)"} failed to sync, cannot recreate`);
265
- out.taskLine(`Warning: directory ${dirPath || "(root)"} failed to sync`, true);
266
- }
267
- return newHandles;
268
- }
269
- /**
270
- * Run full bidirectional sync
271
- */
272
- async sync() {
273
- const result = {
274
- success: false,
275
- filesChanged: 0,
276
- directoriesChanged: 0,
277
- errors: [],
278
- warnings: [],
279
- timings: {},
280
- };
281
- // Reset tracked handles for sync
282
- this.handlesByPath = new Map();
283
- try {
284
- // Load current snapshot
285
- const snapshot = (await this.snapshotManager.load()) ||
286
- this.snapshotManager.createEmpty();
287
- debug(`sync: rootDirectoryUrl=${snapshot.rootDirectoryUrl}, files=${snapshot.files.size}, dirs=${snapshot.directories.size}`);
288
- // Wait for initial sync to receive any pending remote changes
289
- if (this.config.sync_enabled && snapshot.rootDirectoryUrl) {
290
- debug("sync: waiting for root document to be ready");
291
- out.update("Waiting for root document from server");
292
- // Wait for the root document to be fetched from the network.
293
- // repo.find() rejects with "unavailable" if the server doesn't
294
- // have the document yet, so we retry with backoff.
295
- // This is critical for clone scenarios.
296
- const plainRootUrl = getPlainUrl(snapshot.rootDirectoryUrl);
297
- const maxAttempts = 6;
298
- for (let attempt = 1; attempt <= maxAttempts; attempt++) {
299
- try {
300
- const rootHandle = await this.repo.find(plainRootUrl);
301
- rootHandle.doc(); // throws if not ready
302
- debug(`sync: root document ready (attempt ${attempt})`);
303
- break;
304
- }
305
- catch (error) {
306
- const isUnavailable = String(error).includes("unavailable") || String(error).includes("not ready");
307
- if (isUnavailable && attempt < maxAttempts) {
308
- const delay = Math.min(1000 * Math.pow(2, attempt - 1), 10000);
309
- debug(`sync: root document not available (attempt ${attempt}/${maxAttempts}), retrying in ${delay}ms`);
310
- out.update(`Waiting for root document (attempt ${attempt}/${maxAttempts})`);
311
- await new Promise(r => setTimeout(r, delay));
312
- }
313
- else {
314
- debug(`sync: root document unavailable after ${maxAttempts} attempts: ${error}`);
315
- out.taskLine(`Root document unavailable: ${error}`, true);
316
- break;
317
- }
318
- }
319
- }
320
- debug("sync: waiting for initial bidirectional sync");
321
- out.update("Waiting for initial sync from server");
322
- try {
323
- await waitForBidirectionalSync(this.repo, snapshot.rootDirectoryUrl, {
324
- timeoutMs: 5000, // Increased timeout for initial sync
325
- pollIntervalMs: 100,
326
- stableChecksRequired: 3,
327
- });
328
- }
329
- catch (error) {
330
- out.taskLine(`Initial sync: ${error}`, true);
331
- }
332
- }
333
- // Detect all changes
334
- debug("sync: detecting changes");
335
- out.update("Detecting local and remote changes");
336
- // Capture pre-push snapshot file paths to detect deletions after push
337
- const prePushFilePaths = new Set(snapshot.files.keys());
338
- const changes = await this.changeDetector.detectChanges(snapshot);
339
- // Detect moves
340
- const { moves, remainingChanges } = await this.moveDetector.detectMoves(changes, snapshot);
341
- debug(`sync: detected ${changes.length} changes, ${moves.length} moves, ${remainingChanges.length} remaining`);
342
- // Phase 1: Push local changes to remote
343
- debug("sync: phase 1 - pushing local changes");
344
- const phase1Result = await this.pushLocalChanges(remainingChanges, moves, snapshot);
345
- result.filesChanged += phase1Result.filesChanged;
346
- result.directoriesChanged += phase1Result.directoriesChanged;
347
- result.errors.push(...phase1Result.errors);
348
- result.warnings.push(...phase1Result.warnings);
349
- debug(`sync: phase 1 complete - ${phase1Result.filesChanged} files, ${phase1Result.directoriesChanged} dirs changed`);
350
- // Wait for network sync (important for clone scenarios)
351
- if (this.config.sync_enabled) {
352
- try {
353
- // Ensure root directory handle is tracked for sync
354
- if (snapshot.rootDirectoryUrl) {
355
- const rootHandle = await this.repo.find(snapshot.rootDirectoryUrl);
356
- this.handlesByPath.set("", rootHandle);
357
- }
358
- // Single waitForSync with ALL tracked handles at once
359
- if (this.handlesByPath.size > 0) {
360
- const allHandles = Array.from(this.handlesByPath.values());
361
- const handlePaths = Array.from(this.handlesByPath.keys());
362
- debug(`sync: waiting for ${allHandles.length} handles to sync to server: ${handlePaths.slice(0, 10).map(p => p || "(root)").join(", ")}${handlePaths.length > 10 ? ` ...and ${handlePaths.length - 10} more` : ""}`);
363
- out.update(`Uploading ${allHandles.length} documents to sync server`);
364
- const { failed } = await waitForSync(allHandles);
365
- // Recreate failed documents and retry once
366
- if (failed.length > 0) {
367
- debug(`sync: ${failed.length} documents failed, recreating`);
368
- out.update(`Recreating ${failed.length} failed documents`);
369
- const retryHandles = await this.recreateFailedDocuments(failed, snapshot);
370
- if (retryHandles.length > 0) {
371
- debug(`sync: retrying ${retryHandles.length} recreated handles`);
372
- out.update(`Retrying ${retryHandles.length} recreated documents`);
373
- const retry = await waitForSync(retryHandles);
374
- if (retry.failed.length > 0) {
375
- const msg = `${retry.failed.length} documents failed to sync to server after recreation`;
376
- debug(`sync: ${msg}`);
377
- result.errors.push({
378
- path: "sync",
379
- operation: "upload",
380
- error: new Error(msg),
381
- recoverable: true,
382
- });
383
- }
384
- }
385
- }
386
- debug("sync: all handles synced to server");
387
- }
388
- // Wait for bidirectional sync to stabilize
389
- // Use tracked handles for post-push check (cheaper than full tree scan)
390
- const changedHandles = Array.from(this.handlesByPath.values());
391
- debug(`sync: waiting for bidirectional sync to stabilize (${changedHandles.length} tracked handles)`);
392
- out.update("Waiting for bidirectional sync to stabilize");
393
- await waitForBidirectionalSync(this.repo, snapshot.rootDirectoryUrl, {
394
- timeoutMs: BIDIRECTIONAL_SYNC_TIMEOUT_MS,
395
- pollIntervalMs: 100,
396
- stableChecksRequired: 3,
397
- handles: changedHandles.length > 0 ? changedHandles : undefined,
398
- });
399
- // Touch root directory AFTER all docs are synced and stable.
400
- // This signals consumers (e.g. Patchwork) that new content is
401
- // available. Because file docs are already on the server,
402
- // consumers can immediately fetch them when they see the root change.
403
- const hasPhase1Changes = phase1Result.filesChanged > 0 || phase1Result.directoriesChanged > 0;
404
- if (hasPhase1Changes && snapshot.rootDirectoryUrl) {
405
- await this.touchRootDirectory(snapshot);
406
- const rootHandle = await this.repo.find(snapshot.rootDirectoryUrl);
407
- debug("sync: syncing root directory touch to server");
408
- out.update("Syncing root directory update");
409
- await waitForSync([rootHandle]);
410
- }
411
- }
412
- catch (error) {
413
- debug(`sync: network sync error: ${error}`);
414
- out.taskLine(`Network sync failed: ${error}`, true);
415
- result.errors.push({
416
- path: "sync",
417
- operation: "network-sync",
418
- error: error instanceof Error ? error : new Error(String(error)),
419
- recoverable: true,
420
- });
421
- }
422
- }
423
- // Re-detect changes after network sync for fresh state
424
- // Compute paths deleted during push so they aren't resurrected during pull
425
- const deletedPaths = new Set();
426
- for (const p of prePushFilePaths) {
427
- if (!snapshot.files.has(p)) {
428
- deletedPaths.add(p);
429
- }
430
- }
431
- if (deletedPaths.size > 0) {
432
- debug(`sync: excluding ${deletedPaths.size} deleted paths from re-detection`);
433
- }
434
- debug("sync: re-detecting changes after network sync");
435
- const freshChanges = await this.changeDetector.detectChanges(snapshot, deletedPaths);
436
- const freshRemoteChanges = freshChanges.filter(c => c.changeType === ChangeType.REMOTE_ONLY ||
437
- c.changeType === ChangeType.BOTH_CHANGED);
438
- debug(`sync: phase 2 - pulling ${freshRemoteChanges.length} remote changes`);
439
- if (freshRemoteChanges.length > 0) {
440
- out.update(`Pulling ${freshRemoteChanges.length} remote changes`);
441
- }
442
- // Phase 2: Pull remote changes to local using fresh detection
443
- const phase2Result = await this.pullRemoteChanges(freshRemoteChanges, snapshot);
444
- result.filesChanged += phase2Result.filesChanged;
445
- result.directoriesChanged += phase2Result.directoriesChanged;
446
- result.errors.push(...phase2Result.errors);
447
- result.warnings.push(...phase2Result.warnings);
448
- // Update snapshot heads after pulling remote changes
449
- // IMPORTANT: Use getPlainUrl() to strip version/heads from URLs.
450
- // Artifact entries store versioned URLs (with heads baked in).
451
- // repo.find(versionedUrl) returns a view handle whose .heads()
452
- // returns the VERSION heads, not the current document heads.
453
- // Using the versioned URL here would overwrite correct heads with
454
- // stale ones, causing changeAt() to fork from the wrong point
455
- // on the next sync (e.g. an empty directory state where deletions
456
- // can't find the entries to splice out).
457
- for (const [filePath, snapshotEntry] of snapshot.files.entries()) {
458
- try {
459
- const handle = await this.repo.find(getPlainUrl(snapshotEntry.url));
460
- const currentHeads = handle.heads();
461
- if (!A.equals(currentHeads, snapshotEntry.head)) {
462
- // Update snapshot with current heads after pulling changes
463
- snapshot.files.set(filePath, {
464
- ...snapshotEntry,
465
- head: currentHeads,
466
- });
467
- }
468
- }
469
- catch (error) {
470
- // Handle might not exist if file was deleted
471
- }
472
- }
473
- // Update directory document heads
474
- for (const [dirPath, snapshotEntry] of snapshot.directories.entries()) {
475
- try {
476
- const handle = await this.repo.find(getPlainUrl(snapshotEntry.url));
477
- const currentHeads = handle.heads();
478
- if (!A.equals(currentHeads, snapshotEntry.head)) {
479
- // Update snapshot with current heads after pulling changes
480
- snapshot.directories.set(dirPath, {
481
- ...snapshotEntry,
482
- head: currentHeads,
483
- });
484
- }
485
- }
486
- catch (error) {
487
- // Handle might not exist if directory was deleted
488
- }
489
- }
490
- // Save updated snapshot if not dry run
491
- await this.snapshotManager.save(snapshot);
492
- result.success = result.errors.length === 0;
493
- return result;
494
- }
495
- catch (error) {
496
- result.errors.push({
497
- path: "sync",
498
- operation: "full-sync",
499
- error: error,
500
- recoverable: false,
501
- });
502
- return result;
503
- }
504
- }
505
/**
 * Phase 1: Push local changes to Automerge documents.
 *
 * Works depth-first: processes the deepest files first, creates/updates all
 * file docs at each level, then batch-updates the parent directory document
 * in a single change. Propagates subdirectory URL updates as we walk up
 * toward the root. This eliminates the need for a separate URL update pass.
 *
 * @param changes  detected changes (all kinds; filtered to local-side below)
 * @param moves    detected file moves, applied before other changes
 * @param snapshot mutable sync snapshot; file/directory entries and heads are
 *                 updated in place as documents are created/modified
 * @returns result object: { success, filesChanged, directoriesChanged, errors, warnings }
 */
async pushLocalChanges(changes, moves, snapshot) {
    const result = {
        success: true,
        filesChanged: 0,
        directoriesChanged: 0,
        errors: [],
        warnings: [],
    };
    // Process moves first - all detected moves are applied.
    // A failed move is recorded as a recoverable error and does not abort the push.
    if (moves.length > 0) {
        debug(`push: processing ${moves.length} moves`);
        out.update(`Processing ${moves.length} move${moves.length > 1 ? "s" : ""}`);
    }
    for (let i = 0; i < moves.length; i++) {
        const move = moves[i];
        try {
            debug(`push: move ${i + 1}/${moves.length}: ${move.fromPath} -> ${move.toPath}`);
            out.taskLine(`Moving ${move.fromPath} -> ${move.toPath}`);
            await this.applyMoveToRemote(move, snapshot);
            result.filesChanged++;
        }
        catch (error) {
            debug(`push: move failed for ${move.fromPath}: ${error}`);
            result.errors.push({
                path: move.fromPath,
                operation: "move",
                error: error,
                recoverable: true,
            });
        }
    }
    // Filter to local changes only (BOTH_CHANGED also has a local side to push)
    const localChanges = changes.filter(c => c.changeType === ChangeType.LOCAL_ONLY ||
        c.changeType === ChangeType.BOTH_CHANGED);
    if (localChanges.length === 0) {
        debug("push: no local changes to push");
        return result;
    }
    // Classify by presence in the snapshot: no entry => new, entry + content => modified,
    // entry + null content => deleted. Used only for progress reporting here.
    const newFiles = localChanges.filter(c => !snapshot.files.has(c.path) && c.localContent !== null);
    const modifiedFiles = localChanges.filter(c => snapshot.files.has(c.path) && c.localContent !== null);
    const deletedFiles = localChanges.filter(c => c.localContent === null && snapshot.files.has(c.path));
    debug(`push: ${localChanges.length} local changes (${newFiles.length} new, ${modifiedFiles.length} modified, ${deletedFiles.length} deleted)`);
    out.update(`Pushing ${localChanges.length} local changes (${newFiles.length} new, ${modifiedFiles.length} modified, ${deletedFiles.length} deleted)`);
    // Group changes by parent directory path
    const changesByDir = new Map();
    for (const change of localChanges) {
        const pathParts = change.path.split("/");
        pathParts.pop(); // remove filename
        const dirPath = pathParts.join("/");
        if (!changesByDir.has(dirPath)) {
            changesByDir.set(dirPath, []);
        }
        changesByDir.get(dirPath).push(change);
    }
    // Collect all directory paths that need processing:
    // directories with file changes + all ancestors up to root
    const allDirsToProcess = new Set();
    for (const dirPath of changesByDir.keys()) {
        allDirsToProcess.add(dirPath);
        // Add ancestors so subdirectory URL updates propagate to root
        let current = dirPath;
        while (current) {
            const parts = current.split("/");
            parts.pop();
            current = parts.join("/");
            allDirsToProcess.add(current);
        }
    }
    // Sort deepest-first (root path "" has depth 0)
    const sortedDirPaths = Array.from(allDirsToProcess).sort((a, b) => {
        const depthA = a ? a.split("/").length : 0;
        const depthB = b ? b.split("/").length : 0;
        return depthB - depthA;
    });
    debug(`push: processing ${sortedDirPaths.length} directories (deepest first)`);
    // Track which directories were modified (for subdirectory URL propagation)
    const modifiedDirs = new Set();
    let filesProcessed = 0;
    const totalFiles = localChanges.length;
    for (const dirPath of sortedDirPaths) {
        const dirChanges = changesByDir.get(dirPath) || [];
        const dirLabel = dirPath || "(root)";
        if (dirChanges.length > 0) {
            debug(`push: directory "${dirLabel}": ${dirChanges.length} file changes`);
        }
        // Ensure directory document exists
        if (snapshot.rootDirectoryUrl) {
            await this.ensureDirectoryDocument(snapshot, dirPath);
        }
        // Process all file changes in this directory, accumulating the directory
        // entry mutations so they can be applied in one batch change below.
        const newEntries = [];
        const updatedEntries = [];
        const deletedNames = [];
        for (const change of dirChanges) {
            const fileName = change.path.split("/").pop() || "";
            const snapshotEntry = snapshot.files.get(change.path);
            filesProcessed++;
            try {
                if (change.localContent === null && snapshotEntry) {
                    // Delete file
                    debug(`push: [${filesProcessed}/${totalFiles}] delete ${change.path}`);
                    out.update(`Pushing local changes [${filesProcessed}/${totalFiles}] deleting ${change.path}`);
                    await this.deleteRemoteFile(snapshotEntry.url, snapshot, change.path);
                    deletedNames.push(fileName);
                    this.snapshotManager.removeFileEntry(snapshot, change.path);
                    result.filesChanged++;
                }
                else if (!snapshotEntry) {
                    // New file
                    debug(`push: [${filesProcessed}/${totalFiles}] create ${change.path} (${change.fileType})`);
                    out.update(`Pushing local changes [${filesProcessed}/${totalFiles}] creating ${change.path}`);
                    const handle = await this.createRemoteFile(change);
                    if (handle) {
                        const entryUrl = this.getEntryUrl(handle, change.path);
                        newEntries.push({ name: fileName, url: entryUrl });
                        this.snapshotManager.updateFileEntry(snapshot, change.path, {
                            path: joinAndNormalizePath(this.rootPath, change.path),
                            url: entryUrl,
                            head: handle.heads(),
                            extension: getFileExtension(change.path),
                            mimeType: getEnhancedMimeType(change.path),
                            // Artifact files additionally record a content hash for cheap comparison
                            ...(this.isArtifactPath(change.path) && change.localContent
                                ? { contentHash: contentHash(change.localContent) }
                                : {}),
                        });
                        result.filesChanged++;
                        debug(`push: created ${change.path} -> ${handle.url}`);
                    }
                }
                else {
                    // Update existing file
                    const contentSize = typeof change.localContent === "string"
                        ? `${change.localContent.length} chars`
                        : `${change.localContent.length} bytes`;
                    debug(`push: [${filesProcessed}/${totalFiles}] update ${change.path} (${contentSize})`);
                    out.update(`Pushing local changes [${filesProcessed}/${totalFiles}] updating ${change.path}`);
                    await this.updateRemoteFile(snapshotEntry.url, change.localContent, snapshot, change.path);
                    // Get current entry URL (updateRemoteFile updates snapshot,
                    // and may have replaced the document with a new URL)
                    const updatedFileEntry = snapshot.files.get(change.path);
                    if (updatedFileEntry) {
                        const fileHandle = await this.repo.find(getPlainUrl(updatedFileEntry.url));
                        updatedEntries.push({
                            name: fileName,
                            url: this.getEntryUrl(fileHandle, change.path),
                        });
                    }
                    result.filesChanged++;
                }
            }
            catch (error) {
                debug(`push: error processing ${change.path}: ${error}`);
                out.taskLine(`Error pushing ${change.path}: ${error}`, true);
                result.errors.push({
                    path: change.path,
                    operation: "local-to-remote",
                    error: error,
                    recoverable: true,
                });
            }
        }
        // Collect subdirectory URL updates for child dirs already processed
        // (deepest-first ordering guarantees children were handled before parents)
        const subdirUpdates = [];
        for (const modifiedDir of modifiedDirs) {
            // Check if modifiedDir is a direct child of dirPath
            const parts = modifiedDir.split("/");
            const childName = parts.pop() || "";
            const parentOfModified = parts.join("/");
            if (parentOfModified === dirPath) {
                const dirEntry = snapshot.directories.get(modifiedDir);
                if (dirEntry) {
                    const childHandle = await this.repo.find(getPlainUrl(dirEntry.url));
                    subdirUpdates.push({
                        name: childName,
                        url: this.getEntryUrl(childHandle, modifiedDir),
                    });
                }
            }
        }
        // Batch-update the directory document in a single change
        const hasChanges = newEntries.length > 0 ||
            updatedEntries.length > 0 ||
            deletedNames.length > 0 ||
            subdirUpdates.length > 0;
        if (hasChanges && snapshot.rootDirectoryUrl) {
            debug(`push: batch-updating directory "${dirLabel}" (+${newEntries.length} new, ~${updatedEntries.length} updated, -${deletedNames.length} deleted, ${subdirUpdates.length} subdir URL updates)`);
            await this.batchUpdateDirectory(snapshot, dirPath, newEntries, updatedEntries, deletedNames, subdirUpdates);
            modifiedDirs.add(dirPath);
            result.directoriesChanged++;
        }
    }
    debug(`push: complete - ${result.filesChanged} files, ${result.directoriesChanged} dirs changed, ${result.errors.length} errors`);
    return result;
}
706
- /**
707
- * Phase 2: Pull remote changes to local filesystem
708
- */
709
- async pullRemoteChanges(changes, snapshot) {
710
- const result = {
711
- success: true,
712
- filesChanged: 0,
713
- directoriesChanged: 0,
714
- errors: [],
715
- warnings: [],
716
- };
717
- // Process remote changes
718
- const remoteChanges = changes.filter(c => c.changeType === ChangeType.REMOTE_ONLY ||
719
- c.changeType === ChangeType.BOTH_CHANGED);
720
- // Sort changes by dependency order (parents before children)
721
- const sortedChanges = this.sortChangesByDependency(remoteChanges);
722
- for (const change of sortedChanges) {
723
- try {
724
- await this.applyRemoteChangeToLocal(change, snapshot);
725
- result.filesChanged++;
726
- }
727
- catch (error) {
728
- result.errors.push({
729
- path: change.path,
730
- operation: "remote-to-local",
731
- error: error,
732
- recoverable: true,
733
- });
734
- }
735
- }
736
- return result;
737
- }
738
/**
 * Apply remote change to local filesystem.
 *
 * Writes (or removes) the changed file on disk and keeps the snapshot's
 * file entry in step: head is advanced to the remote head, and the entry
 * URL is refreshed when the remote document was replaced. For files not yet
 * in the snapshot, the entry URL is discovered from the directory hierarchy.
 *
 * @param change   remote change; remoteContent === null means remote deletion
 * @param snapshot mutable sync snapshot, updated in place
 * @throws if the change carries no remoteHead
 */
async applyRemoteChangeToLocal(change, snapshot) {
    const localPath = joinAndNormalizePath(this.rootPath, change.path);
    if (!change.remoteHead) {
        throw new Error(`No remote head found for remote change to ${change.path}`);
    }
    // Check for null (empty string/Uint8Array are valid content)
    if (change.remoteContent === null) {
        // File was deleted remotely
        await removePath(localPath);
        this.snapshotManager.removeFileEntry(snapshot, change.path);
        return;
    }
    // Create or update local file
    await writeFileContent(localPath, change.remoteContent);
    // Update or create snapshot entry for this file
    const snapshotEntry = snapshot.files.get(change.path);
    if (snapshotEntry) {
        // Update existing entry
        snapshotEntry.head = change.remoteHead;
        // If the remote document was replaced (new URL), update the snapshot URL
        if (change.remoteUrl) {
            const fileHandle = await this.repo.find(change.remoteUrl);
            snapshotEntry.url = this.getEntryUrl(fileHandle, change.path);
        }
    }
    else {
        // Create new snapshot entry for newly discovered remote file.
        // We need to find the remote file's URL from the directory hierarchy.
        if (snapshot.rootDirectoryUrl) {
            try {
                const fileEntry = await findFileInDirectoryHierarchy(this.repo, snapshot.rootDirectoryUrl, change.path);
                if (fileEntry) {
                    const fileHandle = await this.repo.find(fileEntry.url);
                    const entryUrl = this.getEntryUrl(fileHandle, change.path);
                    this.snapshotManager.updateFileEntry(snapshot, change.path, {
                        path: localPath,
                        url: entryUrl,
                        head: change.remoteHead,
                        extension: getFileExtension(change.path),
                        mimeType: getEnhancedMimeType(change.path),
                    });
                }
            }
            catch (error) {
                // Failed to update snapshot - file may have been deleted.
                // Best effort: the local file was already written above; only
                // the snapshot entry is missing, so warn rather than fail.
                out.taskLine(`Warning: Failed to update snapshot for remote file ${change.path}`, true);
            }
        }
    }
}
791
/**
 * Apply move to remote documents.
 *
 * Five-step process: (1) drop the entry from the old directory doc,
 * (2) ensure the destination directory doc exists, (3) rename (and
 * optionally rewrite) the file document — artifact files get a brand-new
 * RawString document instead, (4) add the entry to the destination
 * directory, (5) update the snapshot. Failures in step 3+ are reported as
 * a warning (best effort), not thrown.
 *
 * @param move     { fromPath, toPath, newContent? } — newContent present means
 *                 the file was modified as part of the move
 * @param snapshot mutable sync snapshot, updated in place
 */
async applyMoveToRemote(move, snapshot) {
    const fromEntry = snapshot.files.get(move.fromPath);
    if (!fromEntry)
        return; // nothing known about the source — nothing to move
    // Parse paths
    const toParts = move.toPath.split("/");
    const toFileName = toParts.pop() || "";
    const toDirPath = toParts.join("/");
    // 1) Remove file entry from old directory document
    if (move.fromPath !== move.toPath) {
        await this.removeFileFromDirectory(snapshot, move.fromPath);
    }
    // 2) Ensure destination directory document exists
    await this.ensureDirectoryDocument(snapshot, toDirPath);
    // 3) Update the FileDocument name and content to match new location/state
    try {
        let entryUrl;
        let finalHeads;
        if (this.isArtifactPath(move.toPath)) {
            // Artifact files use RawString — no diffing needed, just create a fresh doc.
            // If the move carries no new content, reuse the old document's content.
            const content = move.newContent !== undefined
                ? move.newContent
                : readDocContent((await (await this.repo.find(getPlainUrl(fromEntry.url))).doc())?.content);
            const fakeChange = {
                path: move.toPath,
                changeType: ChangeType.LOCAL_ONLY,
                fileType: content != null && typeof content === "string" ? FileType.TEXT : FileType.BINARY,
                localContent: content,
                remoteContent: null,
            };
            const newHandle = await this.createRemoteFile(fakeChange);
            if (!newHandle)
                return; // createRemoteFile returns null for null content
            entryUrl = this.getEntryUrl(newHandle, move.toPath);
            finalHeads = newHandle.heads();
        }
        else {
            // Use plain URL for mutable handle
            const handle = await this.repo.find(getPlainUrl(fromEntry.url));
            const heads = fromEntry.head;
            // Update both name and content (if content changed during move)
            changeWithOptionalHeads(handle, heads, (doc) => {
                doc.name = toFileName;
                // If new content is provided, update it (handles move + modification case)
                if (move.newContent !== undefined) {
                    if (typeof move.newContent === "string") {
                        updateTextContent(doc, ["content"], move.newContent);
                    }
                    else {
                        doc.content = move.newContent;
                    }
                }
            });
            entryUrl = this.getEntryUrl(handle, move.toPath);
            finalHeads = handle.heads();
            // Track file handle for network sync
            this.handlesByPath.set(move.toPath, handle);
        }
        // 4) Add file entry to destination directory
        await this.addFileToDirectory(snapshot, move.toPath, entryUrl);
        // 5) Update snapshot entries
        this.snapshotManager.removeFileEntry(snapshot, move.fromPath);
        this.snapshotManager.updateFileEntry(snapshot, move.toPath, {
            ...fromEntry,
            path: joinAndNormalizePath(this.rootPath, move.toPath),
            url: entryUrl,
            head: finalHeads,
            ...(this.isArtifactPath(move.toPath) && move.newContent != null
                ? { contentHash: contentHash(move.newContent) }
                : {}),
        });
    }
    catch (e) {
        // Failed to update file name - file may have been deleted
        out.taskLine(`Warning: Failed to rename ${move.fromPath} to ${move.toPath}`, true);
    }
}
871
/**
 * Create new remote file document.
 *
 * For artifact files, text content is stored as an immutable A.RawString;
 * for regular text files, the document is created with an empty string and
 * the content is spliced in afterwards so it is stored as collaborative
 * text. Binary content is stored as-is.
 *
 * @param change change whose localContent becomes the new document's content
 * @returns the new document handle, or null if there is no content to store
 */
async createRemoteFile(change) {
    if (change.localContent === null)
        return null;
    const isText = this.isTextContent(change.localContent);
    const isArtifact = this.isArtifactPath(change.path);
    // For artifact files, store text as RawString (immutable snapshot).
    // For regular files, store as collaborative text (empty string + splice).
    const fileDoc = {
        "@patchwork": { type: "file" },
        name: change.path.split("/").pop() || "",
        extension: getFileExtension(change.path),
        mimeType: getEnhancedMimeType(change.path),
        content: isText && isArtifact
            ? new A.RawString(change.localContent)
            : isText
                ? ""
                : change.localContent,
        metadata: {
            permissions: 0o644, // default rw-r--r--
        },
    };
    const handle = this.repo.create(fileDoc);
    // For non-artifact text files, splice in the content so it's stored as collaborative text
    if (isText && !isArtifact && typeof change.localContent === "string") {
        handle.change((doc) => {
            updateTextContent(doc, ["content"], change.localContent);
        });
    }
    // Always track newly created files for network sync
    // (they always represent a change that needs to sync)
    this.handlesByPath.set(change.path, handle);
    return handle;
}
907
/**
 * Update existing remote file document.
 *
 * Three outcomes:
 *  - Artifact path, unavailable doc, or immutable-string content: the
 *    document is REPLACED by a fresh one (via createRemoteFile) and the
 *    snapshot entry's URL/head are rewritten.
 *  - Content unchanged: only the snapshot head is refreshed (prevents fresh
 *    change detection from seeing stale heads); handle is NOT tracked for sync.
 *  - Content changed: changeAt() against the snapshot heads, snapshot head
 *    refreshed, handle tracked for network sync.
 *
 * @param url      document URL from the snapshot entry (possibly versioned)
 * @param content  new file content (string or binary)
 * @param snapshot mutable sync snapshot, updated in place
 * @param filePath repo-relative path of the file
 * @throws if a content change is needed but no snapshot heads exist
 */
async updateRemoteFile(url, content, snapshot, filePath) {
    // Use plain URL for mutable handle
    const handle = await this.repo.find(getPlainUrl(url));
    // Check if content actually changed before tracking for sync
    const doc = await handle.doc();
    const rawContent = doc?.content;
    // For artifact paths, always replace with a new document containing RawString.
    // For non-artifact paths with immutable strings, replace with mutable text.
    // In both cases we create a new document and update the snapshot URL.
    const isArtifact = this.isArtifactPath(filePath);
    if (isArtifact ||
        !doc ||
        (rawContent != null && A.isImmutableString(rawContent))) {
        if (!isArtifact) {
            // Only warn for the unexpected cases; artifact replacement is routine
            out.taskLine(`Replacing ${!doc ? 'unavailable' : 'immutable string'} document for ${filePath}`, true);
        }
        // Synthesize a LOCAL_ONLY change so createRemoteFile builds the doc
        const fakeChange = {
            path: filePath,
            changeType: ChangeType.LOCAL_ONLY,
            fileType: this.isTextContent(content)
                ? FileType.TEXT
                : FileType.BINARY,
            localContent: content,
            remoteContent: null,
        };
        const newHandle = await this.createRemoteFile(fakeChange);
        if (newHandle) {
            const entryUrl = this.getEntryUrl(newHandle, filePath);
            this.snapshotManager.updateFileEntry(snapshot, filePath, {
                path: joinAndNormalizePath(this.rootPath, filePath),
                url: entryUrl,
                head: newHandle.heads(),
                extension: getFileExtension(filePath),
                mimeType: getEnhancedMimeType(filePath),
                ...(this.isArtifactPath(filePath)
                    ? { contentHash: contentHash(content) }
                    : {}),
            });
        }
        return;
    }
    const currentContent = readDocContent(rawContent);
    const contentChanged = !isContentEqual(content, currentContent);
    // Update snapshot heads even when content is identical
    const snapshotEntry = snapshot.files.get(filePath);
    if (snapshotEntry) {
        // Update snapshot with current document heads
        snapshot.files.set(filePath, {
            ...snapshotEntry,
            head: handle.heads(),
        });
    }
    if (!contentChanged) {
        // Content is identical, but we've updated the snapshot heads above.
        // This prevents fresh change detection from seeing stale heads.
        return;
    }
    // NOTE: heads read before the snapshot refresh above were snapshotEntry.head;
    // after the refresh this is the handle's current heads — changeAt forks from there.
    const heads = snapshotEntry?.head;
    if (!heads) {
        throw new Error(`No heads found for ${url}`);
    }
    handle.changeAt(heads, (doc) => {
        if (typeof content === "string") {
            updateTextContent(doc, ["content"], content);
        }
        else {
            doc.content = content;
        }
    });
    // Update snapshot with new heads after content change
    if (snapshotEntry) {
        snapshot.files.set(filePath, {
            ...snapshotEntry,
            head: handle.heads(),
        });
    }
    // Only track files that actually changed content
    this.handlesByPath.set(filePath, handle);
}
989
/**
 * Delete remote file document — intentionally a no-op.
 *
 * In Automerge, we don't actually delete documents.
 * The file entry is removed from its parent directory, making the
 * document orphaned. Clearing content via splice is expensive for
 * large text files (every character is a CRDT op), so we skip it.
 *
 * Parameters are kept (underscored) so the call sites read naturally and
 * the signature can grow real behavior later without touching callers.
 */
async deleteRemoteFile(_url, _snapshot, _filePath) {
    // Deliberately empty — see doc comment above.
}
998
/**
 * Add file entry to appropriate directory document (maintains hierarchy).
 *
 * Ensures the parent directory document exists, then appends a
 * { name, type: "file", url } entry unless one with the same name already
 * exists. The directory handle is always registered for network sync, even
 * when no entry was added (preserves leaf-first sync ordering).
 *
 * @param snapshot mutable sync snapshot
 * @param filePath repo-relative path of the file being added
 * @param fileUrl  entry URL to store in the directory document
 */
async addFileToDirectory(snapshot, filePath, fileUrl) {
    if (!snapshot.rootDirectoryUrl)
        return; // no directory hierarchy configured
    const pathParts = filePath.split("/");
    const fileName = pathParts.pop() || "";
    const directoryPath = pathParts.join("/");
    // Get or create the parent directory document
    const parentDirUrl = await this.ensureDirectoryDocument(snapshot, directoryPath);
    // Use plain URL for mutable handle
    const dirHandle = await this.repo.find(getPlainUrl(parentDirUrl));
    let didChange = false;
    const snapshotEntry = snapshot.directories.get(directoryPath);
    const heads = snapshotEntry?.head;
    changeWithOptionalHeads(dirHandle, heads, (doc) => {
        // Idempotent: skip if an entry with this file name already exists
        const existingIndex = doc.docs.findIndex(entry => entry.name === fileName && entry.type === "file");
        if (existingIndex === -1) {
            doc.docs.push({
                name: fileName,
                type: "file",
                url: fileUrl,
            });
            didChange = true;
        }
    });
    // Always track the directory (even if unchanged) for proper leaf-first sync ordering
    this.handlesByPath.set(directoryPath, dirHandle);
    if (didChange && snapshotEntry) {
        snapshotEntry.head = dirHandle.heads();
    }
}
1031
/**
 * Ensure directory document exists for the given path, creating hierarchy as needed.
 * First checks for existing shared directories before creating new ones.
 *
 * Resolution order:
 *  1. Root path ("") resolves to snapshot.rootDirectoryUrl.
 *  2. A directory already in the snapshot resolves to its stored URL.
 *  3. DISCOVERY: recursively ensure the parent, then look for a matching
 *     "folder" entry inside the parent document on the server.
 *  4. CREATE: otherwise create a fresh folder document and link it into
 *     the parent.
 *
 * @param snapshot      mutable sync snapshot, updated in place
 * @param directoryPath repo-relative directory path ("" for root)
 * @returns the directory entry URL usable in parent documents
 */
async ensureDirectoryDocument(snapshot, directoryPath) {
    // Root directory case
    if (!directoryPath || directoryPath === "") {
        return snapshot.rootDirectoryUrl;
    }
    // Check if we already have this directory in snapshot
    const existingDir = snapshot.directories.get(directoryPath);
    if (existingDir) {
        return existingDir.url;
    }
    // Split path into parent and current directory name
    const pathParts = directoryPath.split("/");
    const currentDirName = pathParts.pop() || "";
    const parentPath = pathParts.join("/");
    // Ensure parent directory exists first (recursive)
    const parentDirUrl = await this.ensureDirectoryDocument(snapshot, parentPath);
    // DISCOVERY: Check if directory already exists in parent on server
    try {
        const parentHandle = await this.repo.find(parentDirUrl);
        const parentDoc = await parentHandle.doc();
        if (parentDoc) {
            const existingDirEntry = parentDoc.docs.find((entry) => entry.name === currentDirName && entry.type === "folder");
            if (existingDirEntry) {
                // Resolve the actual directory handle and use its current heads.
                // Directory entries in parent docs may not carry valid heads.
                try {
                    const childDirHandle = await this.repo.find(existingDirEntry.url);
                    // Track discovered directory for sync
                    this.handlesByPath.set(directoryPath, childDirHandle);
                    // Get appropriate URL for directory entry
                    const entryUrl = this.getEntryUrl(childDirHandle, directoryPath);
                    // Update snapshot with discovered directory
                    this.snapshotManager.updateDirectoryEntry(snapshot, directoryPath, {
                        path: joinAndNormalizePath(this.rootPath, directoryPath),
                        url: entryUrl,
                        head: childDirHandle.heads(),
                        entries: [],
                    });
                    return entryUrl;
                }
                catch (resolveErr) {
                    // Failed to resolve directory - fall through to create a fresh directory document
                }
            }
        }
    }
    catch (error) {
        // Failed to check for existing directory - will create new one
    }
    // CREATE: Directory doesn't exist, create new one
    const dirDoc = {
        "@patchwork": { type: "folder" },
        name: currentDirName,
        title: currentDirName,
        docs: [],
    };
    const dirHandle = this.repo.create(dirDoc);
    // Get appropriate URL for directory entry
    const dirEntryUrl = this.getEntryUrl(dirHandle, directoryPath);
    // Add this directory to its parent.
    // Use plain URL for mutable handle.
    const parentHandle = await this.repo.find(getPlainUrl(parentDirUrl));
    let didChange = false;
    parentHandle.change((doc) => {
        // Double-check that entry doesn't exist (race condition protection)
        const existingIndex = doc.docs.findIndex((entry) => entry.name === currentDirName && entry.type === "folder");
        if (existingIndex === -1) {
            doc.docs.push({
                name: currentDirName,
                type: "folder",
                url: dirEntryUrl,
            });
            didChange = true;
        }
    });
    // Track directory handles for sync
    this.handlesByPath.set(directoryPath, dirHandle);
    if (didChange) {
        this.handlesByPath.set(parentPath, parentHandle);
        const parentSnapshotEntry = snapshot.directories.get(parentPath);
        if (parentSnapshotEntry) {
            parentSnapshotEntry.head = parentHandle.heads();
        }
    }
    // Update snapshot with new directory
    this.snapshotManager.updateDirectoryEntry(snapshot, directoryPath, {
        path: joinAndNormalizePath(this.rootPath, directoryPath),
        url: dirEntryUrl,
        head: dirHandle.heads(),
        entries: [],
    });
    return dirEntryUrl;
}
1128
- /**
1129
- * Remove file entry from directory document
1130
- */
1131
- async removeFileFromDirectory(snapshot, filePath) {
1132
- if (!snapshot.rootDirectoryUrl)
1133
- return;
1134
- const pathParts = filePath.split("/");
1135
- const fileName = pathParts.pop() || "";
1136
- const directoryPath = pathParts.join("/");
1137
- // Get the parent directory URL
1138
- let parentDirUrl;
1139
- if (!directoryPath || directoryPath === "") {
1140
- parentDirUrl = snapshot.rootDirectoryUrl;
1141
- }
1142
- else {
1143
- const existingDir = snapshot.directories.get(directoryPath);
1144
- if (!existingDir) {
1145
- // Directory not found - file may already be removed
1146
- return;
1147
- }
1148
- parentDirUrl = existingDir.url;
1149
- }
1150
- try {
1151
- // Use plain URL for mutable handle
1152
- const dirHandle = await this.repo.find(getPlainUrl(parentDirUrl));
1153
- // Track this handle for network sync waiting
1154
- this.handlesByPath.set(directoryPath, dirHandle);
1155
- const snapshotEntry = snapshot.directories.get(directoryPath);
1156
- const heads = snapshotEntry?.head;
1157
- let didChange = false;
1158
- changeWithOptionalHeads(dirHandle, heads, (doc) => {
1159
- const indexToRemove = doc.docs.findIndex(entry => entry.name === fileName && entry.type === "file");
1160
- if (indexToRemove !== -1) {
1161
- doc.docs.splice(indexToRemove, 1);
1162
- didChange = true;
1163
- out.taskLine(`Removed ${fileName} from ${formatRelativePath(directoryPath) || "root"}`);
1164
- }
1165
- });
1166
- if (didChange && snapshotEntry) {
1167
- snapshotEntry.head = dirHandle.heads();
1168
- }
1169
- }
1170
- catch (error) {
1171
- throw error;
1172
- }
1173
- }
1174
/**
 * Batch-update a directory document in a single change: add new file entries,
 * update URLs for modified files, remove deleted entries, and update
 * subdirectory URLs. This replaces the separate per-file directory mutations
 * and the post-hoc URL update pass.
 *
 * @param snapshot       mutable sync snapshot
 * @param dirPath        repo-relative directory path ("" for root)
 * @param newEntries     [{ name, url }] file entries to add
 * @param updatedEntries [{ name, url }] file entries whose URL changed
 * @param deletedNames   file names to splice out
 * @param subdirUpdates  [{ name, url }] folder entries whose URL changed
 */
async batchUpdateDirectory(snapshot, dirPath, newEntries, updatedEntries, deletedNames, subdirUpdates) {
    let dirUrl;
    if (!dirPath || dirPath === "") {
        dirUrl = snapshot.rootDirectoryUrl;
    }
    else {
        const dirEntry = snapshot.directories.get(dirPath);
        if (!dirEntry)
            return; // unknown directory — nothing to update
        dirUrl = dirEntry.url;
    }
    const dirHandle = await this.repo.find(getPlainUrl(dirUrl));
    const snapshotEntry = snapshot.directories.get(dirPath);
    const heads = snapshotEntry?.head;
    // Determine directory name (root uses the local folder's basename)
    const dirName = dirPath ? dirPath.split("/").pop() || "" : path.basename(this.rootPath);
    changeWithOptionalHeads(dirHandle, heads, (doc) => {
        // Ensure name and title fields are set
        if (!doc.name)
            doc.name = dirName;
        if (!doc.title)
            doc.title = dirName;
        // Remove deleted file entries
        for (const name of deletedNames) {
            const idx = doc.docs.findIndex(entry => entry.name === name && entry.type === "file");
            if (idx !== -1) {
                doc.docs.splice(idx, 1);
                out.taskLine(`Removed ${name} from ${formatRelativePath(dirPath) || "root"}`);
            }
        }
        // Update URLs for modified files
        for (const { name, url } of updatedEntries) {
            const idx = doc.docs.findIndex(entry => entry.name === name && entry.type === "file");
            if (idx !== -1) {
                doc.docs[idx].url = url;
            }
        }
        // Add new file entries
        for (const { name, url } of newEntries) {
            const existing = doc.docs.findIndex(entry => entry.name === name && entry.type === "file");
            if (existing === -1) {
                doc.docs.push({ name, type: "file", url });
            }
            else {
                // Entry already exists (e.g. from immutable string replacement)
                doc.docs[existing].url = url;
            }
        }
        // Update subdirectory URLs with current heads
        for (const { name, url } of subdirUpdates) {
            const idx = doc.docs.findIndex(entry => entry.name === name && entry.type === "folder");
            if (idx !== -1) {
                doc.docs[idx].url = url;
            }
        }
    });
    // Track directory handle and update snapshot heads
    this.handlesByPath.set(dirPath, dirHandle);
    if (snapshotEntry) {
        snapshotEntry.head = dirHandle.heads();
    }
}
1242
- /**
1243
- * Sort changes by dependency order
1244
- */
1245
- sortChangesByDependency(changes) {
1246
- // Sort by path depth (shallower paths first)
1247
- return changes.sort((a, b) => {
1248
- const depthA = a.path.split("/").length;
1249
- const depthB = b.path.split("/").length;
1250
- return depthA - depthB;
1251
- });
1252
- }
1253
- /**
1254
- * Get sync status
1255
- */
1256
- async getStatus() {
1257
- const snapshot = await this.snapshotManager.load();
1258
- if (!snapshot) {
1259
- return {
1260
- snapshot: null,
1261
- hasChanges: false,
1262
- changeCount: 0,
1263
- lastSync: null,
1264
- };
1265
- }
1266
- const changes = await this.changeDetector.detectChanges(snapshot);
1267
- return {
1268
- snapshot,
1269
- hasChanges: changes.length > 0,
1270
- changeCount: changes.length,
1271
- lastSync: new Date(snapshot.timestamp),
1272
- };
1273
- }
1274
/**
 * Preview changes without applying them.
 *
 * Loads the snapshot, runs change and move detection, and builds a
 * human-readable summary. Nothing is written locally or remotely.
 *
 * @returns { changes, moves, summary } — empty lists and an instructive
 *          summary string when no snapshot exists yet
 */
async previewChanges() {
    const snapshot = await this.snapshotManager.load();
    if (!snapshot) {
        return {
            changes: [],
            moves: [],
            summary: "No snapshot found - run init first",
        };
    }
    const changes = await this.changeDetector.detectChanges(snapshot);
    const { moves } = await this.moveDetector.detectMoves(changes, snapshot);
    const summary = this.generateChangeSummary(changes, moves);
    return { changes, moves, summary };
}
1291
- /**
1292
- * Generate human-readable summary of changes
1293
- */
1294
- generateChangeSummary(changes, moves) {
1295
- const localChanges = changes.filter(c => c.changeType === ChangeType.LOCAL_ONLY ||
1296
- c.changeType === ChangeType.BOTH_CHANGED).length;
1297
- const remoteChanges = changes.filter(c => c.changeType === ChangeType.REMOTE_ONLY ||
1298
- c.changeType === ChangeType.BOTH_CHANGED).length;
1299
- const conflicts = changes.filter(c => c.changeType === ChangeType.BOTH_CHANGED).length;
1300
- const parts = [];
1301
- if (localChanges > 0) {
1302
- parts.push(`${localChanges} local change${localChanges > 1 ? "s" : ""}`);
1303
- }
1304
- if (remoteChanges > 0) {
1305
- parts.push(`${remoteChanges} remote change${remoteChanges > 1 ? "s" : ""}`);
1306
- }
1307
- if (moves.length > 0) {
1308
- parts.push(`${moves.length} potential move${moves.length > 1 ? "s" : ""}`);
1309
- }
1310
- if (conflicts > 0) {
1311
- parts.push(`${conflicts} conflict${conflicts > 1 ? "s" : ""}`);
1312
- }
1313
- if (parts.length === 0) {
1314
- return "No changes detected";
1315
- }
1316
- return parts.join(", ");
1317
- }
1318
- /**
1319
- * Update the lastSyncAt timestamp on the root directory document
1320
- */
1321
- async touchRootDirectory(snapshot) {
1322
- if (!snapshot.rootDirectoryUrl) {
1323
- return;
1324
- }
1325
- try {
1326
- const rootHandle = await this.repo.find(snapshot.rootDirectoryUrl);
1327
- const snapshotEntry = snapshot.directories.get("");
1328
- const heads = snapshotEntry?.head;
1329
- const timestamp = Date.now();
1330
- const version = require("../../package.json").version;
1331
- changeWithOptionalHeads(rootHandle, heads, (doc) => {
1332
- doc.lastSyncAt = timestamp;
1333
- doc.with = `pushwork@${version}`;
1334
- });
1335
- // Track root directory for network sync
1336
- this.handlesByPath.set("", rootHandle);
1337
- if (snapshotEntry) {
1338
- snapshotEntry.head = rootHandle.heads();
1339
- }
1340
- }
1341
- catch (error) {
1342
- // Failed to update root directory timestamp
1343
- }
1344
- }
1345
- }
1346
- //# sourceMappingURL=sync-engine.js.map