pushwork 2.0.0-a.sub.1 → 2.0.0-preview

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (251) hide show
  1. package/dist/branches.d.ts +19 -0
  2. package/dist/branches.d.ts.map +1 -0
  3. package/dist/branches.js +111 -0
  4. package/dist/branches.js.map +1 -0
  5. package/dist/cli.d.ts +1 -1
  6. package/dist/cli.d.ts.map +1 -1
  7. package/dist/cli.js +238 -272
  8. package/dist/cli.js.map +1 -1
  9. package/dist/config.d.ts +17 -0
  10. package/dist/config.d.ts.map +1 -0
  11. package/dist/config.js +84 -0
  12. package/dist/config.js.map +1 -0
  13. package/dist/fs-tree.d.ts +6 -0
  14. package/dist/fs-tree.d.ts.map +1 -0
  15. package/dist/fs-tree.js +99 -0
  16. package/dist/fs-tree.js.map +1 -0
  17. package/dist/ignore.d.ts +6 -0
  18. package/dist/ignore.d.ts.map +1 -0
  19. package/dist/ignore.js +74 -0
  20. package/dist/ignore.js.map +1 -0
  21. package/dist/index.d.ts +8 -4
  22. package/dist/index.d.ts.map +1 -1
  23. package/dist/index.js +34 -4
  24. package/dist/index.js.map +1 -1
  25. package/dist/log.d.ts +3 -0
  26. package/dist/log.d.ts.map +1 -0
  27. package/dist/log.js +14 -0
  28. package/dist/log.js.map +1 -0
  29. package/dist/pushwork.d.ts +115 -0
  30. package/dist/pushwork.d.ts.map +1 -0
  31. package/dist/pushwork.js +918 -0
  32. package/dist/pushwork.js.map +1 -0
  33. package/dist/repo.d.ts +14 -0
  34. package/dist/repo.d.ts.map +1 -0
  35. package/dist/repo.js +60 -0
  36. package/dist/repo.js.map +1 -0
  37. package/dist/shapes/custom.d.ts +3 -0
  38. package/dist/shapes/custom.d.ts.map +1 -0
  39. package/dist/shapes/custom.js +57 -0
  40. package/dist/shapes/custom.js.map +1 -0
  41. package/dist/shapes/file.d.ts +20 -0
  42. package/dist/shapes/file.d.ts.map +1 -0
  43. package/dist/shapes/file.js +140 -0
  44. package/dist/shapes/file.js.map +1 -0
  45. package/dist/shapes/index.d.ts +10 -0
  46. package/dist/shapes/index.d.ts.map +1 -0
  47. package/dist/shapes/index.js +35 -0
  48. package/dist/shapes/index.js.map +1 -0
  49. package/dist/shapes/patchwork-folder.d.ts +3 -0
  50. package/dist/shapes/patchwork-folder.d.ts.map +1 -0
  51. package/dist/shapes/patchwork-folder.js +160 -0
  52. package/dist/shapes/patchwork-folder.js.map +1 -0
  53. package/dist/shapes/types.d.ts +37 -0
  54. package/dist/shapes/types.d.ts.map +1 -0
  55. package/dist/shapes/types.js +52 -0
  56. package/dist/shapes/types.js.map +1 -0
  57. package/dist/shapes/vfs.d.ts +3 -0
  58. package/dist/shapes/vfs.d.ts.map +1 -0
  59. package/dist/shapes/vfs.js +88 -0
  60. package/dist/shapes/vfs.js.map +1 -0
  61. package/dist/stash.d.ts +23 -0
  62. package/dist/stash.d.ts.map +1 -0
  63. package/dist/stash.js +118 -0
  64. package/dist/stash.js.map +1 -0
  65. package/flake.lock +128 -0
  66. package/flake.nix +66 -0
  67. package/package.json +15 -48
  68. package/patches/@automerge__automerge-repo@2.6.0-subduction.15.patch +26 -0
  69. package/pnpm-workspace.yaml +5 -0
  70. package/src/branches.ts +93 -0
  71. package/src/cli.ts +258 -408
  72. package/src/config.ts +64 -0
  73. package/src/fs-tree.ts +70 -0
  74. package/src/ignore.ts +33 -0
  75. package/src/index.ts +38 -4
  76. package/src/log.ts +8 -0
  77. package/src/pushwork.ts +1055 -0
  78. package/src/repo.ts +76 -0
  79. package/src/shapes/custom.ts +29 -0
  80. package/src/shapes/file.ts +115 -0
  81. package/src/shapes/index.ts +19 -0
  82. package/src/shapes/patchwork-folder.ts +156 -0
  83. package/src/shapes/types.ts +79 -0
  84. package/src/shapes/vfs.ts +93 -0
  85. package/src/stash.ts +106 -0
  86. package/test/integration/branches.test.ts +389 -0
  87. package/test/integration/pushwork.test.ts +547 -0
  88. package/test/setup.ts +29 -0
  89. package/test/unit/doc-shape.test.ts +612 -0
  90. package/tsconfig.json +2 -3
  91. package/vitest.config.ts +14 -0
  92. package/ARCHITECTURE-ACCORDING-TO-CLAUDE.md +0 -248
  93. package/CLAUDE.md +0 -141
  94. package/README.md +0 -221
  95. package/babel.config.js +0 -5
  96. package/dist/cli/commands.d.ts +0 -71
  97. package/dist/cli/commands.d.ts.map +0 -1
  98. package/dist/cli/commands.js +0 -794
  99. package/dist/cli/commands.js.map +0 -1
  100. package/dist/cli/index.d.ts +0 -2
  101. package/dist/cli/index.d.ts.map +0 -1
  102. package/dist/cli/index.js +0 -19
  103. package/dist/cli/index.js.map +0 -1
  104. package/dist/commands.d.ts +0 -61
  105. package/dist/commands.d.ts.map +0 -1
  106. package/dist/commands.js +0 -861
  107. package/dist/commands.js.map +0 -1
  108. package/dist/config/index.d.ts +0 -71
  109. package/dist/config/index.d.ts.map +0 -1
  110. package/dist/config/index.js +0 -314
  111. package/dist/config/index.js.map +0 -1
  112. package/dist/core/change-detection.d.ts +0 -80
  113. package/dist/core/change-detection.d.ts.map +0 -1
  114. package/dist/core/change-detection.js +0 -523
  115. package/dist/core/change-detection.js.map +0 -1
  116. package/dist/core/config.d.ts +0 -81
  117. package/dist/core/config.d.ts.map +0 -1
  118. package/dist/core/config.js +0 -258
  119. package/dist/core/config.js.map +0 -1
  120. package/dist/core/index.d.ts +0 -6
  121. package/dist/core/index.d.ts.map +0 -1
  122. package/dist/core/index.js +0 -6
  123. package/dist/core/index.js.map +0 -1
  124. package/dist/core/move-detection.d.ts +0 -34
  125. package/dist/core/move-detection.d.ts.map +0 -1
  126. package/dist/core/move-detection.js +0 -121
  127. package/dist/core/move-detection.js.map +0 -1
  128. package/dist/core/snapshot.d.ts +0 -105
  129. package/dist/core/snapshot.d.ts.map +0 -1
  130. package/dist/core/snapshot.js +0 -217
  131. package/dist/core/snapshot.js.map +0 -1
  132. package/dist/core/sync-engine.d.ts +0 -157
  133. package/dist/core/sync-engine.d.ts.map +0 -1
  134. package/dist/core/sync-engine.js +0 -1379
  135. package/dist/core/sync-engine.js.map +0 -1
  136. package/dist/types/config.d.ts +0 -99
  137. package/dist/types/config.d.ts.map +0 -1
  138. package/dist/types/config.js +0 -5
  139. package/dist/types/config.js.map +0 -1
  140. package/dist/types/documents.d.ts +0 -88
  141. package/dist/types/documents.d.ts.map +0 -1
  142. package/dist/types/documents.js +0 -20
  143. package/dist/types/documents.js.map +0 -1
  144. package/dist/types/index.d.ts +0 -4
  145. package/dist/types/index.d.ts.map +0 -1
  146. package/dist/types/index.js +0 -4
  147. package/dist/types/index.js.map +0 -1
  148. package/dist/types/snapshot.d.ts +0 -64
  149. package/dist/types/snapshot.d.ts.map +0 -1
  150. package/dist/types/snapshot.js +0 -2
  151. package/dist/types/snapshot.js.map +0 -1
  152. package/dist/utils/content-similarity.d.ts +0 -53
  153. package/dist/utils/content-similarity.d.ts.map +0 -1
  154. package/dist/utils/content-similarity.js +0 -155
  155. package/dist/utils/content-similarity.js.map +0 -1
  156. package/dist/utils/content.d.ts +0 -10
  157. package/dist/utils/content.d.ts.map +0 -1
  158. package/dist/utils/content.js +0 -31
  159. package/dist/utils/content.js.map +0 -1
  160. package/dist/utils/directory.d.ts +0 -24
  161. package/dist/utils/directory.d.ts.map +0 -1
  162. package/dist/utils/directory.js +0 -52
  163. package/dist/utils/directory.js.map +0 -1
  164. package/dist/utils/fs.d.ts +0 -74
  165. package/dist/utils/fs.d.ts.map +0 -1
  166. package/dist/utils/fs.js +0 -248
  167. package/dist/utils/fs.js.map +0 -1
  168. package/dist/utils/index.d.ts +0 -5
  169. package/dist/utils/index.d.ts.map +0 -1
  170. package/dist/utils/index.js +0 -5
  171. package/dist/utils/index.js.map +0 -1
  172. package/dist/utils/mime-types.d.ts +0 -13
  173. package/dist/utils/mime-types.d.ts.map +0 -1
  174. package/dist/utils/mime-types.js +0 -209
  175. package/dist/utils/mime-types.js.map +0 -1
  176. package/dist/utils/network-sync.d.ts +0 -36
  177. package/dist/utils/network-sync.d.ts.map +0 -1
  178. package/dist/utils/network-sync.js +0 -250
  179. package/dist/utils/network-sync.js.map +0 -1
  180. package/dist/utils/node-polyfills.d.ts +0 -9
  181. package/dist/utils/node-polyfills.d.ts.map +0 -1
  182. package/dist/utils/node-polyfills.js +0 -9
  183. package/dist/utils/node-polyfills.js.map +0 -1
  184. package/dist/utils/output.d.ts +0 -129
  185. package/dist/utils/output.d.ts.map +0 -1
  186. package/dist/utils/output.js +0 -368
  187. package/dist/utils/output.js.map +0 -1
  188. package/dist/utils/repo-factory.d.ts +0 -13
  189. package/dist/utils/repo-factory.d.ts.map +0 -1
  190. package/dist/utils/repo-factory.js +0 -46
  191. package/dist/utils/repo-factory.js.map +0 -1
  192. package/dist/utils/string-similarity.d.ts +0 -14
  193. package/dist/utils/string-similarity.d.ts.map +0 -1
  194. package/dist/utils/string-similarity.js +0 -39
  195. package/dist/utils/string-similarity.js.map +0 -1
  196. package/dist/utils/text-diff.d.ts +0 -37
  197. package/dist/utils/text-diff.d.ts.map +0 -1
  198. package/dist/utils/text-diff.js +0 -93
  199. package/dist/utils/text-diff.js.map +0 -1
  200. package/dist/utils/trace.d.ts +0 -19
  201. package/dist/utils/trace.d.ts.map +0 -1
  202. package/dist/utils/trace.js +0 -63
  203. package/dist/utils/trace.js.map +0 -1
  204. package/src/commands.ts +0 -1134
  205. package/src/core/change-detection.ts +0 -712
  206. package/src/core/config.ts +0 -313
  207. package/src/core/index.ts +0 -5
  208. package/src/core/move-detection.ts +0 -169
  209. package/src/core/snapshot.ts +0 -275
  210. package/src/core/sync-engine.ts +0 -1795
  211. package/src/types/config.ts +0 -111
  212. package/src/types/documents.ts +0 -91
  213. package/src/types/index.ts +0 -3
  214. package/src/types/snapshot.ts +0 -67
  215. package/src/utils/content.ts +0 -34
  216. package/src/utils/directory.ts +0 -73
  217. package/src/utils/fs.ts +0 -297
  218. package/src/utils/index.ts +0 -4
  219. package/src/utils/mime-types.ts +0 -244
  220. package/src/utils/network-sync.ts +0 -319
  221. package/src/utils/node-polyfills.ts +0 -8
  222. package/src/utils/output.ts +0 -450
  223. package/src/utils/repo-factory.ts +0 -73
  224. package/src/utils/string-similarity.ts +0 -54
  225. package/src/utils/text-diff.ts +0 -101
  226. package/src/utils/trace.ts +0 -70
  227. package/test/integration/README.md +0 -328
  228. package/test/integration/clone-test.sh +0 -310
  229. package/test/integration/conflict-resolution-test.sh +0 -309
  230. package/test/integration/debug-both-nested.sh +0 -74
  231. package/test/integration/debug-concurrent-nested.sh +0 -87
  232. package/test/integration/debug-nested.sh +0 -73
  233. package/test/integration/deletion-behavior-test.sh +0 -487
  234. package/test/integration/deletion-sync-test-simple.sh +0 -193
  235. package/test/integration/deletion-sync-test.sh +0 -297
  236. package/test/integration/exclude-patterns.test.ts +0 -144
  237. package/test/integration/full-integration-test.sh +0 -363
  238. package/test/integration/fuzzer.test.ts +0 -818
  239. package/test/integration/in-memory-sync.test.ts +0 -830
  240. package/test/integration/init-sync.test.ts +0 -89
  241. package/test/integration/manual-sync-test.sh +0 -84
  242. package/test/integration/sync-deletion.test.ts +0 -280
  243. package/test/integration/sync-flow.test.ts +0 -291
  244. package/test/jest.setup.ts +0 -34
  245. package/test/run-tests.sh +0 -225
  246. package/test/unit/deletion-behavior.test.ts +0 -249
  247. package/test/unit/enhanced-mime-detection.test.ts +0 -244
  248. package/test/unit/snapshot.test.ts +0 -404
  249. package/test/unit/sync-convergence.test.ts +0 -298
  250. package/test/unit/sync-timing.test.ts +0 -134
  251. package/test/unit/utils.test.ts +0 -366
@@ -1,1795 +0,0 @@
1
- import {
2
- AutomergeUrl,
3
- Repo,
4
- DocHandle,
5
- UrlHeads,
6
- parseAutomergeUrl,
7
- stringifyAutomergeUrl,
8
- } from "@automerge/automerge-repo"
9
- import * as A from "@automerge/automerge"
10
- import {
11
- SyncSnapshot,
12
- SyncResult,
13
- FileDocument,
14
- DirectoryDocument,
15
- ChangeType,
16
- FileType,
17
- MoveCandidate,
18
- DirectoryConfig,
19
- DetectedChange,
20
- } from "../types/index.js"
21
- import {
22
- writeFileContent,
23
- removePath,
24
- getFileExtension,
25
- getEnhancedMimeType,
26
- formatRelativePath,
27
- findFileInDirectoryHierarchy,
28
- joinAndNormalizePath,
29
- getPlainUrl,
30
- updateTextContent,
31
- readDocContent,
32
- } from "../utils/index.js"
33
- import {isContentEqual, contentHash} from "../utils/content.js"
34
- import {waitForSync, waitForBidirectionalSync} from "../utils/network-sync.js"
35
- import {SnapshotManager} from "./snapshot.js"
36
- import {ChangeDetector} from "./change-detection.js"
37
- import {MoveDetector} from "./move-detection.js"
38
- import {out} from "../utils/output.js"
39
- import * as path from "path"
40
-
41
- const isDebug = !!process.env.DEBUG
42
- function debug(...args: any[]) {
43
- if (isDebug) console.error("[pushwork:engine]", ...args)
44
- }
45
-
46
- /**
47
- * Apply a change to a document handle, using changeAt when heads are available
48
- * to branch from a known version, otherwise falling back to change.
49
- */
50
- function changeWithOptionalHeads<T>(
51
- handle: DocHandle<T>,
52
- heads: UrlHeads | undefined,
53
- callback: A.ChangeFn<T>
54
- ): void {
55
- if (heads && heads.length > 0) {
56
- handle.changeAt(heads, callback)
57
- } else {
58
- handle.change(callback)
59
- }
60
- }
61
-
62
- /**
63
- * Sync configuration constants
64
- */
65
- const BIDIRECTIONAL_SYNC_TIMEOUT_MS = 5000 // Timeout for bidirectional sync stability check
66
-
67
- /**
68
- * Bidirectional sync engine implementing two-phase sync
69
- */
70
- export class SyncEngine {
71
- private snapshotManager: SnapshotManager
72
- private changeDetector: ChangeDetector
73
- private moveDetector: MoveDetector
74
- // Map from path to handle for leaf-first sync ordering
75
- // Path depth determines sync order (deepest first)
76
- private handlesByPath: Map<string, DocHandle<unknown>> = new Map()
77
- private config: DirectoryConfig
78
-
79
- constructor(
80
- private repo: Repo,
81
- private rootPath: string,
82
- config: DirectoryConfig
83
- ) {
84
- this.config = config
85
- this.snapshotManager = new SnapshotManager(rootPath)
86
- this.changeDetector = new ChangeDetector(
87
- repo,
88
- rootPath,
89
- config.exclude_patterns,
90
- config.artifact_directories || []
91
- )
92
- this.moveDetector = new MoveDetector(config.sync.move_detection_threshold)
93
- }
94
-
95
- /**
96
- * Determine if content should be treated as text for Automerge text operations
97
- * Note: This method checks the runtime type. File type detection happens
98
- * during reading with isEnhancedTextFile() which now has better dev file support.
99
- */
100
- private isTextContent(content: string | Uint8Array): boolean {
101
- // Simply check the actual type of the content
102
- return typeof content === "string"
103
- }
104
-
105
- /**
106
- * Get a versioned URL from a handle (includes current heads).
107
- * This ensures clients can fetch the exact version of the document.
108
- */
109
- private getVersionedUrl(handle: DocHandle<unknown>): AutomergeUrl {
110
- const {documentId} = parseAutomergeUrl(handle.url)
111
- const heads = handle.heads()
112
- return stringifyAutomergeUrl({documentId, heads})
113
- }
114
-
115
- /**
116
- * Determine if a file path is inside an artifact directory.
117
- * Artifact files are stored as immutable strings (RawString) and
118
- * referenced with versioned URLs in directory entries.
119
- */
120
- private isArtifactPath(filePath: string): boolean {
121
- const artifactDirs = this.config.artifact_directories || []
122
- return artifactDirs.some(
123
- dir => filePath === dir || filePath.startsWith(dir + "/")
124
- )
125
- }
126
-
127
- /**
128
- * Get the appropriate URL for a file's directory entry.
129
- * Artifact paths get versioned URLs (with heads) for exact version fetching.
130
- * Non-artifact paths get plain URLs for collaborative editing.
131
- */
132
- private getEntryUrl(handle: DocHandle<unknown>, filePath: string): AutomergeUrl {
133
- if (this.isArtifactPath(filePath)) {
134
- return this.getVersionedUrl(handle)
135
- }
136
- return getPlainUrl(handle.url)
137
- }
138
-
139
- /**
140
- * Get the appropriate URL for a subdirectory's directory entry.
141
- * Always uses plain URLs — versioned URLs on directories can cause
142
- * issues where consumers see a version without the docs array.
143
- */
144
- private getDirEntryUrl(handle: DocHandle<unknown>): AutomergeUrl {
145
- return getPlainUrl(handle.url)
146
- }
147
-
148
- /**
149
- * Set the root directory URL in the snapshot
150
- */
151
- async setRootDirectoryUrl(url: AutomergeUrl): Promise<void> {
152
- let snapshot = await this.snapshotManager.load()
153
- if (!snapshot) {
154
- snapshot = this.snapshotManager.createEmpty()
155
- }
156
- snapshot.rootDirectoryUrl = url
157
- await this.snapshotManager.save(snapshot)
158
- }
159
-
160
- /**
161
- * Reset the snapshot, clearing all tracked files and directories.
162
- * Preserves the rootDirectoryUrl so sync can still operate.
163
- * Used by --force to re-sync every file.
164
- */
165
- async resetSnapshot(): Promise<void> {
166
- let snapshot = await this.snapshotManager.load()
167
- if (!snapshot) return
168
- this.snapshotManager.clear(snapshot)
169
- await this.snapshotManager.save(snapshot)
170
- }
171
-
172
- /**
173
- * Nuclear reset: clear the snapshot AND wipe the root directory document's
174
- * entries so that every file and subdirectory gets brand-new Automerge
175
- * documents. The root directory document itself is preserved.
176
- */
177
- async nuclearReset(): Promise<void> {
178
- let snapshot = await this.snapshotManager.load()
179
- if (!snapshot) return
180
-
181
- // Clear the root directory document's entries
182
- if (snapshot.rootDirectoryUrl) {
183
- const rootHandle = await this.repo.find<DirectoryDocument>(
184
- getPlainUrl(snapshot.rootDirectoryUrl)
185
- )
186
- rootHandle.change((doc: DirectoryDocument) => {
187
- doc.docs.splice(0, doc.docs.length)
188
- })
189
- }
190
-
191
- // Clear all tracked files and directories from snapshot
192
- this.snapshotManager.clear(snapshot)
193
- await this.snapshotManager.save(snapshot)
194
- }
195
-
196
- /**
197
- * Commit local changes only (no network sync)
198
- */
199
- async commitLocal(): Promise<SyncResult> {
200
- const result: SyncResult = {
201
- success: false,
202
- filesChanged: 0,
203
- directoriesChanged: 0,
204
- errors: [],
205
- warnings: [],
206
- }
207
-
208
- try {
209
- // Load current snapshot
210
- let snapshot = await this.snapshotManager.load()
211
- if (!snapshot) {
212
- snapshot = this.snapshotManager.createEmpty()
213
- }
214
-
215
- // Detect all changes
216
- const changes = await this.changeDetector.detectChanges(snapshot)
217
-
218
- // Detect moves
219
- const {moves, remainingChanges} = await this.moveDetector.detectMoves(
220
- changes,
221
- snapshot
222
- )
223
-
224
- // Apply local changes only (no network sync)
225
- const commitResult = await this.pushLocalChanges(
226
- remainingChanges,
227
- moves,
228
- snapshot
229
- )
230
-
231
- result.filesChanged += commitResult.filesChanged
232
- result.directoriesChanged += commitResult.directoriesChanged
233
- result.errors.push(...commitResult.errors)
234
- result.warnings.push(...commitResult.warnings)
235
-
236
- // Always touch root directory after commit
237
- await this.touchRootDirectory(snapshot)
238
-
239
- // Save updated snapshot
240
- await this.snapshotManager.save(snapshot)
241
-
242
- result.success = result.errors.length === 0
243
-
244
- return result
245
- } catch (error) {
246
- result.errors.push({
247
- path: this.rootPath,
248
- operation: "commitLocal",
249
- error: error instanceof Error ? error : new Error(String(error)),
250
- recoverable: true,
251
- })
252
- result.success = false
253
- return result
254
- }
255
- }
256
-
257
- /**
258
- * Recreate documents that failed to sync. Creates new Automerge documents
259
- * with the same content and updates all references (snapshot, parent directory).
260
- * Returns new handles that should be retried for sync.
261
- */
262
- private async recreateFailedDocuments(
263
- failedHandles: DocHandle<unknown>[],
264
- snapshot: SyncSnapshot
265
- ): Promise<DocHandle<unknown>[]> {
266
- const failedUrls = new Set(failedHandles.map(h => getPlainUrl(h.url)))
267
- const newHandles: DocHandle<unknown>[] = []
268
-
269
- // Find which paths correspond to the failed handles
270
- for (const [filePath, entry] of snapshot.files.entries()) {
271
- const plainUrl = getPlainUrl(entry.url)
272
- if (!failedUrls.has(plainUrl)) continue
273
-
274
- debug(`recreate: recreating document for ${filePath} (${plainUrl})`)
275
- out.taskLine(`Recreating document for ${filePath}`)
276
-
277
- try {
278
- // Read the current content from the old handle
279
- const oldHandle = await this.repo.find<FileDocument>(plainUrl)
280
- const doc = await oldHandle.doc()
281
- if (!doc) {
282
- debug(`recreate: could not read doc for ${filePath}, skipping`)
283
- continue
284
- }
285
-
286
- const content = readDocContent(doc.content)
287
- if (content === null) {
288
- debug(`recreate: null content for ${filePath}, skipping`)
289
- continue
290
- }
291
-
292
- // Create a fresh document
293
- const fakeChange: DetectedChange = {
294
- path: filePath,
295
- changeType: ChangeType.LOCAL_ONLY,
296
- fileType: this.isTextContent(content) ? FileType.TEXT : FileType.BINARY,
297
- localContent: content,
298
- remoteContent: null,
299
- }
300
- const newHandle = await this.createRemoteFile(fakeChange)
301
- if (!newHandle) continue
302
-
303
- const entryUrl = this.getEntryUrl(newHandle, filePath)
304
-
305
- // Update snapshot entry
306
- this.snapshotManager.updateFileEntry(snapshot, filePath, {
307
- ...entry,
308
- url: entryUrl,
309
- head: newHandle.heads(),
310
- ...(this.isArtifactPath(filePath) ? {contentHash: contentHash(content)} : {}),
311
- })
312
-
313
- // Update parent directory entry to point to new document
314
- const pathParts = filePath.split("/")
315
- const fileName = pathParts.pop() || ""
316
- const dirPath = pathParts.join("/")
317
-
318
- let dirUrl: AutomergeUrl
319
- if (!dirPath || dirPath === "") {
320
- dirUrl = snapshot.rootDirectoryUrl!
321
- } else {
322
- const dirEntry = snapshot.directories.get(dirPath)
323
- if (!dirEntry) continue
324
- dirUrl = dirEntry.url
325
- }
326
-
327
- const dirHandle = await this.repo.find<DirectoryDocument>(getPlainUrl(dirUrl))
328
- dirHandle.change((d: DirectoryDocument) => {
329
- const idx = d.docs.findIndex(
330
- e => e.name === fileName && e.type === "file"
331
- )
332
- if (idx !== -1) {
333
- d.docs[idx].url = entryUrl
334
- }
335
- })
336
-
337
- // Track new handles
338
- this.handlesByPath.set(filePath, newHandle)
339
- this.handlesByPath.set(dirPath, dirHandle)
340
- newHandles.push(newHandle)
341
- newHandles.push(dirHandle)
342
-
343
- debug(`recreate: created new doc for ${filePath} -> ${newHandle.url}`)
344
- } catch (error) {
345
- debug(`recreate: failed for ${filePath}: ${error}`)
346
- out.taskLine(`Failed to recreate ${filePath}: ${error}`, true)
347
- }
348
- }
349
-
350
- // Also check directory documents
351
- for (const [dirPath, entry] of snapshot.directories.entries()) {
352
- const plainUrl = getPlainUrl(entry.url)
353
- if (!failedUrls.has(plainUrl)) continue
354
-
355
- // Directory docs can't be easily recreated (they reference children).
356
- // Just log a warning — the child recreation above should handle most cases.
357
- debug(`recreate: directory ${dirPath || "(root)"} failed to sync, cannot recreate`)
358
- out.taskLine(`Warning: directory ${dirPath || "(root)"} failed to sync`, true)
359
- }
360
-
361
- return newHandles
362
- }
363
-
364
- /**
365
- * Run full bidirectional sync
366
- */
367
- async sync(): Promise<SyncResult> {
368
- const result: SyncResult = {
369
- success: false,
370
- filesChanged: 0,
371
- directoriesChanged: 0,
372
- errors: [],
373
- warnings: [],
374
- timings: {},
375
- }
376
-
377
- // Reset tracked handles for sync
378
- this.handlesByPath = new Map()
379
-
380
- try {
381
- // Load current snapshot
382
- const snapshot =
383
- (await this.snapshotManager.load()) ||
384
- this.snapshotManager.createEmpty()
385
-
386
- debug(`sync: rootDirectoryUrl=${snapshot.rootDirectoryUrl}, files=${snapshot.files.size}, dirs=${snapshot.directories.size}`)
387
-
388
- // Wait for initial sync to receive any pending remote changes
389
- if (this.config.sync_enabled && snapshot.rootDirectoryUrl) {
390
- debug("sync: waiting for root document to be ready")
391
- out.update("Waiting for root document from server")
392
-
393
- // Wait for the root document to be fetched from the network.
394
- // repo.find() rejects with "unavailable" if the server doesn't
395
- // have the document yet, so we retry with backoff.
396
- // This is critical for clone scenarios.
397
- const plainRootUrl = getPlainUrl(snapshot.rootDirectoryUrl)
398
- const maxAttempts = 6
399
- for (let attempt = 1; attempt <= maxAttempts; attempt++) {
400
- try {
401
- const rootHandle = await this.repo.find<DirectoryDocument>(plainRootUrl)
402
- rootHandle.doc() // throws if not ready
403
- debug(`sync: root document ready (attempt ${attempt})`)
404
- break
405
- } catch (error) {
406
- const isUnavailable = String(error).includes("unavailable") || String(error).includes("not ready")
407
- if (isUnavailable && attempt < maxAttempts) {
408
- const delay = Math.min(1000 * Math.pow(2, attempt - 1), 10000)
409
- debug(`sync: root document not available (attempt ${attempt}/${maxAttempts}), retrying in ${delay}ms`)
410
- out.update(`Waiting for root document (attempt ${attempt}/${maxAttempts})`)
411
- await new Promise(r => setTimeout(r, delay))
412
- } else {
413
- debug(`sync: root document unavailable after ${maxAttempts} attempts: ${error}`)
414
- out.taskLine(`Root document unavailable: ${error}`, true)
415
- break
416
- }
417
- }
418
- }
419
-
420
- debug("sync: waiting for initial bidirectional sync")
421
- out.update("Waiting for initial sync from server")
422
- try {
423
- await waitForBidirectionalSync(
424
- this.repo,
425
- snapshot.rootDirectoryUrl,
426
- {
427
- timeoutMs: 5000, // Increased timeout for initial sync
428
- pollIntervalMs: 100,
429
- stableChecksRequired: 3,
430
- }
431
- )
432
- } catch (error) {
433
- out.taskLine(`Initial sync: ${error}`, true)
434
- }
435
- }
436
-
437
- // Detect all changes
438
- debug("sync: detecting changes")
439
- out.update("Detecting local and remote changes")
440
- // Capture pre-push snapshot file paths to detect deletions after push
441
- const prePushFilePaths = new Set(snapshot.files.keys())
442
- const changes = await this.changeDetector.detectChanges(snapshot)
443
-
444
- // Detect moves
445
- const {moves, remainingChanges} = await this.moveDetector.detectMoves(
446
- changes,
447
- snapshot
448
- )
449
-
450
- debug(`sync: detected ${changes.length} changes, ${moves.length} moves, ${remainingChanges.length} remaining`)
451
-
452
- // Phase 1: Push local changes to remote
453
- debug("sync: phase 1 - pushing local changes")
454
- const phase1Result = await this.pushLocalChanges(
455
- remainingChanges,
456
- moves,
457
- snapshot
458
- )
459
-
460
- result.filesChanged += phase1Result.filesChanged
461
- result.directoriesChanged += phase1Result.directoriesChanged
462
- result.errors.push(...phase1Result.errors)
463
- result.warnings.push(...phase1Result.warnings)
464
-
465
- debug(`sync: phase 1 complete - ${phase1Result.filesChanged} files, ${phase1Result.directoriesChanged} dirs changed`)
466
-
467
- // Wait for network sync (important for clone scenarios)
468
- if (this.config.sync_enabled) {
469
- try {
470
- // Ensure root directory handle is tracked for sync
471
- if (snapshot.rootDirectoryUrl) {
472
- const rootHandle =
473
- await this.repo.find<DirectoryDocument>(
474
- snapshot.rootDirectoryUrl
475
- )
476
- this.handlesByPath.set("", rootHandle)
477
- }
478
-
479
- // Single waitForSync with ALL tracked handles at once
480
- if (this.handlesByPath.size > 0) {
481
- const allHandles = Array.from(
482
- this.handlesByPath.values()
483
- )
484
- const handlePaths = Array.from(this.handlesByPath.keys())
485
- debug(`sync: waiting for ${allHandles.length} handles to sync to server: ${handlePaths.slice(0, 10).map(p => p || "(root)").join(", ")}${handlePaths.length > 10 ? ` ...and ${handlePaths.length - 10} more` : ""}`)
486
- out.update(`Uploading ${allHandles.length} documents to sync server`)
487
- const {failed} = await waitForSync(
488
- allHandles
489
- )
490
-
491
- // Recreate failed documents and retry once
492
- if (failed.length > 0) {
493
- debug(`sync: ${failed.length} documents failed, recreating`)
494
- out.update(`Recreating ${failed.length} failed documents`)
495
- const retryHandles = await this.recreateFailedDocuments(failed, snapshot)
496
- if (retryHandles.length > 0) {
497
- debug(`sync: retrying ${retryHandles.length} recreated handles`)
498
- out.update(`Retrying ${retryHandles.length} recreated documents`)
499
- const retry = await waitForSync(
500
- retryHandles
501
- )
502
- if (retry.failed.length > 0) {
503
- const msg = `${retry.failed.length} documents failed to sync to server after recreation`
504
- debug(`sync: ${msg}`)
505
- result.errors.push({
506
- path: "sync",
507
- operation: "upload",
508
- error: new Error(msg),
509
- recoverable: true,
510
- })
511
- }
512
- }
513
- }
514
-
515
- debug("sync: all handles synced to server")
516
- }
517
-
518
- // Wait for bidirectional sync to stabilize
519
- // Use tracked handles for post-push check (cheaper than full tree scan)
520
- const changedHandles = Array.from(this.handlesByPath.values())
521
- debug(`sync: waiting for bidirectional sync to stabilize (${changedHandles.length} tracked handles)`)
522
- out.update("Waiting for bidirectional sync to stabilize")
523
- await waitForBidirectionalSync(
524
- this.repo,
525
- snapshot.rootDirectoryUrl,
526
- {
527
- timeoutMs: BIDIRECTIONAL_SYNC_TIMEOUT_MS,
528
- pollIntervalMs: 100,
529
- stableChecksRequired: 3,
530
- handles: changedHandles.length > 0 ? changedHandles : undefined,
531
- }
532
- )
533
-
534
- // Root directory touch + sync moved to end of sync() so it always runs
535
- } catch (error) {
536
- debug(`sync: network sync error: ${error}`)
537
- out.taskLine(`Network sync failed: ${error}`, true)
538
- result.errors.push({
539
- path: "sync",
540
- operation: "network-sync",
541
- error: error instanceof Error ? error : new Error(String(error)),
542
- recoverable: true,
543
- })
544
- }
545
- }
546
-
547
- // Re-detect changes after network sync for fresh state
548
- // Compute paths deleted during push so they aren't resurrected during pull
549
- const deletedPaths = new Set<string>()
550
- for (const p of prePushFilePaths) {
551
- if (!snapshot.files.has(p)) {
552
- deletedPaths.add(p)
553
- }
554
- }
555
- if (deletedPaths.size > 0) {
556
- debug(`sync: excluding ${deletedPaths.size} deleted paths from re-detection`)
557
- }
558
- debug("sync: re-detecting changes after network sync")
559
- const freshChanges = await this.changeDetector.detectChanges(snapshot, deletedPaths)
560
- const freshRemoteChanges = freshChanges.filter(
561
- c =>
562
- c.changeType === ChangeType.REMOTE_ONLY ||
563
- c.changeType === ChangeType.BOTH_CHANGED
564
- )
565
-
566
- debug(`sync: phase 2 - pulling ${freshRemoteChanges.length} remote changes`)
567
- if (freshRemoteChanges.length > 0) {
568
- out.update(`Pulling ${freshRemoteChanges.length} remote changes`)
569
- }
570
- // Phase 2: Pull remote changes to local using fresh detection
571
- const phase2Result = await this.pullRemoteChanges(
572
- freshRemoteChanges,
573
- snapshot
574
- )
575
- result.filesChanged += phase2Result.filesChanged
576
- result.directoriesChanged += phase2Result.directoriesChanged
577
- result.errors.push(...phase2Result.errors)
578
- result.warnings.push(...phase2Result.warnings)
579
-
580
- // Update snapshot heads after pulling remote changes
581
- // IMPORTANT: Use getPlainUrl() to strip version/heads from URLs.
582
- // Artifact entries store versioned URLs (with heads baked in).
583
- // repo.find(versionedUrl) returns a view handle whose .heads()
584
- // returns the VERSION heads, not the current document heads.
585
- // Using the versioned URL here would overwrite correct heads with
586
- // stale ones, causing changeAt() to fork from the wrong point
587
- // on the next sync (e.g. an empty directory state where deletions
588
- // can't find the entries to splice out).
589
- for (const [filePath, snapshotEntry] of snapshot.files.entries()) {
590
- try {
591
- const handle = await this.repo.find(getPlainUrl(snapshotEntry.url))
592
- const currentHeads = handle.heads()
593
- if (!A.equals(currentHeads, snapshotEntry.head)) {
594
- // Update snapshot with current heads after pulling changes
595
- snapshot.files.set(filePath, {
596
- ...snapshotEntry,
597
- head: currentHeads,
598
- })
599
- }
600
- } catch (error) {
601
- // Handle might not exist if file was deleted
602
- }
603
- }
604
-
605
- // Update directory document heads
606
- for (const [dirPath, snapshotEntry] of snapshot.directories.entries()) {
607
- try {
608
- const handle = await this.repo.find(getPlainUrl(snapshotEntry.url))
609
- const currentHeads = handle.heads()
610
- if (!A.equals(currentHeads, snapshotEntry.head)) {
611
- // Update snapshot with current heads after pulling changes
612
- snapshot.directories.set(dirPath, {
613
- ...snapshotEntry,
614
- head: currentHeads,
615
- })
616
- }
617
- } catch (error) {
618
- // Handle might not exist if directory was deleted
619
- }
620
- }
621
-
622
- // Small pause before touching root to let everything settle
623
- await new Promise(r => setTimeout(r, 100))
624
- // Always touch root directory after sync completes
625
- await this.touchRootDirectory(snapshot)
626
- if (this.config.sync_enabled && snapshot.rootDirectoryUrl) {
627
- const rootHandle =
628
- await this.repo.find<DirectoryDocument>(
629
- snapshot.rootDirectoryUrl
630
- )
631
- debug("sync: syncing root directory touch to server")
632
- out.update("Syncing root directory update")
633
- await waitForSync(
634
- [rootHandle]
635
- )
636
- // Wait for the touch to fully stabilize on the server
637
- debug("sync: waiting for root touch to stabilize")
638
- await waitForBidirectionalSync(
639
- this.repo,
640
- snapshot.rootDirectoryUrl,
641
- {
642
- timeoutMs: 5000,
643
- pollIntervalMs: 100,
644
- stableChecksRequired: 3,
645
- handles: [rootHandle],
646
- }
647
- )
648
- // Flush repo to ensure everything is persisted
649
- await this.repo.flush()
650
- // Small grace period to ensure server has flushed
651
- await new Promise(r => setTimeout(r, 100))
652
- }
653
-
654
- // Update root directory snapshot heads after touch
655
- const rootSnapshotEntry = snapshot.directories.get("")
656
- if (rootSnapshotEntry && snapshot.rootDirectoryUrl) {
657
- try {
658
- const rootHandle = await this.repo.find<DirectoryDocument>(
659
- getPlainUrl(snapshot.rootDirectoryUrl)
660
- )
661
- rootSnapshotEntry.head = rootHandle.heads()
662
- } catch (error) {
663
- debug(`sync: failed to update root snapshot heads after touch: ${error}`)
664
- }
665
- }
666
-
667
- // Save updated snapshot
668
- await this.snapshotManager.save(snapshot)
669
-
670
- result.success = result.errors.length === 0
671
- return result
672
- } catch (error) {
673
- result.errors.push({
674
- path: "sync",
675
- operation: "full-sync",
676
- error: error as Error,
677
- recoverable: false,
678
- })
679
- return result
680
- }
681
- }
682
-
683
/**
 * Phase 1: Push local changes to Automerge documents.
 *
 * Works depth-first: processes the deepest files first, creates/updates all
 * file docs at each level, then batch-updates the parent directory document
 * in a single change. Propagates subdirectory URL updates as we walk up
 * toward the root. This eliminates the need for a separate URL update pass.
 *
 * @param changes  All detected changes; only LOCAL_ONLY / BOTH_CHANGED entries
 *                 are pushed here.
 * @param moves    Detected moves/renames, applied before per-file pushes.
 * @param snapshot Mutable sync snapshot; file/directory entries and heads are
 *                 updated in place as documents are created or changed.
 * @returns Aggregate change counts plus per-path recoverable errors
 *          (this method does not throw for individual file failures).
 */
private async pushLocalChanges(
  changes: DetectedChange[],
  moves: MoveCandidate[],
  snapshot: SyncSnapshot
): Promise<SyncResult> {
  const result: SyncResult = {
    success: true,
    filesChanged: 0,
    directoriesChanged: 0,
    errors: [],
    warnings: [],
  }

  // Process moves first - all detected moves are applied
  if (moves.length > 0) {
    debug(`push: processing ${moves.length} moves`)
    out.update(`Processing ${moves.length} move${moves.length > 1 ? "s" : ""}`)
  }
  for (let i = 0; i < moves.length; i++) {
    const move = moves[i]
    try {
      debug(`push: move ${i + 1}/${moves.length}: ${move.fromPath} -> ${move.toPath}`)
      out.taskLine(`Moving ${move.fromPath} -> ${move.toPath}`)
      await this.applyMoveToRemote(move, snapshot)
      result.filesChanged++
    } catch (error) {
      // A failed move is recoverable: the file will be re-detected as a
      // delete+create pair on a later sync.
      debug(`push: move failed for ${move.fromPath}: ${error}`)
      result.errors.push({
        path: move.fromPath,
        operation: "move",
        error: error as Error,
        recoverable: true,
      })
    }
  }

  // Filter to local changes only
  const localChanges = changes.filter(
    c =>
      c.changeType === ChangeType.LOCAL_ONLY ||
      c.changeType === ChangeType.BOTH_CHANGED
  )

  if (localChanges.length === 0) {
    debug("push: no local changes to push")
    return result
  }

  // Classify for logging: presence in snapshot.files distinguishes
  // new vs. modified; localContent === null marks a deletion.
  const newFiles = localChanges.filter(c => !snapshot.files.has(c.path) && c.localContent !== null)
  const modifiedFiles = localChanges.filter(c => snapshot.files.has(c.path) && c.localContent !== null)
  const deletedFiles = localChanges.filter(c => c.localContent === null && snapshot.files.has(c.path))
  debug(`push: ${localChanges.length} local changes (${newFiles.length} new, ${modifiedFiles.length} modified, ${deletedFiles.length} deleted)`)
  out.update(`Pushing ${localChanges.length} local changes (${newFiles.length} new, ${modifiedFiles.length} modified, ${deletedFiles.length} deleted)`)

  // Group changes by parent directory path
  const changesByDir = new Map<string, DetectedChange[]>()
  for (const change of localChanges) {
    const pathParts = change.path.split("/")
    pathParts.pop() // remove filename
    const dirPath = pathParts.join("/")
    if (!changesByDir.has(dirPath)) {
      changesByDir.set(dirPath, [])
    }
    changesByDir.get(dirPath)!.push(change)
  }

  // Collect all directory paths that need processing:
  // directories with file changes + all ancestors up to root
  // (the root is represented by the empty string "")
  const allDirsToProcess = new Set<string>()
  for (const dirPath of changesByDir.keys()) {
    allDirsToProcess.add(dirPath)
    // Add ancestors so subdirectory URL updates propagate to root
    let current = dirPath
    while (current) {
      const parts = current.split("/")
      parts.pop()
      current = parts.join("/")
      allDirsToProcess.add(current)
    }
  }

  // Sort deepest-first so children are finalized before their parents,
  // letting parent directory docs pick up fresh child URLs in one pass.
  const sortedDirPaths = Array.from(allDirsToProcess).sort((a, b) => {
    const depthA = a ? a.split("/").length : 0
    const depthB = b ? b.split("/").length : 0
    return depthB - depthA
  })

  debug(`push: processing ${sortedDirPaths.length} directories (deepest first)`)

  // Track which directories were modified (for subdirectory URL propagation)
  const modifiedDirs = new Set<string>()
  let filesProcessed = 0
  const totalFiles = localChanges.length

  for (const dirPath of sortedDirPaths) {
    const dirChanges = changesByDir.get(dirPath) || []
    const dirLabel = dirPath || "(root)"

    if (dirChanges.length > 0) {
      debug(`push: directory "${dirLabel}": ${dirChanges.length} file changes`)
    }

    // Ensure directory document exists
    if (snapshot.rootDirectoryUrl) {
      await this.ensureDirectoryDocument(snapshot, dirPath)
    }

    // Process all file changes in this directory; accumulate entry
    // mutations so the directory doc can be updated in a single change.
    const newEntries: {name: string; url: AutomergeUrl}[] = []
    const updatedEntries: {name: string; url: AutomergeUrl}[] = []
    const deletedNames: string[] = []

    for (const change of dirChanges) {
      const fileName = change.path.split("/").pop() || ""
      const snapshotEntry = snapshot.files.get(change.path)
      filesProcessed++

      try {
        if (change.localContent === null && snapshotEntry) {
          // Delete file
          debug(`push: [${filesProcessed}/${totalFiles}] delete ${change.path}`)
          out.update(`Pushing local changes [${filesProcessed}/${totalFiles}] deleting ${change.path}`)
          await this.deleteRemoteFile(
            snapshotEntry.url,
            snapshot,
            change.path
          )
          deletedNames.push(fileName)
          this.snapshotManager.removeFileEntry(snapshot, change.path)
          result.filesChanged++
        } else if (!snapshotEntry) {
          // New file
          debug(`push: [${filesProcessed}/${totalFiles}] create ${change.path} (${change.fileType})`)
          out.update(`Pushing local changes [${filesProcessed}/${totalFiles}] creating ${change.path}`)
          const handle = await this.createRemoteFile(change)
          if (handle) {
            const entryUrl = this.getEntryUrl(handle, change.path)
            newEntries.push({name: fileName, url: entryUrl})
            this.snapshotManager.updateFileEntry(
              snapshot,
              change.path,
              {
                path: joinAndNormalizePath(
                  this.rootPath,
                  change.path
                ),
                url: entryUrl,
                head: handle.heads(),
                extension: getFileExtension(change.path),
                mimeType: getEnhancedMimeType(change.path),
                // Artifact files track a content hash so unchanged
                // artifacts can be skipped on later syncs.
                ...(this.isArtifactPath(change.path) && change.localContent
                  ? {contentHash: contentHash(change.localContent)}
                  : {}),
              }
            )
            result.filesChanged++
            debug(`push: created ${change.path} -> ${handle.url}`)
          }
        } else {
          // Update existing file
          const contentSize = typeof change.localContent === "string"
            ? `${change.localContent!.length} chars`
            : `${(change.localContent as Uint8Array).length} bytes`
          debug(`push: [${filesProcessed}/${totalFiles}] update ${change.path} (${contentSize})`)
          out.update(`Pushing local changes [${filesProcessed}/${totalFiles}] updating ${change.path}`)
          await this.updateRemoteFile(
            snapshotEntry.url,
            change.localContent!,
            snapshot,
            change.path
          )
          // Get current entry URL (updateRemoteFile updates snapshot,
          // and may have replaced the document with a new URL)
          const updatedFileEntry = snapshot.files.get(change.path)
          if (updatedFileEntry) {
            const fileHandle =
              await this.repo.find<FileDocument>(
                getPlainUrl(updatedFileEntry.url)
              )
            updatedEntries.push({
              name: fileName,
              url: this.getEntryUrl(fileHandle, change.path),
            })
          }
          result.filesChanged++
        }
      } catch (error) {
        debug(`push: error processing ${change.path}: ${error}`)
        out.taskLine(`Error pushing ${change.path}: ${error}`, true)
        result.errors.push({
          path: change.path,
          operation: "local-to-remote",
          error: error as Error,
          recoverable: true,
        })
      }
    }

    // Collect subdirectory URL updates for child dirs already processed
    // (deepest-first ordering guarantees children precede their parent here)
    const subdirUpdates: {name: string; url: AutomergeUrl}[] = []
    for (const modifiedDir of modifiedDirs) {
      // Check if modifiedDir is a direct child of dirPath
      const parts = modifiedDir.split("/")
      const childName = parts.pop() || ""
      const parentOfModified = parts.join("/")
      if (parentOfModified === dirPath) {
        const dirEntry = snapshot.directories.get(modifiedDir)
        if (dirEntry) {
          const childHandle =
            await this.repo.find<DirectoryDocument>(
              getPlainUrl(dirEntry.url)
            )
          subdirUpdates.push({
            name: childName,
            url: this.getDirEntryUrl(childHandle),
          })
        }
      }
    }

    // Batch-update the directory document in a single change
    const hasChanges =
      newEntries.length > 0 ||
      updatedEntries.length > 0 ||
      deletedNames.length > 0 ||
      subdirUpdates.length > 0
    if (hasChanges && snapshot.rootDirectoryUrl) {
      debug(`push: batch-updating directory "${dirLabel}" (+${newEntries.length} new, ~${updatedEntries.length} updated, -${deletedNames.length} deleted, ${subdirUpdates.length} subdir URL updates)`)
      await this.batchUpdateDirectory(
        snapshot,
        dirPath,
        newEntries,
        updatedEntries,
        deletedNames,
        subdirUpdates
      )
      modifiedDirs.add(dirPath)
      result.directoriesChanged++
    }
  }

  debug(`push: complete - ${result.filesChanged} files, ${result.directoriesChanged} dirs changed, ${result.errors.length} errors`)
  return result
}
935
-
936
- /**
937
- * Phase 2: Pull remote changes to local filesystem
938
- */
939
- private async pullRemoteChanges(
940
- changes: DetectedChange[],
941
- snapshot: SyncSnapshot
942
- ): Promise<SyncResult> {
943
- const result: SyncResult = {
944
- success: true,
945
- filesChanged: 0,
946
- directoriesChanged: 0,
947
- errors: [],
948
- warnings: [],
949
- }
950
-
951
- // Process remote changes
952
- const remoteChanges = changes.filter(
953
- c =>
954
- c.changeType === ChangeType.REMOTE_ONLY ||
955
- c.changeType === ChangeType.BOTH_CHANGED
956
- )
957
-
958
- // Sort changes by dependency order (parents before children)
959
- const sortedChanges = this.sortChangesByDependency(remoteChanges)
960
-
961
- for (const change of sortedChanges) {
962
- try {
963
- await this.applyRemoteChangeToLocal(change, snapshot)
964
- result.filesChanged++
965
- } catch (error) {
966
- result.errors.push({
967
- path: change.path,
968
- operation: "remote-to-local",
969
- error: error as Error,
970
- recoverable: true,
971
- })
972
- }
973
- }
974
-
975
- return result
976
- }
977
-
978
/**
 * Apply a single remote change to the local filesystem.
 *
 * Writes (or removes) the file at the change's local path, then keeps the
 * snapshot entry in step: updating heads/URL for known files, or creating
 * a fresh entry (resolved through the directory hierarchy) for files first
 * seen on the remote side.
 *
 * @throws When the change carries no remote head (invariant violation).
 */
private async applyRemoteChangeToLocal(
  change: DetectedChange,
  snapshot: SyncSnapshot
): Promise<void> {
  const localPath = joinAndNormalizePath(this.rootPath, change.path)

  if (!change.remoteHead) {
    throw new Error(
      `No remote head found for remote change to ${change.path}`
    )
  }

  // Check for null (empty string/Uint8Array are valid content)
  if (change.remoteContent === null) {
    // File was deleted remotely
    await removePath(localPath)
    this.snapshotManager.removeFileEntry(snapshot, change.path)
    return
  }

  // Create or update local file
  await writeFileContent(localPath, change.remoteContent)

  // Update or create snapshot entry for this file
  const snapshotEntry = snapshot.files.get(change.path)
  if (snapshotEntry) {
    // Update existing entry
    snapshotEntry.head = change.remoteHead
    // If the remote document was replaced (new URL), update the snapshot URL
    if (change.remoteUrl) {
      const fileHandle = await this.repo.find<FileDocument>(change.remoteUrl)
      snapshotEntry.url = this.getEntryUrl(fileHandle, change.path)
    }
  } else {
    // Create new snapshot entry for newly discovered remote file
    // We need to find the remote file's URL from the directory hierarchy
    if (snapshot.rootDirectoryUrl) {
      try {
        const fileEntry = await findFileInDirectoryHierarchy(
          this.repo,
          snapshot.rootDirectoryUrl,
          change.path
        )

        if (fileEntry) {
          const fileHandle = await this.repo.find<FileDocument>(fileEntry.url)
          const entryUrl = this.getEntryUrl(fileHandle, change.path)
          this.snapshotManager.updateFileEntry(snapshot, change.path, {
            path: localPath,
            url: entryUrl,
            head: change.remoteHead,
            extension: getFileExtension(change.path),
            mimeType: getEnhancedMimeType(change.path),
          })
        }
      } catch (error) {
        // Failed to update snapshot - file may have been deleted.
        // Best-effort: the local file was written; the snapshot entry
        // will be reconciled on the next sync pass.
        out.taskLine(
          `Warning: Failed to update snapshot for remote file ${change.path}`,
          true
        )
      }
    }
  }
}
1046
-
1047
/**
 * Apply a file move/rename to the remote documents.
 *
 * Steps: remove the entry from the old parent directory, ensure the
 * destination directory exists, update the FileDocument (name, and content
 * if the move also carried a modification), add the entry to the new parent,
 * and finally rewrite the snapshot entries. Artifact files get a brand-new
 * document (RawString content is immutable), while regular files are
 * mutated in place against their snapshot heads.
 */
private async applyMoveToRemote(
  move: MoveCandidate,
  snapshot: SyncSnapshot
): Promise<void> {
  const fromEntry = snapshot.files.get(move.fromPath)
  if (!fromEntry) return

  // Parse paths
  const toParts = move.toPath.split("/")
  const toFileName = toParts.pop() || ""
  const toDirPath = toParts.join("/")

  // 1) Remove file entry from old directory document
  if (move.fromPath !== move.toPath) {
    await this.removeFileFromDirectory(snapshot, move.fromPath)
  }

  // 2) Ensure destination directory document exists
  await this.ensureDirectoryDocument(snapshot, toDirPath)

  // 3) Update the FileDocument name and content to match new location/state
  try {
    let entryUrl: AutomergeUrl
    let finalHeads: UrlHeads

    if (this.isArtifactPath(move.toPath)) {
      // Artifact files use RawString — no diffing needed, just create a fresh doc.
      // Content preference: the move's new content if present, otherwise the
      // current content of the source document.
      const content = move.newContent !== undefined
        ? move.newContent
        : readDocContent((await (await this.repo.find<FileDocument>(getPlainUrl(fromEntry.url))).doc())?.content)
      const fakeChange: DetectedChange = {
        path: move.toPath,
        changeType: ChangeType.LOCAL_ONLY,
        fileType: content != null && typeof content === "string" ? FileType.TEXT : FileType.BINARY,
        localContent: content,
        remoteContent: null,
      }
      const newHandle = await this.createRemoteFile(fakeChange)
      // createRemoteFile returns null only for null content; in that case
      // the old directory entry has already been removed above.
      if (!newHandle) return
      entryUrl = this.getEntryUrl(newHandle, move.toPath)
      finalHeads = newHandle.heads()
    } else {
      // Use plain URL for mutable handle
      const handle = await this.repo.find<FileDocument>(
        getPlainUrl(fromEntry.url)
      )
      const heads = fromEntry.head

      // Update both name and content (if content changed during move)
      changeWithOptionalHeads(handle, heads, (doc: FileDocument) => {
        doc.name = toFileName

        // If new content is provided, update it (handles move + modification case)
        if (move.newContent !== undefined) {
          if (typeof move.newContent === "string") {
            updateTextContent(doc, ["content"], move.newContent)
          } else {
            doc.content = move.newContent
          }
        }
      })

      entryUrl = this.getEntryUrl(handle, move.toPath)
      finalHeads = handle.heads()

      // Track file handle for network sync
      this.handlesByPath.set(move.toPath, handle)
    }

    // 4) Add file entry to destination directory
    await this.addFileToDirectory(snapshot, move.toPath, entryUrl)

    // 5) Update snapshot entries: drop the old path, write the new one
    // with the fresh URL/heads (and content hash for artifacts).
    this.snapshotManager.removeFileEntry(snapshot, move.fromPath)
    this.snapshotManager.updateFileEntry(snapshot, move.toPath, {
      ...fromEntry,
      path: joinAndNormalizePath(this.rootPath, move.toPath),
      url: entryUrl,
      head: finalHeads,
      ...(this.isArtifactPath(move.toPath) && move.newContent != null
        ? {contentHash: contentHash(move.newContent)}
        : {}),
    })
  } catch (e) {
    // Failed to update file name - file may have been deleted
    out.taskLine(
      `Warning: Failed to rename ${move.fromPath} to ${move.toPath}`,
      true
    )
  }
}
1141
-
1142
- /**
1143
- * Create new remote file document
1144
- */
1145
- private async createRemoteFile(
1146
- change: DetectedChange
1147
- ): Promise<DocHandle<FileDocument> | null> {
1148
- if (change.localContent === null) return null
1149
-
1150
- const isText = this.isTextContent(change.localContent)
1151
- const isArtifact = this.isArtifactPath(change.path)
1152
-
1153
- // For artifact files, store text as RawString (immutable snapshot).
1154
- // For regular files, store as collaborative text (empty string + splice).
1155
- const fileDoc: FileDocument = {
1156
- "@patchwork": {type: "file"},
1157
- name: change.path.split("/").pop() || "",
1158
- extension: getFileExtension(change.path),
1159
- mimeType: getEnhancedMimeType(change.path),
1160
- content:
1161
- isText && isArtifact
1162
- ? new A.RawString(change.localContent as string) as unknown as string
1163
- : isText
1164
- ? ""
1165
- : change.localContent,
1166
- metadata: {
1167
- permissions: 0o644,
1168
- },
1169
- }
1170
-
1171
- const handle = this.repo.create(fileDoc)
1172
-
1173
- // For non-artifact text files, splice in the content so it's stored as collaborative text
1174
- if (isText && !isArtifact && typeof change.localContent === "string") {
1175
- handle.change((doc: FileDocument) => {
1176
- updateTextContent(doc, ["content"], change.localContent as string)
1177
- })
1178
- }
1179
-
1180
- // Always track newly created files for network sync
1181
- // (they always represent a change that needs to sync)
1182
- this.handlesByPath.set(change.path, handle)
1183
-
1184
- return handle
1185
- }
1186
-
1187
/**
 * Update an existing remote file document with new local content.
 *
 * Three outcomes:
 *  1. Replacement: artifact paths, unavailable docs, or docs holding an
 *     immutable string are replaced by a fresh document (snapshot URL and
 *     heads rewritten; returns early).
 *  2. No-op: content is unchanged — snapshot heads are still refreshed so
 *     later change detection doesn't see stale heads.
 *  3. In-place change: content is written via changeAt() against the
 *     snapshot heads, and the handle is tracked for network sync.
 *
 * @throws When an in-place change is needed but no snapshot heads exist.
 */
private async updateRemoteFile(
  url: AutomergeUrl,
  content: string | Uint8Array,
  snapshot: SyncSnapshot,
  filePath: string
): Promise<void> {
  // Use plain URL for mutable handle
  const handle = await this.repo.find<FileDocument>(getPlainUrl(url))

  // Check if content actually changed before tracking for sync
  const doc = await handle.doc()
  const rawContent = doc?.content

  // For artifact paths, always replace with a new document containing RawString.
  // For non-artifact paths with immutable strings, replace with mutable text.
  // In both cases we create a new document and update the snapshot URL.
  const isArtifact = this.isArtifactPath(filePath)
  if (
    isArtifact ||
    !doc ||
    (rawContent != null && A.isImmutableString(rawContent))
  ) {
    if (!isArtifact) {
      // Only warn for the unexpected cases; artifact replacement is routine.
      out.taskLine(
        `Replacing ${!doc ? 'unavailable' : 'immutable string'} document for ${filePath}`,
        true
      )
    }
    // Reuse the create path by synthesizing a LOCAL_ONLY change.
    const fakeChange: DetectedChange = {
      path: filePath,
      changeType: ChangeType.LOCAL_ONLY,
      fileType: this.isTextContent(content)
        ? FileType.TEXT
        : FileType.BINARY,
      localContent: content,
      remoteContent: null,
    }
    const newHandle = await this.createRemoteFile(fakeChange)
    if (newHandle) {
      const entryUrl = this.getEntryUrl(newHandle, filePath)
      this.snapshotManager.updateFileEntry(snapshot, filePath, {
        path: joinAndNormalizePath(this.rootPath, filePath),
        url: entryUrl,
        head: newHandle.heads(),
        extension: getFileExtension(filePath),
        mimeType: getEnhancedMimeType(filePath),
        ...(this.isArtifactPath(filePath)
          ? {contentHash: contentHash(content)}
          : {}),
      })
    }
    return
  }

  const currentContent = readDocContent(rawContent)
  const contentChanged = !isContentEqual(content, currentContent)

  // Update snapshot heads even when content is identical
  const snapshotEntry = snapshot.files.get(filePath)
  if (snapshotEntry) {
    // Update snapshot with current document heads
    snapshot.files.set(filePath, {
      ...snapshotEntry,
      head: handle.heads(),
    })
  }

  if (!contentChanged) {
    // Content is identical, but we've updated the snapshot heads above
    // This prevents fresh change detection from seeing stale heads
    return
  }

  // NOTE: changeAt() needs the heads recorded at last sync so the edit
  // forks from the right point; snapshotEntry was captured before the
  // heads refresh above, so .head here is still the pre-refresh value.
  const heads = snapshotEntry?.head

  if (!heads) {
    throw new Error(`No heads found for ${url}`)
  }

  handle.changeAt(heads, (doc: FileDocument) => {
    if (typeof content === "string") {
      updateTextContent(doc, ["content"], content)
    } else {
      doc.content = content
    }
  })

  // Update snapshot with new heads after content change
  if (snapshotEntry) {
    snapshot.files.set(filePath, {
      ...snapshotEntry,
      head: handle.heads(),
    })
  }

  // Only track files that actually changed content
  this.handlesByPath.set(filePath, handle)
}
1288
-
1289
/**
 * Delete remote file document.
 *
 * Intentionally a no-op. In Automerge, we don't actually delete documents:
 * the file entry is removed from its parent directory (see
 * removeFileFromDirectory), leaving the document orphaned. Clearing content
 * via splice is expensive for large text files (every character is a CRDT
 * op), so we skip it. Parameters are kept for signature symmetry with the
 * other remote-file operations.
 */
private async deleteRemoteFile(
  _url: AutomergeUrl,
  _snapshot?: SyncSnapshot,
  _filePath?: string
): Promise<void> {
  // In Automerge, we don't actually delete documents.
  // The file entry is removed from its parent directory, making the
  // document orphaned. Clearing content via splice is expensive for
  // large text files (every character is a CRDT op), so we skip it.
}
1302
-
1303
/**
 * Add file entry to appropriate directory document (maintains hierarchy).
 *
 * Ensures the parent directory document exists (creating the chain as
 * needed), then pushes a {name, type: "file", url} entry into its docs
 * array — but only if no entry with that name already exists. The parent
 * directory handle is always tracked for sync (even when unchanged) so
 * leaf-first sync ordering is preserved; snapshot heads are refreshed only
 * when a mutation actually happened.
 */
private async addFileToDirectory(
  snapshot: SyncSnapshot,
  filePath: string,
  fileUrl: AutomergeUrl
): Promise<void> {
  if (!snapshot.rootDirectoryUrl) return

  const pathParts = filePath.split("/")
  const fileName = pathParts.pop() || ""
  const directoryPath = pathParts.join("/")

  // Get or create the parent directory document
  const parentDirUrl = await this.ensureDirectoryDocument(
    snapshot,
    directoryPath
  )

  // Use plain URL for mutable handle
  const dirHandle = await this.repo.find<DirectoryDocument>(
    getPlainUrl(parentDirUrl)
  )

  // didChange is set inside the change callback so we only refresh
  // snapshot heads when the docs array was actually mutated.
  let didChange = false
  const snapshotEntry = snapshot.directories.get(directoryPath)
  const heads = snapshotEntry?.head
  changeWithOptionalHeads(dirHandle, heads, (doc: DirectoryDocument) => {
    const existingIndex = doc.docs.findIndex(
      entry => entry.name === fileName && entry.type === "file"
    )
    if (existingIndex === -1) {
      doc.docs.push({
        name: fileName,
        type: "file",
        url: fileUrl,
      })
      didChange = true
    }
  })
  // Always track the directory (even if unchanged) for proper leaf-first sync ordering
  this.handlesByPath.set(directoryPath, dirHandle)

  if (didChange && snapshotEntry) {
    snapshotEntry.head = dirHandle.heads()
  }
}
1351
-
1352
/**
 * Ensure directory document exists for the given path, creating hierarchy as needed.
 * First checks for existing shared directories before creating new ones.
 *
 * Resolution order:
 *  1. Root ("" path) → the snapshot's root directory URL.
 *  2. Snapshot cache hit → stored URL.
 *  3. Discovery: recursively ensure the parent, then look for a matching
 *     "folder" entry in the parent doc on the server and adopt it.
 *  4. Create: build a fresh DirectoryDocument, link it into the parent,
 *     and record it in the snapshot.
 *
 * @returns The entry URL for the directory (suitable for parent-doc entries).
 */
private async ensureDirectoryDocument(
  snapshot: SyncSnapshot,
  directoryPath: string
): Promise<AutomergeUrl> {
  // Root directory case
  if (!directoryPath || directoryPath === "") {
    // NOTE(review): callers appear to guard on snapshot.rootDirectoryUrl
    // before reaching here — the non-null assertion relies on that.
    return snapshot.rootDirectoryUrl!
  }

  // Check if we already have this directory in snapshot
  const existingDir = snapshot.directories.get(directoryPath)
  if (existingDir) {
    return existingDir.url
  }

  // Split path into parent and current directory name
  const pathParts = directoryPath.split("/")
  const currentDirName = pathParts.pop() || ""
  const parentPath = pathParts.join("/")

  // Ensure parent directory exists first (recursive)
  const parentDirUrl = await this.ensureDirectoryDocument(
    snapshot,
    parentPath
  )

  // DISCOVERY: Check if directory already exists in parent on server
  try {
    const parentHandle = await this.repo.find<DirectoryDocument>(parentDirUrl)
    const parentDoc = await parentHandle.doc()

    if (parentDoc) {
      const existingDirEntry = parentDoc.docs.find(
        (entry: {name: string; type: string; url: AutomergeUrl}) =>
          entry.name === currentDirName && entry.type === "folder"
      )

      if (existingDirEntry) {
        // Resolve the actual directory handle and use its current heads
        // Directory entries in parent docs may not carry valid heads
        try {
          const childDirHandle = await this.repo.find<DirectoryDocument>(
            existingDirEntry.url
          )

          // Track discovered directory for sync
          this.handlesByPath.set(directoryPath, childDirHandle)

          // Get appropriate URL for directory entry
          const entryUrl = this.getDirEntryUrl(childDirHandle)

          // Update snapshot with discovered directory
          this.snapshotManager.updateDirectoryEntry(snapshot, directoryPath, {
            path: joinAndNormalizePath(this.rootPath, directoryPath),
            url: entryUrl,
            head: childDirHandle.heads(),
            entries: [],
          })

          return entryUrl
        } catch (resolveErr) {
          // Failed to resolve directory - fall through to create a fresh directory document
        }
      }
    }
  } catch (error) {
    // Failed to check for existing directory - will create new one
  }

  // CREATE: Directory doesn't exist, create new one
  const dirDoc: DirectoryDocument = {
    "@patchwork": {type: "folder"},
    name: currentDirName,
    title: currentDirName,
    docs: [],
  }

  const dirHandle = this.repo.create(dirDoc)

  // Get appropriate URL for directory entry
  const dirEntryUrl = this.getDirEntryUrl(dirHandle)

  // Add this directory to its parent
  // Use plain URL for mutable handle
  const parentHandle = await this.repo.find<DirectoryDocument>(
    getPlainUrl(parentDirUrl)
  )

  let didChange = false
  parentHandle.change((doc: DirectoryDocument) => {
    // Double-check that entry doesn't exist (race condition protection)
    const existingIndex = doc.docs.findIndex(
      (entry: {name: string; type: string; url: AutomergeUrl}) =>
        entry.name === currentDirName && entry.type === "folder"
    )
    if (existingIndex === -1) {
      doc.docs.push({
        name: currentDirName,
        type: "folder",
        url: dirEntryUrl,
      })
      didChange = true
    }
  })

  // Track directory handles for sync
  this.handlesByPath.set(directoryPath, dirHandle)
  if (didChange) {
    this.handlesByPath.set(parentPath, parentHandle)

    // Refresh the parent's snapshot heads since we just mutated it
    const parentSnapshotEntry = snapshot.directories.get(parentPath)
    if (parentSnapshotEntry) {
      parentSnapshotEntry.head = parentHandle.heads()
    }
  }

  // Update snapshot with new directory
  this.snapshotManager.updateDirectoryEntry(snapshot, directoryPath, {
    path: joinAndNormalizePath(this.rootPath, directoryPath),
    url: dirEntryUrl,
    head: dirHandle.heads(),
    entries: [],
  })

  return dirEntryUrl
}
1482
-
1483
- /**
1484
- * Remove file entry from directory document
1485
- */
1486
- private async removeFileFromDirectory(
1487
- snapshot: SyncSnapshot,
1488
- filePath: string
1489
- ): Promise<void> {
1490
- if (!snapshot.rootDirectoryUrl) return
1491
-
1492
- const pathParts = filePath.split("/")
1493
- const fileName = pathParts.pop() || ""
1494
- const directoryPath = pathParts.join("/")
1495
-
1496
- // Get the parent directory URL
1497
- let parentDirUrl: AutomergeUrl
1498
- if (!directoryPath || directoryPath === "") {
1499
- parentDirUrl = snapshot.rootDirectoryUrl
1500
- } else {
1501
- const existingDir = snapshot.directories.get(directoryPath)
1502
- if (!existingDir) {
1503
- // Directory not found - file may already be removed
1504
- return
1505
- }
1506
- parentDirUrl = existingDir.url
1507
- }
1508
-
1509
- try {
1510
- // Use plain URL for mutable handle
1511
- const dirHandle = await this.repo.find<DirectoryDocument>(
1512
- getPlainUrl(parentDirUrl)
1513
- )
1514
-
1515
- // Track this handle for network sync waiting
1516
- this.handlesByPath.set(directoryPath, dirHandle)
1517
- const snapshotEntry = snapshot.directories.get(directoryPath)
1518
- const heads = snapshotEntry?.head
1519
- let didChange = false
1520
-
1521
- changeWithOptionalHeads(dirHandle, heads, (doc: DirectoryDocument) => {
1522
- const indexToRemove = doc.docs.findIndex(
1523
- entry => entry.name === fileName && entry.type === "file"
1524
- )
1525
- if (indexToRemove !== -1) {
1526
- doc.docs.splice(indexToRemove, 1)
1527
- didChange = true
1528
- out.taskLine(
1529
- `Removed ${fileName} from ${
1530
- formatRelativePath(directoryPath) || "root"
1531
- }`
1532
- )
1533
- }
1534
- })
1535
-
1536
- if (didChange && snapshotEntry) {
1537
- snapshotEntry.head = dirHandle.heads()
1538
- }
1539
- } catch (error) {
1540
- throw error
1541
- }
1542
- }
1543
-
1544
- /**
1545
- * Batch-update a directory document in a single change: add new file entries,
1546
- * update URLs for modified files, remove deleted entries, and update
1547
- * subdirectory URLs. This replaces the separate per-file directory mutations
1548
- * and the post-hoc URL update pass.
1549
- */
1550
- private async batchUpdateDirectory(
1551
- snapshot: SyncSnapshot,
1552
- dirPath: string,
1553
- newEntries: {name: string; url: AutomergeUrl}[],
1554
- updatedEntries: {name: string; url: AutomergeUrl}[],
1555
- deletedNames: string[],
1556
- subdirUpdates: {name: string; url: AutomergeUrl}[]
1557
- ): Promise<void> {
1558
- let dirUrl: AutomergeUrl
1559
- if (!dirPath || dirPath === "") {
1560
- dirUrl = snapshot.rootDirectoryUrl!
1561
- } else {
1562
- const dirEntry = snapshot.directories.get(dirPath)
1563
- if (!dirEntry) return
1564
- dirUrl = dirEntry.url
1565
- }
1566
-
1567
- const dirHandle = await this.repo.find<DirectoryDocument>(
1568
- getPlainUrl(dirUrl)
1569
- )
1570
-
1571
- const snapshotEntry = snapshot.directories.get(dirPath)
1572
- const heads = snapshotEntry?.head
1573
-
1574
- // Determine directory name
1575
- const dirName = dirPath ? dirPath.split("/").pop() || "" : path.basename(this.rootPath)
1576
-
1577
- changeWithOptionalHeads(dirHandle, heads, (doc: DirectoryDocument) => {
1578
- // Ensure name and title fields are set
1579
- if (!doc.name) doc.name = dirName
1580
- if (!doc.title) doc.title = dirName
1581
-
1582
- // Remove deleted file entries
1583
- for (const name of deletedNames) {
1584
- const idx = doc.docs.findIndex(
1585
- entry => entry.name === name && entry.type === "file"
1586
- )
1587
- if (idx !== -1) {
1588
- doc.docs.splice(idx, 1)
1589
- out.taskLine(
1590
- `Removed ${name} from ${
1591
- formatRelativePath(dirPath) || "root"
1592
- }`
1593
- )
1594
- }
1595
- }
1596
-
1597
- // Update URLs for modified files
1598
- for (const {name, url} of updatedEntries) {
1599
- const idx = doc.docs.findIndex(
1600
- entry => entry.name === name && entry.type === "file"
1601
- )
1602
- if (idx !== -1) {
1603
- doc.docs[idx].url = url
1604
- }
1605
- }
1606
-
1607
- // Add new file entries
1608
- for (const {name, url} of newEntries) {
1609
- const existing = doc.docs.findIndex(
1610
- entry => entry.name === name && entry.type === "file"
1611
- )
1612
- if (existing === -1) {
1613
- doc.docs.push({name, type: "file", url})
1614
- } else {
1615
- // Entry already exists (e.g. from immutable string replacement)
1616
- doc.docs[existing].url = url
1617
- }
1618
- }
1619
-
1620
- // Update subdirectory URLs with current heads
1621
- for (const {name, url} of subdirUpdates) {
1622
- const idx = doc.docs.findIndex(
1623
- entry => entry.name === name && entry.type === "folder"
1624
- )
1625
- if (idx !== -1) {
1626
- doc.docs[idx].url = url
1627
- }
1628
- }
1629
- })
1630
-
1631
- // Track directory handle and update snapshot heads
1632
- this.handlesByPath.set(dirPath, dirHandle)
1633
- if (snapshotEntry) {
1634
- snapshotEntry.head = dirHandle.heads()
1635
- }
1636
- }
1637
-
1638
- /**
1639
- * Sort changes by dependency order
1640
- */
1641
- private sortChangesByDependency(changes: DetectedChange[]): DetectedChange[] {
1642
- // Sort by path depth (shallower paths first)
1643
- return changes.sort((a, b) => {
1644
- const depthA = a.path.split("/").length
1645
- const depthB = b.path.split("/").length
1646
- return depthA - depthB
1647
- })
1648
- }
1649
-
1650
- /**
1651
- * Get sync status
1652
- */
1653
- async getStatus(): Promise<{
1654
- snapshot: SyncSnapshot | null
1655
- hasChanges: boolean
1656
- changeCount: number
1657
- lastSync: Date | null
1658
- }> {
1659
- const snapshot = await this.snapshotManager.load()
1660
-
1661
- if (!snapshot) {
1662
- return {
1663
- snapshot: null,
1664
- hasChanges: false,
1665
- changeCount: 0,
1666
- lastSync: null,
1667
- }
1668
- }
1669
-
1670
- const changes = await this.changeDetector.detectChanges(snapshot)
1671
-
1672
- return {
1673
- snapshot,
1674
- hasChanges: changes.length > 0,
1675
- changeCount: changes.length,
1676
- lastSync: new Date(snapshot.timestamp),
1677
- }
1678
- }
1679
-
1680
- /**
1681
- * Preview changes without applying them
1682
- */
1683
- async previewChanges(): Promise<{
1684
- changes: DetectedChange[]
1685
- moves: MoveCandidate[]
1686
- summary: string
1687
- }> {
1688
- const snapshot = await this.snapshotManager.load()
1689
- if (!snapshot) {
1690
- return {
1691
- changes: [],
1692
- moves: [],
1693
- summary: "No snapshot found - run init first",
1694
- }
1695
- }
1696
-
1697
- const changes = await this.changeDetector.detectChanges(snapshot)
1698
- const {moves} = await this.moveDetector.detectMoves(changes, snapshot)
1699
-
1700
- const summary = this.generateChangeSummary(changes, moves)
1701
-
1702
- return {changes, moves, summary}
1703
- }
1704
-
1705
- /**
1706
- * Generate human-readable summary of changes
1707
- */
1708
- private generateChangeSummary(
1709
- changes: DetectedChange[],
1710
- moves: MoveCandidate[]
1711
- ): string {
1712
- const localChanges = changes.filter(
1713
- c =>
1714
- c.changeType === ChangeType.LOCAL_ONLY ||
1715
- c.changeType === ChangeType.BOTH_CHANGED
1716
- ).length
1717
-
1718
- const remoteChanges = changes.filter(
1719
- c =>
1720
- c.changeType === ChangeType.REMOTE_ONLY ||
1721
- c.changeType === ChangeType.BOTH_CHANGED
1722
- ).length
1723
-
1724
- const conflicts = changes.filter(
1725
- c => c.changeType === ChangeType.BOTH_CHANGED
1726
- ).length
1727
-
1728
- const parts: string[] = []
1729
-
1730
- if (localChanges > 0) {
1731
- parts.push(`${localChanges} local change${localChanges > 1 ? "s" : ""}`)
1732
- }
1733
-
1734
- if (remoteChanges > 0) {
1735
- parts.push(
1736
- `${remoteChanges} remote change${remoteChanges > 1 ? "s" : ""}`
1737
- )
1738
- }
1739
-
1740
- if (moves.length > 0) {
1741
- parts.push(`${moves.length} potential move${moves.length > 1 ? "s" : ""}`)
1742
- }
1743
-
1744
- if (conflicts > 0) {
1745
- parts.push(`${conflicts} conflict${conflicts > 1 ? "s" : ""}`)
1746
- }
1747
-
1748
- if (parts.length === 0) {
1749
- return "No changes detected"
1750
- }
1751
-
1752
- return parts.join(", ")
1753
- }
1754
-
1755
- /**
1756
- * Update the lastSyncAt timestamp on the root directory document
1757
- */
1758
- private async touchRootDirectory(snapshot: SyncSnapshot): Promise<void> {
1759
- if (!snapshot.rootDirectoryUrl) {
1760
- return
1761
- }
1762
-
1763
- try {
1764
- const rootHandle = await this.repo.find<DirectoryDocument>(
1765
- snapshot.rootDirectoryUrl
1766
- )
1767
-
1768
- const timestamp = Date.now()
1769
-
1770
- let version: string
1771
- try {
1772
- version = require("../../package.json").version
1773
- } catch {
1774
- version = "unknown"
1775
- }
1776
-
1777
- debug(`touchRootDirectory: setting lastSyncAt=${timestamp} with=pushwork@${version}`)
1778
- rootHandle.change((doc: DirectoryDocument) => {
1779
- doc.lastSyncAt = timestamp
1780
- doc.with = `pushwork@${version}`
1781
- })
1782
-
1783
- // Track root directory for network sync
1784
- this.handlesByPath.set("", rootHandle)
1785
-
1786
- const snapshotEntry = snapshot.directories.get("")
1787
- if (snapshotEntry) {
1788
- snapshotEntry.head = rootHandle.heads()
1789
- }
1790
- } catch (error) {
1791
- debug(`touchRootDirectory: failed: ${error}`)
1792
- }
1793
- }
1794
-
1795
- }