pushwork 1.0.21 → 1.0.25

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (40)
  1. package/CLAUDE.md +24 -2
  2. package/dist/cli.js +21 -0
  3. package/dist/cli.js.map +1 -1
  4. package/dist/commands.d.ts +11 -1
  5. package/dist/commands.d.ts.map +1 -1
  6. package/dist/commands.js +81 -7
  7. package/dist/commands.js.map +1 -1
  8. package/dist/core/change-detection.d.ts +8 -1
  9. package/dist/core/change-detection.d.ts.map +1 -1
  10. package/dist/core/change-detection.js +69 -1
  11. package/dist/core/change-detection.js.map +1 -1
  12. package/dist/core/sync-engine.d.ts +11 -0
  13. package/dist/core/sync-engine.d.ts.map +1 -1
  14. package/dist/core/sync-engine.js +257 -42
  15. package/dist/core/sync-engine.js.map +1 -1
  16. package/dist/types/config.d.ts +5 -0
  17. package/dist/types/config.d.ts.map +1 -1
  18. package/dist/types/snapshot.d.ts +1 -0
  19. package/dist/types/snapshot.d.ts.map +1 -1
  20. package/dist/utils/content.d.ts +5 -0
  21. package/dist/utils/content.d.ts.map +1 -1
  22. package/dist/utils/content.js +9 -0
  23. package/dist/utils/content.js.map +1 -1
  24. package/dist/utils/network-sync.d.ts +12 -2
  25. package/dist/utils/network-sync.d.ts.map +1 -1
  26. package/dist/utils/network-sync.js +127 -77
  27. package/dist/utils/network-sync.js.map +1 -1
  28. package/dist/utils/repo-factory.d.ts.map +1 -1
  29. package/dist/utils/repo-factory.js +0 -1
  30. package/dist/utils/repo-factory.js.map +1 -1
  31. package/package.json +1 -1
  32. package/src/cli.ts +36 -0
  33. package/src/commands.ts +106 -6
  34. package/src/core/change-detection.ts +81 -2
  35. package/src/core/sync-engine.ts +319 -48
  36. package/src/types/config.ts +5 -0
  37. package/src/types/snapshot.ts +1 -0
  38. package/src/utils/content.ts +10 -0
  39. package/src/utils/network-sync.ts +162 -94
  40. package/src/utils/repo-factory.ts +0 -1
@@ -17,7 +17,7 @@ import {
17
17
  getPlainUrl,
18
18
  readDocContent,
19
19
  } from "../utils"
20
- import {isContentEqual} from "../utils/content"
20
+ import {isContentEqual, contentHash} from "../utils/content"
21
21
  import {out} from "../utils/output"
22
22
 
23
23
  /**
@@ -27,9 +27,21 @@ export class ChangeDetector {
27
27
  constructor(
28
28
  private repo: Repo,
29
29
  private rootPath: string,
30
- private excludePatterns: string[] = []
30
+ private excludePatterns: string[] = [],
31
+ private artifactDirectories: string[] = []
31
32
  ) {}
32
33
 
34
+ /**
35
+ * Check if a file path is inside an artifact directory.
36
+ * Artifact files use RawString and are always replaced wholesale,
37
+ * so we can skip expensive remote content reads for them.
38
+ */
39
+ private isArtifactPath(filePath: string): boolean {
40
+ return this.artifactDirectories.some(
41
+ dir => filePath === dir || filePath.startsWith(dir + "/")
42
+ )
43
+ }
44
+
33
45
  /**
34
46
  * Detect all changes between local filesystem and snapshot
35
47
  */
@@ -78,6 +90,35 @@ export class ChangeDetector {
78
90
  localContent: fileInfo.content,
79
91
  remoteContent: null,
80
92
  })
93
+ } else if (this.isArtifactPath(relativePath)) {
94
+ // Artifact files are always replaced wholesale (RawString).
95
+ // Skip remote doc content reads — compare local hash against
96
+ // stored hash to detect local changes, and check heads for remote.
97
+ const localHash = contentHash(fileInfo.content)
98
+ const localChanged = snapshotEntry.contentHash
99
+ ? localHash !== snapshotEntry.contentHash
100
+ : true // No stored hash = first sync with hash support, assume changed
101
+
102
+ const remoteHead = await this.getCurrentRemoteHead(
103
+ snapshotEntry.url
104
+ )
105
+ const remoteChanged = !A.equals(remoteHead, snapshotEntry.head)
106
+
107
+ if (localChanged || remoteChanged) {
108
+ changes.push({
109
+ path: relativePath,
110
+ changeType: localChanged && remoteChanged
111
+ ? ChangeType.BOTH_CHANGED
112
+ : localChanged
113
+ ? ChangeType.LOCAL_ONLY
114
+ : ChangeType.REMOTE_ONLY,
115
+ fileType: fileInfo.type,
116
+ localContent: fileInfo.content,
117
+ remoteContent: null,
118
+ localHead: snapshotEntry.head,
119
+ remoteHead,
120
+ })
121
+ }
81
122
  } else {
82
123
  // Check if content changed
83
124
  const lastKnownContent = await this.getContentAtHead(
@@ -129,6 +170,27 @@ export class ChangeDetector {
129
170
  Array.from(snapshot.files.entries())
130
171
  .filter(([relativePath]) => !currentFiles.has(relativePath))
131
172
  .map(async ([relativePath, snapshotEntry]) => {
173
+ if (this.isArtifactPath(relativePath)) {
174
+ // Artifact deletion: skip remote content read
175
+ const remoteHead = await this.getCurrentRemoteHead(
176
+ snapshotEntry.url
177
+ )
178
+ const remoteChanged = !A.equals(remoteHead, snapshotEntry.head)
179
+
180
+ changes.push({
181
+ path: relativePath,
182
+ changeType: remoteChanged
183
+ ? ChangeType.BOTH_CHANGED
184
+ : ChangeType.LOCAL_ONLY,
185
+ fileType: FileType.TEXT,
186
+ localContent: null,
187
+ remoteContent: null,
188
+ localHead: snapshotEntry.head,
189
+ remoteHead,
190
+ })
191
+ return
192
+ }
193
+
132
194
  // File was deleted locally
133
195
  const currentRemoteContent = await this.getCurrentRemoteContent(
134
196
  snapshotEntry.url
@@ -204,6 +266,23 @@ export class ChangeDetector {
204
266
  )
205
267
 
206
268
  if (!A.equals(currentRemoteHead, snapshotEntry.head)) {
269
+ if (this.isArtifactPath(relativePath)) {
270
+ // Artifact: skip content reads, just report head change
271
+ const localContent = await this.getLocalContent(relativePath)
272
+ changes.push({
273
+ path: relativePath,
274
+ changeType: localContent !== null
275
+ ? ChangeType.BOTH_CHANGED
276
+ : ChangeType.REMOTE_ONLY,
277
+ fileType: FileType.TEXT,
278
+ localContent,
279
+ remoteContent: null,
280
+ localHead: snapshotEntry.head,
281
+ remoteHead: currentRemoteHead,
282
+ })
283
+ return
284
+ }
285
+
207
286
  // Remote document has changed
208
287
  const currentRemoteContent = await this.getCurrentRemoteContent(
209
288
  snapshotEntry.url
@@ -30,7 +30,7 @@ import {
30
30
  updateTextContent,
31
31
  readDocContent,
32
32
  } from "../utils"
33
- import {isContentEqual} from "../utils/content"
33
+ import {isContentEqual, contentHash} from "../utils/content"
34
34
  import {waitForSync, waitForBidirectionalSync} from "../utils/network-sync"
35
35
  import {SnapshotManager} from "./snapshot"
36
36
  import {ChangeDetector} from "./change-detection"
@@ -86,7 +86,8 @@ export class SyncEngine {
86
86
  this.changeDetector = new ChangeDetector(
87
87
  repo,
88
88
  rootPath,
89
- config.exclude_patterns
89
+ config.exclude_patterns,
90
+ config.artifact_directories || []
90
91
  )
91
92
  this.moveDetector = new MoveDetector(config.sync.move_detection_threshold)
92
93
  }
@@ -248,6 +249,237 @@ export class SyncEngine {
248
249
  }
249
250
  }
250
251
 
252
+ /**
253
+ * Recreate documents that failed to sync. Creates new Automerge documents
254
+ * with the same content and updates all references (snapshot, parent directory).
255
+ * Returns new handles that should be retried for sync.
256
+ */
257
+ private async recreateFailedDocuments(
258
+ failedHandles: DocHandle<unknown>[],
259
+ snapshot: SyncSnapshot
260
+ ): Promise<DocHandle<unknown>[]> {
261
+ const failedUrls = new Set(failedHandles.map(h => getPlainUrl(h.url)))
262
+ const newHandles: DocHandle<unknown>[] = []
263
+
264
+ // Find which paths correspond to the failed handles
265
+ for (const [filePath, entry] of snapshot.files.entries()) {
266
+ const plainUrl = getPlainUrl(entry.url)
267
+ if (!failedUrls.has(plainUrl)) continue
268
+
269
+ debug(`recreate: recreating document for ${filePath} (${plainUrl.slice(0, 20)}...)`)
270
+ out.taskLine(`Recreating document for ${filePath}`)
271
+
272
+ try {
273
+ // Read the current content from the old handle
274
+ const oldHandle = await this.repo.find<FileDocument>(plainUrl)
275
+ const doc = await oldHandle.doc()
276
+ if (!doc) {
277
+ debug(`recreate: could not read doc for ${filePath}, skipping`)
278
+ continue
279
+ }
280
+
281
+ const content = readDocContent(doc.content)
282
+ if (content === null) {
283
+ debug(`recreate: null content for ${filePath}, skipping`)
284
+ continue
285
+ }
286
+
287
+ // Create a fresh document
288
+ const fakeChange: DetectedChange = {
289
+ path: filePath,
290
+ changeType: ChangeType.LOCAL_ONLY,
291
+ fileType: this.isTextContent(content) ? FileType.TEXT : FileType.BINARY,
292
+ localContent: content,
293
+ remoteContent: null,
294
+ }
295
+ const newHandle = await this.createRemoteFile(fakeChange)
296
+ if (!newHandle) continue
297
+
298
+ const entryUrl = this.getEntryUrl(newHandle, filePath)
299
+
300
+ // Update snapshot entry
301
+ this.snapshotManager.updateFileEntry(snapshot, filePath, {
302
+ ...entry,
303
+ url: entryUrl,
304
+ head: newHandle.heads(),
305
+ ...(this.isArtifactPath(filePath) ? {contentHash: contentHash(content)} : {}),
306
+ })
307
+
308
+ // Update parent directory entry to point to new document
309
+ const pathParts = filePath.split("/")
310
+ const fileName = pathParts.pop() || ""
311
+ const dirPath = pathParts.join("/")
312
+
313
+ let dirUrl: AutomergeUrl
314
+ if (!dirPath || dirPath === "") {
315
+ dirUrl = snapshot.rootDirectoryUrl!
316
+ } else {
317
+ const dirEntry = snapshot.directories.get(dirPath)
318
+ if (!dirEntry) continue
319
+ dirUrl = dirEntry.url
320
+ }
321
+
322
+ const dirHandle = await this.repo.find<DirectoryDocument>(getPlainUrl(dirUrl))
323
+ dirHandle.change((d: DirectoryDocument) => {
324
+ const idx = d.docs.findIndex(
325
+ e => e.name === fileName && e.type === "file"
326
+ )
327
+ if (idx !== -1) {
328
+ d.docs[idx].url = entryUrl
329
+ }
330
+ })
331
+
332
+ // Track new handles
333
+ this.handlesByPath.set(filePath, newHandle)
334
+ this.handlesByPath.set(dirPath, dirHandle)
335
+ newHandles.push(newHandle)
336
+ newHandles.push(dirHandle)
337
+
338
+ debug(`recreate: created new doc for ${filePath} -> ${newHandle.url.slice(0, 20)}...`)
339
+ } catch (error) {
340
+ debug(`recreate: failed for ${filePath}: ${error}`)
341
+ out.taskLine(`Failed to recreate ${filePath}: ${error}`, true)
342
+ }
343
+ }
344
+
345
+ // Also check directory documents
346
+ for (const [dirPath, entry] of snapshot.directories.entries()) {
347
+ const plainUrl = getPlainUrl(entry.url)
348
+ if (!failedUrls.has(plainUrl)) continue
349
+
350
+ // Directory docs can't be easily recreated (they reference children).
351
+ // Just log a warning — the child recreation above should handle most cases.
352
+ debug(`recreate: directory ${dirPath || "(root)"} failed to sync, cannot recreate`)
353
+ out.taskLine(`Warning: directory ${dirPath || "(root)"} failed to sync`, true)
354
+ }
355
+
356
+ return newHandles
357
+ }
358
+
359
+ /**
360
+ * Push local changes to server without pulling remote changes.
361
+ * Detect changes, push to Automerge docs, upload to server. No bidirectional wait, no pull.
362
+ */
363
+ async pushToRemote(): Promise<SyncResult> {
364
+ const result: SyncResult = {
365
+ success: false,
366
+ filesChanged: 0,
367
+ directoriesChanged: 0,
368
+ errors: [],
369
+ warnings: [],
370
+ }
371
+
372
+ // Reset tracked handles for sync
373
+ this.handlesByPath = new Map()
374
+
375
+ try {
376
+ const snapshot =
377
+ (await this.snapshotManager.load()) ||
378
+ this.snapshotManager.createEmpty()
379
+
380
+ debug(`pushToRemote: rootDirectoryUrl=${snapshot.rootDirectoryUrl?.slice(0, 30)}..., files=${snapshot.files.size}, dirs=${snapshot.directories.size}`)
381
+
382
+ // Detect all changes
383
+ debug("pushToRemote: detecting changes")
384
+ out.update("Detecting local changes")
385
+ const changes = await this.changeDetector.detectChanges(snapshot)
386
+
387
+ // Detect moves
388
+ const {moves, remainingChanges} = await this.moveDetector.detectMoves(
389
+ changes,
390
+ snapshot
391
+ )
392
+
393
+ debug(`pushToRemote: detected ${changes.length} changes, ${moves.length} moves, ${remainingChanges.length} remaining`)
394
+
395
+ // Push local changes to remote
396
+ debug("pushToRemote: pushing local changes")
397
+ const pushResult = await this.pushLocalChanges(
398
+ remainingChanges,
399
+ moves,
400
+ snapshot
401
+ )
402
+
403
+ result.filesChanged += pushResult.filesChanged
404
+ result.directoriesChanged += pushResult.directoriesChanged
405
+ result.errors.push(...pushResult.errors)
406
+ result.warnings.push(...pushResult.warnings)
407
+
408
+ debug(`pushToRemote: push complete - ${pushResult.filesChanged} files, ${pushResult.directoriesChanged} dirs changed`)
409
+
410
+ // Touch root directory if any changes were made
411
+ const hasChanges =
412
+ result.filesChanged > 0 || result.directoriesChanged > 0
413
+ if (hasChanges) {
414
+ await this.touchRootDirectory(snapshot)
415
+ }
416
+
417
+ // Wait for network sync (upload to server)
418
+ if (this.config.sync_enabled) {
419
+ try {
420
+ // Ensure root directory handle is tracked for sync
421
+ if (snapshot.rootDirectoryUrl) {
422
+ const rootHandle =
423
+ await this.repo.find<DirectoryDocument>(
424
+ snapshot.rootDirectoryUrl
425
+ )
426
+ this.handlesByPath.set("", rootHandle)
427
+ }
428
+
429
+ if (this.handlesByPath.size > 0) {
430
+ const allHandles = Array.from(
431
+ this.handlesByPath.values()
432
+ )
433
+ debug(`pushToRemote: waiting for ${allHandles.length} handles to sync to server`)
434
+ out.update(`Uploading ${allHandles.length} documents to sync server`)
435
+ const {failed} = await waitForSync(
436
+ allHandles,
437
+ this.config.sync_server_storage_id
438
+ )
439
+
440
+ // Recreate failed documents and retry once
441
+ if (failed.length > 0) {
442
+ debug(`pushToRemote: ${failed.length} documents failed, recreating`)
443
+ out.update(`Recreating ${failed.length} failed documents`)
444
+ const retryHandles = await this.recreateFailedDocuments(failed, snapshot)
445
+ if (retryHandles.length > 0) {
446
+ debug(`pushToRemote: retrying ${retryHandles.length} recreated handles`)
447
+ out.update(`Retrying ${retryHandles.length} recreated documents`)
448
+ const retry = await waitForSync(
449
+ retryHandles,
450
+ this.config.sync_server_storage_id
451
+ )
452
+ if (retry.failed.length > 0) {
453
+ result.warnings.push(`${retry.failed.length} documents still failed after recreation`)
454
+ }
455
+ }
456
+ }
457
+
458
+ debug("pushToRemote: sync to server complete")
459
+ }
460
+ } catch (error) {
461
+ debug(`pushToRemote: network sync error: ${error}`)
462
+ out.taskLine(`Network sync failed: ${error}`, true)
463
+ result.warnings.push(`Network sync failed: ${error}`)
464
+ }
465
+ }
466
+
467
+ // Save updated snapshot
468
+ await this.snapshotManager.save(snapshot)
469
+
470
+ result.success = result.errors.length === 0
471
+ return result
472
+ } catch (error) {
473
+ result.errors.push({
474
+ path: "push",
475
+ operation: "push-to-remote",
476
+ error: error as Error,
477
+ recoverable: false,
478
+ })
479
+ return result
480
+ }
481
+ }
482
+
251
483
  /**
252
484
  * Run full bidirectional sync
253
485
  */
@@ -340,15 +572,36 @@ export class SyncEngine {
340
572
  const handlePaths = Array.from(this.handlesByPath.keys())
341
573
  debug(`sync: waiting for ${allHandles.length} handles to sync to server: ${handlePaths.slice(0, 10).map(p => p || "(root)").join(", ")}${handlePaths.length > 10 ? ` ...and ${handlePaths.length - 10} more` : ""}`)
342
574
  out.update(`Uploading ${allHandles.length} documents to sync server`)
343
- await waitForSync(
575
+ const {failed} = await waitForSync(
344
576
  allHandles,
345
577
  this.config.sync_server_storage_id
346
578
  )
579
+
580
+ // Recreate failed documents and retry once
581
+ if (failed.length > 0) {
582
+ debug(`sync: ${failed.length} documents failed, recreating`)
583
+ out.update(`Recreating ${failed.length} failed documents`)
584
+ const retryHandles = await this.recreateFailedDocuments(failed, snapshot)
585
+ if (retryHandles.length > 0) {
586
+ debug(`sync: retrying ${retryHandles.length} recreated handles`)
587
+ out.update(`Retrying ${retryHandles.length} recreated documents`)
588
+ const retry = await waitForSync(
589
+ retryHandles,
590
+ this.config.sync_server_storage_id
591
+ )
592
+ if (retry.failed.length > 0) {
593
+ result.warnings.push(`${retry.failed.length} documents still failed after recreation`)
594
+ }
595
+ }
596
+ }
597
+
347
598
  debug("sync: all handles synced to server")
348
599
  }
349
600
 
350
601
  // Wait for bidirectional sync to stabilize
351
- debug("sync: waiting for bidirectional sync to stabilize")
602
+ // Use tracked handles for post-push check (cheaper than full tree scan)
603
+ const changedHandles = Array.from(this.handlesByPath.values())
604
+ debug(`sync: waiting for bidirectional sync to stabilize (${changedHandles.length} tracked handles)`)
352
605
  out.update("Waiting for bidirectional sync to stabilize")
353
606
  await waitForBidirectionalSync(
354
607
  this.repo,
@@ -358,6 +611,7 @@ export class SyncEngine {
358
611
  timeoutMs: BIDIRECTIONAL_SYNC_TIMEOUT_MS,
359
612
  pollIntervalMs: 100,
360
613
  stableChecksRequired: 3,
614
+ handles: changedHandles.length > 0 ? changedHandles : undefined,
361
615
  }
362
616
  )
363
617
  } catch (error) {
@@ -606,6 +860,9 @@ export class SyncEngine {
606
860
  head: handle.heads(),
607
861
  extension: getFileExtension(change.path),
608
862
  mimeType: getEnhancedMimeType(change.path),
863
+ ...(this.isArtifactPath(change.path) && change.localContent
864
+ ? {contentHash: contentHash(change.localContent)}
865
+ : {}),
609
866
  }
610
867
  )
611
868
  result.filesChanged++
@@ -828,42 +1085,66 @@ export class SyncEngine {
828
1085
 
829
1086
  // 3) Update the FileDocument name and content to match new location/state
830
1087
  try {
831
- // Use plain URL for mutable handle
832
- const handle = await this.repo.find<FileDocument>(
833
- getPlainUrl(fromEntry.url)
834
- )
835
- const heads = fromEntry.head
836
-
837
- // Update both name and content (if content changed during move)
838
- changeWithOptionalHeads(handle, heads, (doc: FileDocument) => {
839
- doc.name = toFileName
840
-
841
- // If new content is provided, update it (handles move + modification case)
842
- if (move.newContent !== undefined) {
843
- if (typeof move.newContent === "string") {
844
- updateTextContent(doc, ["content"], move.newContent)
845
- } else {
846
- doc.content = move.newContent
847
- }
1088
+ let entryUrl: AutomergeUrl
1089
+ let finalHeads: UrlHeads
1090
+
1091
+ if (this.isArtifactPath(move.toPath)) {
1092
+ // Artifact files use RawString — no diffing needed, just create a fresh doc
1093
+ const content = move.newContent !== undefined
1094
+ ? move.newContent
1095
+ : readDocContent((await (await this.repo.find<FileDocument>(getPlainUrl(fromEntry.url))).doc())?.content)
1096
+ const fakeChange: DetectedChange = {
1097
+ path: move.toPath,
1098
+ changeType: ChangeType.LOCAL_ONLY,
1099
+ fileType: content != null && typeof content === "string" ? FileType.TEXT : FileType.BINARY,
1100
+ localContent: content,
1101
+ remoteContent: null,
848
1102
  }
849
- })
1103
+ const newHandle = await this.createRemoteFile(fakeChange)
1104
+ if (!newHandle) return
1105
+ entryUrl = this.getEntryUrl(newHandle, move.toPath)
1106
+ finalHeads = newHandle.heads()
1107
+ } else {
1108
+ // Use plain URL for mutable handle
1109
+ const handle = await this.repo.find<FileDocument>(
1110
+ getPlainUrl(fromEntry.url)
1111
+ )
1112
+ const heads = fromEntry.head
1113
+
1114
+ // Update both name and content (if content changed during move)
1115
+ changeWithOptionalHeads(handle, heads, (doc: FileDocument) => {
1116
+ doc.name = toFileName
1117
+
1118
+ // If new content is provided, update it (handles move + modification case)
1119
+ if (move.newContent !== undefined) {
1120
+ if (typeof move.newContent === "string") {
1121
+ updateTextContent(doc, ["content"], move.newContent)
1122
+ } else {
1123
+ doc.content = move.newContent
1124
+ }
1125
+ }
1126
+ })
850
1127
 
851
- // Get appropriate URL for directory entry
852
- const entryUrl = this.getEntryUrl(handle, move.toPath)
1128
+ entryUrl = this.getEntryUrl(handle, move.toPath)
1129
+ finalHeads = handle.heads()
1130
+
1131
+ // Track file handle for network sync
1132
+ this.handlesByPath.set(move.toPath, handle)
1133
+ }
853
1134
 
854
1135
  // 4) Add file entry to destination directory
855
1136
  await this.addFileToDirectory(snapshot, move.toPath, entryUrl)
856
1137
 
857
- // Track file handle for network sync
858
- this.handlesByPath.set(move.toPath, handle)
859
-
860
1138
  // 5) Update snapshot entries
861
1139
  this.snapshotManager.removeFileEntry(snapshot, move.fromPath)
862
1140
  this.snapshotManager.updateFileEntry(snapshot, move.toPath, {
863
1141
  ...fromEntry,
864
1142
  path: joinAndNormalizePath(this.rootPath, move.toPath),
865
1143
  url: entryUrl,
866
- head: handle.heads(),
1144
+ head: finalHeads,
1145
+ ...(this.isArtifactPath(move.toPath) && move.newContent != null
1146
+ ? {contentHash: contentHash(move.newContent)}
1147
+ : {}),
867
1148
  })
868
1149
  } catch (e) {
869
1150
  // Failed to update file name - file may have been deleted
@@ -968,6 +1249,9 @@ export class SyncEngine {
968
1249
  head: newHandle.heads(),
969
1250
  extension: getFileExtension(filePath),
970
1251
  mimeType: getEnhancedMimeType(filePath),
1252
+ ...(this.isArtifactPath(filePath)
1253
+ ? {contentHash: contentHash(content)}
1254
+ : {}),
971
1255
  })
972
1256
  }
973
1257
  return
@@ -1022,27 +1306,14 @@ export class SyncEngine {
1022
1306
  * Delete remote file document
1023
1307
  */
1024
1308
  private async deleteRemoteFile(
1025
- url: AutomergeUrl,
1026
- snapshot?: SyncSnapshot,
1027
- filePath?: string
1309
+ _url: AutomergeUrl,
1310
+ _snapshot?: SyncSnapshot,
1311
+ _filePath?: string
1028
1312
  ): Promise<void> {
1029
- // In Automerge, we don't actually delete documents
1030
- // They become orphaned and will be garbage collected
1031
- // For now, we just mark them as deleted by clearing content
1032
- // Use plain URL for mutable handle
1033
- const handle = await this.repo.find<FileDocument>(getPlainUrl(url))
1034
- // const doc = await handle.doc(); // no longer needed
1035
- let heads
1036
- if (snapshot && filePath) {
1037
- heads = snapshot.files.get(filePath)?.head
1038
- }
1039
- changeWithOptionalHeads(handle, heads, (doc: FileDocument) => {
1040
- if (doc.content instanceof Uint8Array) {
1041
- doc.content = new Uint8Array(0)
1042
- } else {
1043
- updateTextContent(doc, ["content"], "")
1044
- }
1045
- })
1313
+ // In Automerge, we don't actually delete documents.
1314
+ // The file entry is removed from its parent directory, making the
1315
+ // document orphaned. Clearing content via splice is expensive for
1316
+ // large text files (every character is a CRDT op), so we skip it.
1046
1317
  }
1047
1318
 
1048
1319
  /**
@@ -101,6 +101,11 @@ export interface StatusOptions extends CommandOptions {
101
101
  verbose?: boolean;
102
102
  }
103
103
 
104
+ /**
105
+ * Push command specific options
106
+ */
107
+ export interface PushOptions extends CommandOptions {}
108
+
104
109
  /**
105
110
  * Watch command specific options
106
111
  */
@@ -9,6 +9,7 @@ export interface SnapshotFileEntry {
9
9
  head: UrlHeads; // Document head at last sync
10
10
  extension: string; // File extension
11
11
  mimeType: string; // MIME type
12
+ contentHash?: string; // SHA-256 of content at last sync (used by artifact files to skip remote reads)
12
13
  }
13
14
 
14
15
  /**
@@ -1,3 +1,13 @@
1
+ import { createHash } from "crypto";
2
+
3
+ /**
4
+ * Compute a SHA-256 hash of file content.
5
+ * Used to detect local changes for artifact files without reading remote docs.
6
+ */
7
+ export function contentHash(content: string | Uint8Array): string {
8
+ return createHash("sha256").update(content).digest("hex");
9
+ }
10
+
1
11
  /**
2
12
  * Compare two content pieces for equality
3
13
  */