pushwork 1.0.16 → 1.0.18

This diff shows the changes between publicly released versions of the package as published to a supported registry. It is provided for informational purposes only and reflects the package contents exactly as they appear in the public registry.
@@ -37,6 +37,11 @@ import {ChangeDetector} from "./change-detection"
37
37
  import {MoveDetector} from "./move-detection"
38
38
  import {out} from "../utils/output"
39
39
 
40
+ const isDebug = !!process.env.DEBUG
41
+ function debug(...args: any[]) {
42
+ if (isDebug) console.error("[pushwork:engine]", ...args)
43
+ }
44
+
40
45
  /**
41
46
  * Apply a change to a document handle, using changeAt when heads are available
42
47
  * to branch from a known version, otherwise falling back to change.
@@ -105,6 +110,30 @@ export class SyncEngine {
105
110
  return stringifyAutomergeUrl({documentId, heads})
106
111
  }
107
112
 
113
+ /**
114
+ * Determine if a file path is inside an artifact directory.
115
+ * Artifact files are stored as immutable strings (RawString) and
116
+ * referenced with versioned URLs in directory entries.
117
+ */
118
+ private isArtifactPath(filePath: string): boolean {
119
+ const artifactDirs = this.config.artifact_directories || []
120
+ return artifactDirs.some(
121
+ dir => filePath === dir || filePath.startsWith(dir + "/")
122
+ )
123
+ }
124
+
125
+ /**
126
+ * Get the appropriate URL for a directory entry.
127
+ * Artifact paths get versioned URLs (with heads) for exact version fetching.
128
+ * Non-artifact paths get plain URLs for collaborative editing.
129
+ */
130
+ private getEntryUrl(handle: DocHandle<unknown>, filePath: string): AutomergeUrl {
131
+ if (this.isArtifactPath(filePath)) {
132
+ return this.getVersionedUrl(handle)
133
+ }
134
+ return getPlainUrl(handle.url)
135
+ }
136
+
108
137
  /**
109
138
  * Set the root directory URL in the snapshot
110
139
  */
@@ -240,8 +269,11 @@ export class SyncEngine {
240
269
  (await this.snapshotManager.load()) ||
241
270
  this.snapshotManager.createEmpty()
242
271
 
272
+ debug(`sync: rootDirectoryUrl=${snapshot.rootDirectoryUrl?.slice(0, 30)}..., files=${snapshot.files.size}, dirs=${snapshot.directories.size}`)
273
+
243
274
  // Wait for initial sync to receive any pending remote changes
244
275
  if (this.config.sync_enabled && snapshot.rootDirectoryUrl) {
276
+ debug("sync: waiting for initial bidirectional sync")
245
277
  try {
246
278
  await waitForBidirectionalSync(
247
279
  this.repo,
@@ -259,6 +291,7 @@ export class SyncEngine {
259
291
  }
260
292
 
261
293
  // Detect all changes
294
+ debug("sync: detecting changes")
262
295
  const changes = await this.changeDetector.detectChanges(snapshot)
263
296
 
264
297
  // Detect moves
@@ -267,7 +300,10 @@ export class SyncEngine {
267
300
  snapshot
268
301
  )
269
302
 
303
+ debug(`sync: detected ${changes.length} changes, ${moves.length} moves, ${remainingChanges.length} remaining`)
304
+
270
305
  // Phase 1: Push local changes to remote
306
+ debug("sync: phase 1 - pushing local changes")
271
307
  const phase1Result = await this.pushLocalChanges(
272
308
  remainingChanges,
273
309
  moves,
@@ -279,6 +315,8 @@ export class SyncEngine {
279
315
  result.errors.push(...phase1Result.errors)
280
316
  result.warnings.push(...phase1Result.warnings)
281
317
 
318
+ debug(`sync: phase 1 complete - ${phase1Result.filesChanged} files, ${phase1Result.directoriesChanged} dirs changed`)
319
+
282
320
  // Wait for network sync (important for clone scenarios)
283
321
  if (this.config.sync_enabled) {
284
322
  try {
@@ -296,6 +334,7 @@ export class SyncEngine {
296
334
  const allHandles = Array.from(
297
335
  this.handlesByPath.values()
298
336
  )
337
+ debug(`sync: waiting for ${allHandles.length} handles to sync to server`)
299
338
  await waitForSync(
300
339
  allHandles,
301
340
  this.config.sync_server_storage_id
@@ -303,6 +342,7 @@ export class SyncEngine {
303
342
  }
304
343
 
305
344
  // Wait for bidirectional sync to stabilize
345
+ debug("sync: waiting for bidirectional sync to stabilize")
306
346
  await waitForBidirectionalSync(
307
347
  this.repo,
308
348
  snapshot.rootDirectoryUrl,
@@ -314,12 +354,14 @@ export class SyncEngine {
314
354
  }
315
355
  )
316
356
  } catch (error) {
357
+ debug(`sync: network sync error: ${error}`)
317
358
  out.taskLine(`Network sync failed: ${error}`, true)
318
359
  result.warnings.push(`Network sync failed: ${error}`)
319
360
  }
320
361
  }
321
362
 
322
363
  // Re-detect changes after network sync for fresh state
364
+ debug("sync: re-detecting changes after network sync")
323
365
  const freshChanges = await this.changeDetector.detectChanges(snapshot)
324
366
  const freshRemoteChanges = freshChanges.filter(
325
367
  c =>
@@ -327,6 +369,7 @@ export class SyncEngine {
327
369
  c.changeType === ChangeType.BOTH_CHANGED
328
370
  )
329
371
 
372
+ debug(`sync: phase 2 - pulling ${freshRemoteChanges.length} remote changes`)
330
373
  // Phase 2: Pull remote changes to local using fresh detection
331
374
  const phase2Result = await this.pullRemoteChanges(
332
375
  freshRemoteChanges,
@@ -508,8 +551,8 @@ export class SyncEngine {
508
551
  // New file
509
552
  const handle = await this.createRemoteFile(change)
510
553
  if (handle) {
511
- const versionedUrl = this.getVersionedUrl(handle)
512
- newEntries.push({name: fileName, url: versionedUrl})
554
+ const entryUrl = this.getEntryUrl(handle, change.path)
555
+ newEntries.push({name: fileName, url: entryUrl})
513
556
  this.snapshotManager.updateFileEntry(
514
557
  snapshot,
515
558
  change.path,
@@ -518,7 +561,7 @@ export class SyncEngine {
518
561
  this.rootPath,
519
562
  change.path
520
563
  ),
521
- url: versionedUrl,
564
+ url: entryUrl,
522
565
  head: handle.heads(),
523
566
  extension: getFileExtension(change.path),
524
567
  mimeType: getEnhancedMimeType(change.path),
@@ -534,7 +577,7 @@ export class SyncEngine {
534
577
  snapshot,
535
578
  change.path
536
579
  )
537
- // Get current versioned URL (updateRemoteFile updates snapshot)
580
+ // Get current entry URL (updateRemoteFile updates snapshot)
538
581
  const updatedFileEntry = snapshot.files.get(change.path)
539
582
  if (updatedFileEntry) {
540
583
  const fileHandle =
@@ -543,7 +586,7 @@ export class SyncEngine {
543
586
  )
544
587
  updatedEntries.push({
545
588
  name: fileName,
546
- url: this.getVersionedUrl(fileHandle),
589
+ url: this.getEntryUrl(fileHandle, change.path),
547
590
  })
548
591
  }
549
592
  result.filesChanged++
@@ -574,7 +617,7 @@ export class SyncEngine {
574
617
  )
575
618
  subdirUpdates.push({
576
619
  name: childName,
577
- url: this.getVersionedUrl(childHandle),
620
+ url: this.getEntryUrl(childHandle, modifiedDir),
578
621
  })
579
622
  }
580
623
  }
@@ -688,12 +731,11 @@ export class SyncEngine {
688
731
  )
689
732
 
690
733
  if (fileEntry) {
691
- // Get versioned URL from handle (includes heads)
692
734
  const fileHandle = await this.repo.find<FileDocument>(fileEntry.url)
693
- const versionedUrl = this.getVersionedUrl(fileHandle)
735
+ const entryUrl = this.getEntryUrl(fileHandle, change.path)
694
736
  this.snapshotManager.updateFileEntry(snapshot, change.path, {
695
737
  path: localPath,
696
- url: versionedUrl,
738
+ url: entryUrl,
697
739
  head: change.remoteHead,
698
740
  extension: getFileExtension(change.path),
699
741
  mimeType: getEnhancedMimeType(change.path),
@@ -755,11 +797,11 @@ export class SyncEngine {
755
797
  }
756
798
  })
757
799
 
758
- // Get versioned URL after changes (includes current heads)
759
- const versionedUrl = this.getVersionedUrl(handle)
800
+ // Get appropriate URL for directory entry
801
+ const entryUrl = this.getEntryUrl(handle, move.toPath)
760
802
 
761
- // 4) Add file entry to destination directory with versioned URL
762
- await this.addFileToDirectory(snapshot, move.toPath, versionedUrl)
803
+ // 4) Add file entry to destination directory
804
+ await this.addFileToDirectory(snapshot, move.toPath, entryUrl)
763
805
 
764
806
  // Track file handle for network sync
765
807
  this.handlesByPath.set(move.toPath, handle)
@@ -769,7 +811,7 @@ export class SyncEngine {
769
811
  this.snapshotManager.updateFileEntry(snapshot, move.toPath, {
770
812
  ...fromEntry,
771
813
  path: joinAndNormalizePath(this.rootPath, move.toPath),
772
- url: versionedUrl,
814
+ url: entryUrl,
773
815
  head: handle.heads(),
774
816
  })
775
817
  } catch (e) {
@@ -790,15 +832,21 @@ export class SyncEngine {
790
832
  if (change.localContent === null) return null
791
833
 
792
834
  const isText = this.isTextContent(change.localContent)
835
+ const isArtifact = this.isArtifactPath(change.path)
793
836
 
794
- // Create initial document structure with empty string for text content.
795
- // We then splice in the actual content so it's stored as collaborative text.
837
+ // For artifact files, store text as RawString (immutable snapshot).
838
+ // For regular files, store as collaborative text (empty string + splice).
796
839
  const fileDoc: FileDocument = {
797
840
  "@patchwork": {type: "file"},
798
841
  name: change.path.split("/").pop() || "",
799
842
  extension: getFileExtension(change.path),
800
843
  mimeType: getEnhancedMimeType(change.path),
801
- content: isText ? "" : change.localContent,
844
+ content:
845
+ isText && isArtifact
846
+ ? new A.RawString(change.localContent as string) as unknown as string
847
+ : isText
848
+ ? ""
849
+ : change.localContent,
802
850
  metadata: {
803
851
  permissions: 0o644,
804
852
  },
@@ -806,8 +854,8 @@ export class SyncEngine {
806
854
 
807
855
  const handle = this.repo.create(fileDoc)
808
856
 
809
- // For text files, splice in the content so it's stored as collaborative text
810
- if (isText && typeof change.localContent === "string") {
857
+ // For non-artifact text files, splice in the content so it's stored as collaborative text
858
+ if (isText && !isArtifact && typeof change.localContent === "string") {
811
859
  handle.change((doc: FileDocument) => {
812
860
  updateTextContent(doc, ["content"], change.localContent as string)
813
861
  })
@@ -836,14 +884,21 @@ export class SyncEngine {
836
884
  const doc = await handle.doc()
837
885
  const rawContent = doc?.content
838
886
 
839
- // If the existing content is an immutable string, we can't splice into it.
840
- // Throw away the old document and create a brand new one with mutable text.
841
- // The caller's batch directory update will pick up the new URL from snapshot.
842
- if (rawContent != null && A.isImmutableString(rawContent)) {
843
- out.taskLine(
844
- `Replacing immutable string document for ${filePath}`,
845
- true
846
- )
887
+ // For artifact paths, always replace with a new document containing RawString.
888
+ // For non-artifact paths with immutable strings, replace with mutable text.
889
+ // In both cases we create a new document and update the snapshot URL.
890
+ const isArtifact = this.isArtifactPath(filePath)
891
+ if (
892
+ isArtifact ||
893
+ !doc ||
894
+ (rawContent != null && A.isImmutableString(rawContent))
895
+ ) {
896
+ if (!isArtifact) {
897
+ out.taskLine(
898
+ `Replacing ${!doc ? 'unavailable' : 'immutable string'} document for ${filePath}`,
899
+ true
900
+ )
901
+ }
847
902
  const fakeChange: DetectedChange = {
848
903
  path: filePath,
849
904
  changeType: ChangeType.LOCAL_ONLY,
@@ -855,10 +910,10 @@ export class SyncEngine {
855
910
  }
856
911
  const newHandle = await this.createRemoteFile(fakeChange)
857
912
  if (newHandle) {
858
- const versionedUrl = this.getVersionedUrl(newHandle)
913
+ const entryUrl = this.getEntryUrl(newHandle, filePath)
859
914
  this.snapshotManager.updateFileEntry(snapshot, filePath, {
860
915
  path: joinAndNormalizePath(this.rootPath, filePath),
861
- url: versionedUrl,
916
+ url: entryUrl,
862
917
  head: newHandle.heads(),
863
918
  extension: getFileExtension(filePath),
864
919
  mimeType: getEnhancedMimeType(filePath),
@@ -1040,19 +1095,18 @@ export class SyncEngine {
1040
1095
  // Track discovered directory for sync
1041
1096
  this.handlesByPath.set(directoryPath, childDirHandle)
1042
1097
 
1043
- // Get versioned URL for storage (includes current heads)
1044
- const versionedUrl = this.getVersionedUrl(childDirHandle)
1098
+ // Get appropriate URL for directory entry
1099
+ const entryUrl = this.getEntryUrl(childDirHandle, directoryPath)
1045
1100
 
1046
- // Update snapshot with discovered directory using versioned URL
1101
+ // Update snapshot with discovered directory
1047
1102
  this.snapshotManager.updateDirectoryEntry(snapshot, directoryPath, {
1048
1103
  path: joinAndNormalizePath(this.rootPath, directoryPath),
1049
- url: versionedUrl,
1104
+ url: entryUrl,
1050
1105
  head: childDirHandle.heads(),
1051
1106
  entries: [],
1052
1107
  })
1053
1108
 
1054
- // Return versioned URL (callers use getPlainUrl() when they need to modify)
1055
- return versionedUrl
1109
+ return entryUrl
1056
1110
  } catch (resolveErr) {
1057
1111
  // Failed to resolve directory - fall through to create a fresh directory document
1058
1112
  }
@@ -1070,8 +1124,8 @@ export class SyncEngine {
1070
1124
 
1071
1125
  const dirHandle = this.repo.create(dirDoc)
1072
1126
 
1073
- // Get versioned URL for the new directory (includes heads)
1074
- const versionedDirUrl = this.getVersionedUrl(dirHandle)
1127
+ // Get appropriate URL for directory entry
1128
+ const dirEntryUrl = this.getEntryUrl(dirHandle, directoryPath)
1075
1129
 
1076
1130
  // Add this directory to its parent
1077
1131
  // Use plain URL for mutable handle
@@ -1090,7 +1144,7 @@ export class SyncEngine {
1090
1144
  doc.docs.push({
1091
1145
  name: currentDirName,
1092
1146
  type: "folder",
1093
- url: versionedDirUrl,
1147
+ url: dirEntryUrl,
1094
1148
  })
1095
1149
  didChange = true
1096
1150
  }
@@ -1107,16 +1161,15 @@ export class SyncEngine {
1107
1161
  }
1108
1162
  }
1109
1163
 
1110
- // Update snapshot with new directory (use versioned URL for storage)
1164
+ // Update snapshot with new directory
1111
1165
  this.snapshotManager.updateDirectoryEntry(snapshot, directoryPath, {
1112
1166
  path: joinAndNormalizePath(this.rootPath, directoryPath),
1113
- url: versionedDirUrl,
1167
+ url: dirEntryUrl,
1114
1168
  head: dirHandle.heads(),
1115
1169
  entries: [],
1116
1170
  })
1117
1171
 
1118
- // Return versioned URL (callers use getPlainUrl() when they need to modify)
1119
- return versionedDirUrl
1172
+ return dirEntryUrl
1120
1173
  }
1121
1174
 
1122
1175
  /**
@@ -14,6 +14,7 @@ export interface GlobalConfig {
14
14
  sync_server?: string;
15
15
  sync_server_storage_id?: StorageId;
16
16
  exclude_patterns: string[];
17
+ artifact_directories: string[];
17
18
  sync: {
18
19
  move_detection_threshold: number;
19
20
  };
@@ -9,6 +9,11 @@ import { out } from "./output";
9
9
  import { DirectoryDocument } from "../types";
10
10
  import { getPlainUrl } from "./directory";
11
11
 
12
+ const isDebug = !!process.env.DEBUG;
13
+ function debug(...args: any[]) {
14
+ if (isDebug) console.error("[pushwork:sync]", ...args);
15
+ }
16
+
12
17
  /**
13
18
  * Wait for bidirectional sync to stabilize.
14
19
  * This function waits until document heads stop changing, indicating that
@@ -43,6 +48,8 @@ export async function waitForBidirectionalSync(
43
48
  let lastSeenHeads = new Map<string, string>();
44
49
  let stableCount = 0;
45
50
 
51
+ debug(`waitForBidirectionalSync: starting (timeout=${timeoutMs}ms, stableChecks=${stableChecksRequired})`);
52
+
46
53
  while (Date.now() - startTime < timeoutMs) {
47
54
  // Get current heads for all documents in the directory hierarchy
48
55
  const currentHeads = await getAllDocumentHeads(repo, rootDirectoryUrl);
@@ -52,10 +59,15 @@ export async function waitForBidirectionalSync(
52
59
 
53
60
  if (isStable) {
54
61
  stableCount++;
62
+ debug(`waitForBidirectionalSync: stable check ${stableCount}/${stableChecksRequired} (${currentHeads.size} docs)`);
55
63
  if (stableCount >= stableChecksRequired) {
64
+ debug(`waitForBidirectionalSync: converged in ${Date.now() - startTime}ms`);
56
65
  return; // Converged!
57
66
  }
58
67
  } else {
68
+ if (stableCount > 0) {
69
+ debug(`waitForBidirectionalSync: heads changed, resetting stable count`);
70
+ }
59
71
  stableCount = 0;
60
72
  lastSeenHeads = currentHeads;
61
73
  }
@@ -65,6 +77,7 @@ export async function waitForBidirectionalSync(
65
77
 
66
78
  // Timeout - but don't throw, just log a warning
67
79
  // The sync may still work, we just couldn't confirm stability
80
+ debug(`waitForBidirectionalSync: timed out after ${timeoutMs}ms`);
68
81
  out.taskLine(`Sync stability check timed out after ${timeoutMs}ms`, true);
69
82
  }
70
83
 
@@ -149,20 +162,22 @@ function headsMapEqual(
149
162
  export async function waitForSync(
150
163
  handlesToWaitOn: DocHandle<unknown>[],
151
164
  syncServerStorageId?: StorageId,
152
- timeoutMs: number = 1000000,
165
+ timeoutMs: number = 60000,
153
166
  ): Promise<void> {
154
167
  const startTime = Date.now();
155
168
 
156
169
  if (!syncServerStorageId) {
157
- // No sync server storage ID - skip network sync
170
+ debug("waitForSync: no sync server storage ID, skipping");
158
171
  return;
159
172
  }
160
173
 
161
174
  if (handlesToWaitOn.length === 0) {
162
- // No documents to sync
175
+ debug("waitForSync: no documents to sync");
163
176
  return;
164
177
  }
165
178
 
179
+ debug(`waitForSync: waiting for ${handlesToWaitOn.length} documents (timeout=${timeoutMs}ms)`);
180
+
166
181
  let alreadySynced = 0;
167
182
 
168
183
  const promises = handlesToWaitOn.map((handle) => {
@@ -174,12 +189,14 @@ export async function waitForSync(
174
189
 
175
190
  if (wasAlreadySynced) {
176
191
  alreadySynced++;
192
+ debug(`waitForSync: ${handle.url.slice(0, 20)}... already synced`);
177
193
  return Promise.resolve();
178
194
  }
179
195
 
196
+ debug(`waitForSync: ${handle.url.slice(0, 20)}... waiting for convergence (remoteHeads=${remoteHeads ? 'present' : 'missing'})`);
197
+
180
198
  // Wait for convergence
181
199
  return new Promise<void>((resolve, reject) => {
182
- // TODO: can we delete this polling?
183
200
  let pollInterval: NodeJS.Timeout;
184
201
 
185
202
  const cleanup = () => {
@@ -189,11 +206,13 @@ export async function waitForSync(
189
206
  };
190
207
 
191
208
  const onConverged = () => {
209
+ debug(`waitForSync: ${handle.url.slice(0, 20)}... converged in ${Date.now() - startTime}ms`);
192
210
  cleanup();
193
211
  resolve();
194
212
  };
195
213
 
196
214
  const timeout = setTimeout(() => {
215
+ debug(`waitForSync: ${handle.url.slice(0, 20)}... timed out after ${timeoutMs}ms`);
197
216
  cleanup();
198
217
  reject(
199
218
  new Error(
@@ -243,10 +262,10 @@ export async function waitForSync(
243
262
 
244
263
  try {
245
264
  await Promise.all(promises);
265
+ debug(`waitForSync: all ${handlesToWaitOn.length} documents synced in ${Date.now() - startTime}ms (${alreadySynced} were already synced)`);
246
266
  } catch (error) {
247
267
  const elapsed = Date.now() - startTime;
248
- out.errorBlock("FAILED", `after ${elapsed}ms`);
249
- out.crash(error);
268
+ debug(`waitForSync: failed after ${elapsed}ms: ${error}`);
250
269
  throw error;
251
270
  }
252
271
  }
@@ -106,6 +106,7 @@ describe("Exclude Patterns", () => {
106
106
  sync_server: "wss://test.server.com",
107
107
  sync_enabled: true,
108
108
  exclude_patterns: [".git", "*.tmp", ".pushwork", "*.env"],
109
+ artifact_directories: ["dist"],
109
110
  sync: {
110
111
  move_detection_threshold: 0.8,
111
112
  },
@@ -161,7 +161,7 @@ describe("Pushwork Fuzzer", () => {
161
161
  expect(contentA).toBe("Hello, Pushwork!");
162
162
  expect(contentB).toBe("Hello, Pushwork!");
163
163
  expect(contentA).toBe(contentB);
164
- }, 10000); // 10 second timeout for this test
164
+ }, 30000); // 30 second timeout for this test
165
165
  });
166
166
 
167
167
  describe("Manual Fuzzing Tests", () => {
@@ -27,6 +27,7 @@ describe("Sync Flow Integration", () => {
27
27
  sync_server: "wss://test.server.com",
28
28
  sync_enabled: true,
29
29
  exclude_patterns: [".git", "*.tmp"],
30
+ artifact_directories: ["dist"],
30
31
  sync: {
31
32
  move_detection_threshold: 0.8,
32
33
  },
@@ -49,6 +50,7 @@ describe("Sync Flow Integration", () => {
49
50
  sync_server: "wss://local.server.com",
50
51
  sync_enabled: true,
51
52
  exclude_patterns: [".git", "*.tmp"],
53
+ artifact_directories: ["dist"],
52
54
  sync: {
53
55
  move_detection_threshold: 0.9,
54
56
  },