@trestleinc/replicate 0.1.0 → 1.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (94)
  1. package/README.md +356 -420
  2. package/dist/client/collection.d.ts +78 -76
  3. package/dist/client/errors.d.ts +59 -0
  4. package/dist/client/index.d.ts +22 -18
  5. package/dist/client/logger.d.ts +0 -1
  6. package/dist/client/merge.d.ts +77 -0
  7. package/dist/client/persistence/adapters/index.d.ts +8 -0
  8. package/dist/client/persistence/adapters/opsqlite.d.ts +46 -0
  9. package/dist/client/persistence/adapters/sqljs.d.ts +83 -0
  10. package/dist/client/persistence/index.d.ts +49 -0
  11. package/dist/client/persistence/indexeddb.d.ts +17 -0
  12. package/dist/client/persistence/memory.d.ts +16 -0
  13. package/dist/client/persistence/sqlite-browser.d.ts +51 -0
  14. package/dist/client/persistence/sqlite-level.d.ts +63 -0
  15. package/dist/client/persistence/sqlite-rn.d.ts +36 -0
  16. package/dist/client/persistence/sqlite.d.ts +47 -0
  17. package/dist/client/persistence/types.d.ts +42 -0
  18. package/dist/client/prose.d.ts +56 -0
  19. package/dist/client/replicate.d.ts +40 -0
  20. package/dist/client/services/checkpoint.d.ts +18 -0
  21. package/dist/client/services/reconciliation.d.ts +24 -0
  22. package/dist/component/_generated/api.d.ts +35 -0
  23. package/dist/component/_generated/api.js +3 -3
  24. package/dist/component/_generated/component.d.ts +89 -0
  25. package/dist/component/_generated/component.js +0 -0
  26. package/dist/component/_generated/dataModel.d.ts +45 -0
  27. package/dist/component/_generated/dataModel.js +0 -0
  28. package/{src → dist}/component/_generated/server.d.ts +9 -38
  29. package/dist/component/convex.config.d.ts +2 -2
  30. package/dist/component/convex.config.js +2 -1
  31. package/dist/component/logger.d.ts +8 -0
  32. package/dist/component/logger.js +30 -0
  33. package/dist/component/public.d.ts +36 -61
  34. package/dist/component/public.js +232 -58
  35. package/dist/component/schema.d.ts +32 -8
  36. package/dist/component/schema.js +19 -6
  37. package/dist/index.js +1553 -308
  38. package/dist/server/builder.d.ts +94 -0
  39. package/dist/server/index.d.ts +14 -17
  40. package/dist/server/schema.d.ts +17 -63
  41. package/dist/server/storage.d.ts +80 -0
  42. package/dist/server.js +268 -83
  43. package/dist/shared/index.d.ts +5 -0
  44. package/dist/shared/index.js +2 -0
  45. package/dist/shared/types.d.ts +50 -0
  46. package/dist/shared/types.js +6 -0
  47. package/dist/shared.js +6 -0
  48. package/package.json +59 -49
  49. package/src/client/collection.ts +877 -450
  50. package/src/client/errors.ts +45 -0
  51. package/src/client/index.ts +52 -26
  52. package/src/client/logger.ts +2 -28
  53. package/src/client/merge.ts +374 -0
  54. package/src/client/persistence/adapters/index.ts +8 -0
  55. package/src/client/persistence/adapters/opsqlite.ts +54 -0
  56. package/src/client/persistence/adapters/sqljs.ts +128 -0
  57. package/src/client/persistence/index.ts +54 -0
  58. package/src/client/persistence/indexeddb.ts +110 -0
  59. package/src/client/persistence/memory.ts +61 -0
  60. package/src/client/persistence/sqlite-browser.ts +107 -0
  61. package/src/client/persistence/sqlite-level.ts +407 -0
  62. package/src/client/persistence/sqlite-rn.ts +44 -0
  63. package/src/client/persistence/sqlite.ts +161 -0
  64. package/src/client/persistence/types.ts +49 -0
  65. package/src/client/prose.ts +369 -0
  66. package/src/client/replicate.ts +80 -0
  67. package/src/client/services/checkpoint.ts +86 -0
  68. package/src/client/services/reconciliation.ts +108 -0
  69. package/src/component/_generated/api.ts +52 -0
  70. package/src/component/_generated/component.ts +103 -0
  71. package/src/component/_generated/{dataModel.d.ts → dataModel.ts} +1 -1
  72. package/src/component/_generated/server.ts +161 -0
  73. package/src/component/convex.config.ts +3 -1
  74. package/src/component/logger.ts +36 -0
  75. package/src/component/public.ts +364 -111
  76. package/src/component/schema.ts +18 -5
  77. package/src/env.d.ts +31 -0
  78. package/src/server/builder.ts +85 -0
  79. package/src/server/index.ts +9 -24
  80. package/src/server/schema.ts +20 -76
  81. package/src/server/storage.ts +313 -0
  82. package/src/shared/index.ts +5 -0
  83. package/src/shared/types.ts +52 -0
  84. package/LICENSE.package +0 -201
  85. package/dist/client/storage.d.ts +0 -143
  86. package/dist/server/replication.d.ts +0 -122
  87. package/dist/server/ssr.d.ts +0 -79
  88. package/dist/ssr.js +0 -19
  89. package/src/client/storage.ts +0 -206
  90. package/src/component/_generated/api.d.ts +0 -95
  91. package/src/component/_generated/api.js +0 -23
  92. package/src/component/_generated/server.js +0 -90
  93. package/src/server/replication.ts +0 -244
  94. package/src/server/ssr.ts +0 -106
package/src/client/prose.ts
@@ -0,0 +1,369 @@
+ /**
+  * Prose Field Helpers - Document-level state management for rich text sync
+  *
+  * Manages Y.XmlFragment observation, debounced sync, and pending state.
+  * Uses document-level tracking to prevent race conditions.
+  */
+
+ import * as Y from 'yjs';
+ import type { Collection } from '@tanstack/db';
+ import { getLogger } from '$/client/logger.js';
+ import { serializeYMapValue } from '$/client/merge.js';
+
+ /** Server origin - changes from server should not trigger local sync */
+ const SERVER_ORIGIN = 'server';
+
+ const logger = getLogger(['replicate', 'prose']);
+
+ // Default debounce time for prose sync
+ const DEFAULT_DEBOUNCE_MS = 1000;
+
+ // ============================================================================
+ // Document-Level State (keyed by "collection:documentId")
+ // ============================================================================
+
+ // Track when applying server data to prevent echo loops - DOCUMENT-LEVEL
+ const applyingFromServer = new Map<string, boolean>();
+
+ // Debounce timers for prose sync
+ const debounceTimers = new Map<string, ReturnType<typeof setTimeout>>();
+
+ // Last synced state vectors for computing deltas
+ const lastSyncedVectors = new Map<string, Uint8Array>();
+
+ // Pending sync state
+ const pendingState = new Map<string, boolean>();
+
+ // Pending state change listeners
+ const pendingListeners = new Map<string, Set<(pending: boolean) => void>>();
+
+ // Fragment observer cleanup functions
+ const fragmentObservers = new Map<string, () => void>();
+
+ // Failed sync queue for retry
+ const failedSyncQueue = new Map<string, boolean>();
+
+ // ============================================================================
+ // Applying From Server (Document-Level)
+ // ============================================================================
+
+ /**
+  * Check if a document is currently applying server data.
+  * Used to prevent echo loops in onUpdate handlers.
+  */
+ export function isApplyingFromServer(collection: string, documentId: string): boolean {
+   const key = `${collection}:${documentId}`;
+   return applyingFromServer.get(key) ?? false;
+ }
+
+ /**
+  * Set whether a document is currently applying server data.
+  */
+ export function setApplyingFromServer(
+   collection: string,
+   documentId: string,
+   value: boolean
+ ): void {
+   const key = `${collection}:${documentId}`;
+   if (value) {
+     applyingFromServer.set(key, true);
+   } else {
+     applyingFromServer.delete(key);
+   }
+ }
+
+ // ============================================================================
+ // Pending State Management
+ // ============================================================================
+
+ /**
+  * Set pending state and notify listeners.
+  */
+ function setPendingInternal(key: string, value: boolean): void {
+   const current = pendingState.get(key) ?? false;
+
+   if (current !== value) {
+     pendingState.set(key, value);
+     const listeners = pendingListeners.get(key);
+     if (listeners) {
+       for (const cb of listeners) {
+         try {
+           cb(value);
+         } catch (err) {
+           logger.error('Pending listener error', { key, error: String(err) });
+         }
+       }
+     }
+   }
+ }
+
+ /**
+  * Get current pending state for a document.
+  */
+ export function isPending(collection: string, documentId: string): boolean {
+   return pendingState.get(`${collection}:${documentId}`) ?? false;
+ }
+
+ /**
+  * Subscribe to pending state changes for a document.
+  */
+ export function subscribePending(
+   collection: string,
+   documentId: string,
+   callback: (pending: boolean) => void
+ ): () => void {
+   const key = `${collection}:${documentId}`;
+
+   let listeners = pendingListeners.get(key);
+   if (!listeners) {
+     listeners = new Set();
+     pendingListeners.set(key, listeners);
+   }
+
+   listeners.add(callback);
+   return () => {
+     listeners?.delete(callback);
+     if (listeners?.size === 0) {
+       pendingListeners.delete(key);
+     }
+   };
+ }
+
+ // ============================================================================
+ // Cancel Pending Sync
+ // ============================================================================
+
+ /**
+  * Cancel any pending debounced sync for a document.
+  * Called when receiving remote updates to avoid conflicts.
+  */
+ export function cancelPending(collection: string, documentId: string): void {
+   const key = `${collection}:${documentId}`;
+   const timer = debounceTimers.get(key);
+
+   if (timer) {
+     clearTimeout(timer);
+     debounceTimers.delete(key);
+     setPendingInternal(key, false);
+     logger.debug('Cancelled pending sync due to remote update', { collection, documentId });
+   }
+ }
+
+ /**
+  * Cancel all pending syncs for a collection.
+  * Called when receiving a snapshot that replaces all state.
+  */
+ export function cancelAllPending(collection: string): void {
+   const prefix = `${collection}:`;
+   for (const [key, timer] of debounceTimers) {
+     if (key.startsWith(prefix)) {
+       clearTimeout(timer);
+       debounceTimers.delete(key);
+       setPendingInternal(key, false);
+     }
+   }
+   logger.debug('Cancelled all pending syncs', { collection });
+ }
+
+ // ============================================================================
+ // Fragment Observation
+ // ============================================================================
+
+ /** Configuration for fragment observation */
+ export interface ProseObserverConfig {
+   collection: string;
+   documentId: string;
+   field: string;
+   fragment: Y.XmlFragment;
+   ydoc: Y.Doc;
+   ymap: Y.Map<unknown>;
+   collectionRef: Collection<any>;
+   debounceMs?: number;
+ }
+
+ /**
+  * Set up observation for a prose field's Y.XmlFragment.
+  * Returns a cleanup function.
+  */
+ export function observeFragment(config: ProseObserverConfig): () => void {
+   const {
+     collection,
+     documentId,
+     field,
+     fragment,
+     ydoc,
+     ymap,
+     collectionRef,
+     debounceMs = DEFAULT_DEBOUNCE_MS,
+   } = config;
+   const key = `${collection}:${documentId}`;
+
+   // Skip if already observing this document
+   const existingCleanup = fragmentObservers.get(key);
+   if (existingCleanup) {
+     logger.debug('Fragment already being observed', { collection, documentId, field });
+     return existingCleanup;
+   }
+
+   const observerHandler = (_events: Y.YEvent<any>[], transaction: Y.Transaction) => {
+     // Skip server-originated changes (echo prevention via transaction origin)
+     if (transaction.origin === SERVER_ORIGIN) {
+       return;
+     }
+
+     // Clear existing timer
+     const existing = debounceTimers.get(key);
+     if (existing) clearTimeout(existing);
+
+     // Mark as pending
+     setPendingInternal(key, true);
+
+     // Schedule sync
+     const timer = setTimeout(async () => {
+       debounceTimers.delete(key);
+
+       const itemYMap = ymap.get(documentId) as Y.Map<unknown> | undefined;
+       if (!itemYMap) {
+         logger.error('Document not found', { collection, documentId });
+         setPendingInternal(key, false);
+         return;
+       }
+
+       try {
+         // Compute delta since last sync
+         const lastVector = lastSyncedVectors.get(key);
+         const delta = lastVector
+           ? Y.encodeStateAsUpdateV2(ydoc, lastVector)
+           : Y.encodeStateAsUpdateV2(ydoc);
+
+         if (delta.length <= 2) {
+           logger.debug('No changes to sync', { collection, documentId });
+           setPendingInternal(key, false);
+           return;
+         }
+
+         const crdtBytes = delta.buffer as ArrayBuffer;
+         const currentVector = Y.encodeStateVector(ydoc);
+
+         logger.debug('Syncing prose delta', {
+           collection,
+           documentId,
+           deltaSize: delta.byteLength,
+         });
+
+         const materializedDoc = serializeYMapValue(itemYMap);
+
+         // Send via collection.update with contentSync metadata
+         const result = collectionRef.update(
+           documentId,
+           { metadata: { contentSync: { crdtBytes, materializedDoc } } },
+           (draft: any) => {
+             draft.updatedAt = Date.now();
+           }
+         );
+         await result.isPersisted.promise;
+
+         // Update last synced vector
+         lastSyncedVectors.set(key, currentVector);
+         failedSyncQueue.delete(key);
+         setPendingInternal(key, false);
+         logger.debug('Prose sync completed', { collection, documentId });
+       } catch (err) {
+         logger.error('Prose sync failed, queued for retry', {
+           collection,
+           documentId,
+           error: String(err),
+         });
+         failedSyncQueue.set(key, true);
+         // Keep pending=true for retry indication
+       }
+     }, debounceMs);
+
+     debounceTimers.set(key, timer);
+
+     // Also retry any failed syncs for this document
+     if (failedSyncQueue.has(key)) {
+       failedSyncQueue.delete(key);
+       logger.debug('Retrying failed sync', { collection, documentId });
+     }
+   };
+
+   // Set up deep observation on the fragment
+   fragment.observeDeep(observerHandler);
+
+   const cleanup = () => {
+     fragment.unobserveDeep(observerHandler);
+     cancelPending(collection, documentId);
+     fragmentObservers.delete(key);
+     lastSyncedVectors.delete(key);
+     logger.debug('Fragment observer cleaned up', { collection, documentId, field });
+   };
+
+   fragmentObservers.set(key, cleanup);
+   logger.debug('Fragment observer registered', { collection, documentId, field });
+
+   return cleanup;
+ }
+
+ // ============================================================================
+ // Cleanup
+ // ============================================================================
+
+ /**
+  * Clean up all prose state for a collection.
+  * Called when collection is destroyed.
+  */
+ export function cleanup(collection: string): void {
+   const prefix = `${collection}:`;
+
+   // Cancel all pending syncs
+   for (const [key, timer] of debounceTimers) {
+     if (key.startsWith(prefix)) {
+       clearTimeout(timer);
+       debounceTimers.delete(key);
+     }
+   }
+
+   // Clear pending state and listeners
+   for (const key of pendingState.keys()) {
+     if (key.startsWith(prefix)) {
+       pendingState.delete(key);
+     }
+   }
+   for (const key of pendingListeners.keys()) {
+     if (key.startsWith(prefix)) {
+       pendingListeners.delete(key);
+     }
+   }
+
+   // Clear applying from server flags
+   for (const key of applyingFromServer.keys()) {
+     if (key.startsWith(prefix)) {
+       applyingFromServer.delete(key);
+     }
+   }
+
+   // Clear last synced vectors
+   for (const key of lastSyncedVectors.keys()) {
+     if (key.startsWith(prefix)) {
+       lastSyncedVectors.delete(key);
+     }
+   }
+
+   // Clean up fragment observers
+   for (const [key, cleanupFn] of fragmentObservers) {
+     if (key.startsWith(prefix)) {
+       cleanupFn();
+       fragmentObservers.delete(key);
+     }
+   }
+
+   // Clear failed sync queue
+   for (const key of failedSyncQueue.keys()) {
+     if (key.startsWith(prefix)) {
+       failedSyncQueue.delete(key);
+     }
+   }
+
+   logger.debug('Prose cleanup complete', { collection });
+ }
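The new prose module exposes a small imperative API (`observeFragment`, `subscribePending`, `cancelPending`, `cleanup`). A minimal usage sketch follows; the import path, the TanStack DB collection handle, and the way the Y.Doc, items Y.Map, and Y.XmlFragment are obtained are assumptions for illustration and are not taken from this diff.

```typescript
// Illustrative wiring only; import path and ydoc/ymap/collection setup are assumptions.
import * as Y from 'yjs';
import { observeFragment, subscribePending, cleanup } from '@trestleinc/replicate/client/prose';

declare const tasksCollection: any; // TanStack DB collection instance (assumed to exist)

const ydoc = new Y.Doc();
const items = ydoc.getMap<unknown>('tasks');

// Seed one item so the debounced sync can find it by documentId.
const task = new Y.Map<unknown>();
task.set('id', 'task-1');
items.set('task-1', task);

const bodyFragment = ydoc.getXmlFragment('task-1:body'); // rich-text fragment (layout assumed)

// Debounced sync of local edits to the 'body' field of document 'task-1'.
const stopObserving = observeFragment({
  collection: 'tasks',
  documentId: 'task-1',
  field: 'body',
  fragment: bodyFragment,
  ydoc,
  ymap: items,
  collectionRef: tasksCollection,
  debounceMs: 500, // optional; DEFAULT_DEBOUNCE_MS is 1000
});

// Surface the pending flag in the UI, e.g. a "saving..." indicator.
const unsubscribe = subscribePending('tasks', 'task-1', (pending) => {
  console.log(pending ? 'saving...' : 'saved');
});

// Tear down when the editor unmounts or the collection is destroyed.
unsubscribe();
stopObserving();
cleanup('tasks');
```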
package/src/client/replicate.ts
@@ -0,0 +1,80 @@
+ /**
+  * Replicate Helpers - Collection-bound functions for TanStack DB optimistic updates
+  *
+  * Each collection gets its own set of bound functions that operate on that
+  * collection's TanStack DB instance. No global state - fully concurrent-safe.
+  */
+
+ export interface ReplicateParams {
+   readonly begin: () => void;
+   readonly write: (message: { type: 'insert' | 'update' | 'delete'; value: unknown }) => void;
+   readonly commit: () => void;
+   readonly truncate: () => void;
+ }
+
+ /**
+  * Bound replicate operations for a specific collection.
+  * These functions are already tied to the collection's TanStack DB params.
+  */
+ export interface BoundReplicateOps<T> {
+   readonly insert: (items: T[]) => void;
+   readonly delete: (items: T[]) => void;
+   readonly upsert: (items: T[]) => void;
+   readonly replace: (items: T[]) => void;
+ }
+
+ /**
+  * Create bound replicate operations for a collection.
+  * Returns functions that are already tied to the collection's params.
+  * This is the proper way to handle multiple concurrent collections.
+  *
+  * @example
+  * ```typescript
+  * const ops = createReplicateOps<Task>(params);
+  * ops.replace(items); // Always targets THIS collection's TanStack DB
+  * ops.upsert([item]);
+  * ops.delete([item]);
+  * ```
+  */
+ export function createReplicateOps<T>(params: ReplicateParams): BoundReplicateOps<T> {
+   return {
+     insert(items: T[]): void {
+       params.begin();
+       for (const item of items) {
+         params.write({ type: 'insert', value: item });
+       }
+       params.commit();
+     },
+
+     delete(items: T[]): void {
+       params.begin();
+       for (const item of items) {
+         params.write({ type: 'delete', value: item });
+       }
+       params.commit();
+     },
+
+     // Upsert uses 'update' type - TanStack DB only recognizes insert/update/delete
+     upsert(items: T[]): void {
+       params.begin();
+       for (const item of items) {
+         params.write({ type: 'update', value: item });
+       }
+       params.commit();
+     },
+
+     replace(items: T[]): void {
+       params.begin();
+       params.truncate();
+       for (const item of items) {
+         params.write({ type: 'insert', value: item });
+       }
+       params.commit();
+     },
+   };
+ }
+
+ // Internal - for test cleanup only
+ export function _resetReplicateParams(): void {
+   // No-op now - nothing to reset since we don't use global state
+ }
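Beyond the file's own `@example`, a hedged sketch of how `createReplicateOps` could plug into a sync loop that receives the `begin`/`write`/`commit`/`truncate` callbacks. The `ReplicateParams` shape matches the interface above; the import path, the `Task` type, and the `fetchAll` function are hypothetical glue, not taken from this diff.

```typescript
// Sketch only; ReplicateParams matches the interface above, the rest is hypothetical glue.
import { createReplicateOps, type ReplicateParams } from '@trestleinc/replicate/client/replicate';

interface Task {
  id: string;
  title: string;
  updatedAt: number;
}

// Hypothetical sync entry point: a TanStack DB-style begin/write/commit/truncate handle.
async function startSync(params: ReplicateParams, fetchAll: () => Promise<Task[]>) {
  const ops = createReplicateOps<Task>(params);

  // Initial load: clear local rows, then insert everything from the server in one transaction.
  ops.replace(await fetchAll());

  // Incremental server changes would then be applied as they arrive:
  // ops.upsert([changedTask]);
  // ops.delete([removedTask]);
}
```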
package/src/client/services/checkpoint.ts
@@ -0,0 +1,86 @@
+ import { Effect, Context, Layer } from 'effect';
+ import { IDBError, IDBWriteError } from '$/client/errors.js';
+ import type { KeyValueStore } from '$/client/persistence/types.js';
+
+ export interface CheckpointData {
+   lastModified: number;
+ }
+
+ export class Checkpoint extends Context.Tag('Checkpoint')<
+   Checkpoint,
+   {
+     readonly loadCheckpoint: (collection: string) => Effect.Effect<CheckpointData, IDBError>;
+     readonly saveCheckpoint: (
+       collection: string,
+       checkpoint: CheckpointData
+     ) => Effect.Effect<void, IDBWriteError>;
+     readonly clearCheckpoint: (collection: string) => Effect.Effect<void, IDBError>;
+   }
+ >() {}
+
+ /**
+  * Create a Checkpoint service layer using the provided KeyValueStore.
+  */
+ export function createCheckpointLayer(kv: KeyValueStore) {
+   return Layer.succeed(
+     Checkpoint,
+     Checkpoint.of({
+       loadCheckpoint: (collection) =>
+         Effect.gen(function* (_) {
+           const key = `checkpoint:${collection}`;
+           const stored = yield* _(
+             Effect.tryPromise({
+               try: () => kv.get<CheckpointData>(key),
+               catch: (cause) => new IDBError({ operation: 'get', key, cause }),
+             })
+           );
+
+           if (stored) {
+             yield* _(
+               Effect.logDebug('Loaded checkpoint from storage', {
+                 collection,
+                 checkpoint: stored,
+               })
+             );
+             return stored;
+           }
+
+           yield* _(
+             Effect.logDebug('No stored checkpoint, using default', {
+               collection,
+             })
+           );
+           return { lastModified: 0 };
+         }),
+
+       saveCheckpoint: (collection, checkpoint) =>
+         Effect.gen(function* (_) {
+           const key = `checkpoint:${collection}`;
+           yield* _(
+             Effect.tryPromise({
+               try: () => kv.set(key, checkpoint),
+               catch: (cause) => new IDBWriteError({ key, value: checkpoint, cause }),
+             })
+           );
+           yield* _(
+             Effect.logDebug('Checkpoint saved', {
+               collection,
+               checkpoint,
+             })
+           );
+         }),
+
+       clearCheckpoint: (collection) =>
+         Effect.gen(function* (_) {
+           const key = `checkpoint:${collection}`;
+           yield* _(
+             Effect.tryPromise({
+               try: () => kv.del(key),
+               catch: (cause) => new IDBError({ operation: 'delete', key, cause }),
+             })
+           );
+           yield* _(Effect.logDebug('Checkpoint cleared', { collection }));
+         }),
+     })
+   );
+ }
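A hedged sketch of consuming the Checkpoint service: build the layer from a key-value store and run a small Effect program against it. The in-memory store below is a stand-in that implements only the `get`/`set`/`del` methods exercised above; the import path and the full `KeyValueStore` interface are assumptions, hence the cast.

```typescript
// Sketch only; the in-memory store is a stand-in implementing just get/set/del.
import { Effect } from 'effect';
import { Checkpoint, createCheckpointLayer } from '@trestleinc/replicate/client/services/checkpoint';

const memory = new Map<string, unknown>();
const kv = {
  get: async <T>(key: string) => memory.get(key) as T | undefined,
  set: async (key: string, value: unknown) => { memory.set(key, value); },
  del: async (key: string) => { memory.delete(key); },
};

const program = Effect.gen(function* (_) {
  const checkpoint = yield* _(Checkpoint);
  yield* _(checkpoint.saveCheckpoint('tasks', { lastModified: Date.now() }));
  return yield* _(checkpoint.loadCheckpoint('tasks'));
});

// Provide the layer built from the stand-in store, then run the program.
void Effect.runPromise(
  program.pipe(Effect.provide(createCheckpointLayer(kv as any)))
).then((data) => console.log('checkpoint', data));
```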
package/src/client/services/reconciliation.ts
@@ -0,0 +1,108 @@
+ import { Effect, Context, Layer } from 'effect';
+ import * as Y from 'yjs';
+ import { yjsTransact, serializeYMap } from '$/client/merge.js';
+ import { ReconciliationError as ReconciliationErrorImport } from '$/client/errors.js';
+
+ /**
+  * Reconciliation handles removal of phantom documents -
+  * documents that exist locally but have been deleted on the server.
+  */
+ export class Reconciliation extends Context.Tag('Reconciliation')<
+   Reconciliation,
+   {
+     /**
+      * Reconciles local Yjs state with server state by removing phantom documents.
+      * Uses an existing Yjs document and map instead of creating new ones.
+      *
+      * @param ydoc - Existing Yjs document
+      * @param ymap - Existing Yjs map within the document
+      * @param collection - Collection name for logging
+      * @param serverDocs - Documents from server
+      * @param getKey - Function to extract key from document
+      */
+     readonly reconcile: <T>(
+       ydoc: Y.Doc,
+       ymap: Y.Map<unknown>,
+       collection: string,
+       serverDocs: readonly T[],
+       getKey: (doc: T) => string
+     ) => Effect.Effect<T[], ReconciliationErrorImport>;
+   }
+ >() {}
+
+ export const ReconciliationLive = Layer.succeed(
+   Reconciliation,
+   Reconciliation.of({
+     reconcile: <T>(
+       ydoc: Y.Doc,
+       ymap: Y.Map<unknown>,
+       collection: string,
+       serverDocs: readonly T[],
+       getKey: (doc: T) => string
+     ) =>
+       Effect.gen(function* (_) {
+         const serverDocIds = new Set(serverDocs.map(getKey));
+         const toDelete: string[] = [];
+
+         // Find phantom documents (exist locally but not on server)
+         ymap.forEach((_, key) => {
+           if (!serverDocIds.has(key)) {
+             toDelete.push(key);
+           }
+         });
+
+         if (toDelete.length === 0) {
+           yield* _(Effect.logDebug('No phantom documents found', { collection }));
+           return [];
+         }
+
+         yield* _(
+           Effect.logWarning(`Found ${toDelete.length} phantom documents`, {
+             collection,
+             phantomDocs: toDelete.slice(0, 10), // Log first 10
+           })
+         );
+
+         // Extract items before deletion for TanStack DB sync
+         // Use serializeYMap for consistent ProseMirror JSON (not XML string from toJSON)
+         const removedItems: T[] = [];
+         for (const key of toDelete) {
+           const itemYMap = ymap.get(key);
+           if (itemYMap instanceof Y.Map) {
+             removedItems.push(serializeYMap(itemYMap) as T);
+           }
+         }
+
+         // Remove from Yjs using plain function
+         yjsTransact(
+           ydoc,
+           () => {
+             for (const key of toDelete) {
+               ymap.delete(key);
+             }
+           },
+           'reconciliation'
+         );
+
+         yield* _(
+           Effect.logInfo('Reconciliation completed', {
+             collection,
+             deletedCount: removedItems.length,
+           })
+         );
+
+         // Return removed items for TanStack DB sync
+         return removedItems;
+       }).pipe(
+         Effect.catchAll((cause) =>
+           Effect.fail(
+             new ReconciliationErrorImport({
+               collection,
+               reason: 'Reconciliation failed',
+               cause,
+             })
+           )
+         )
+       ),
+   })
+ );
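A hedged sketch of exercising the Reconciliation service directly: seed a Y.Doc with a phantom entry, provide `ReconciliationLive`, and run `reconcile`. The import path and this calling pattern are illustrative; this hunk does not show how the package drives the service internally.

```typescript
// Sketch only; shows the reconcile contract, not how the package invokes it internally.
import { Effect } from 'effect';
import * as Y from 'yjs';
import { Reconciliation, ReconciliationLive } from '@trestleinc/replicate/client/services/reconciliation';

interface Task {
  id: string;
  title: string;
}

const ydoc = new Y.Doc();
const ymap = ydoc.getMap<unknown>('tasks');

// Local state knows 'a' and 'b'; the server only returns 'a', so 'b' is a phantom.
for (const id of ['a', 'b']) {
  const item = new Y.Map<unknown>();
  item.set('id', id);
  item.set('title', `Task ${id}`);
  ymap.set(id, item);
}
const serverDocs: Task[] = [{ id: 'a', title: 'Task a' }];

const program = Effect.gen(function* (_) {
  const { reconcile } = yield* _(Reconciliation);
  // Returns the removed items so the caller can also delete them from TanStack DB.
  return yield* _(reconcile(ydoc, ymap, 'tasks', serverDocs, (doc) => doc.id));
});

void Effect.runPromise(program.pipe(Effect.provide(ReconciliationLive))).then((removed) => {
  console.log('removed phantoms', removed); // the serialized 'b' document
});
```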
package/src/component/_generated/api.ts
@@ -0,0 +1,52 @@
+ /* eslint-disable */
+ /**
+  * Generated `api` utility.
+  *
+  * THIS CODE IS AUTOMATICALLY GENERATED.
+  *
+  * To regenerate, run `npx convex dev`.
+  * @module
+  */
+
+ import type * as logger from "../logger.js";
+ import type * as public_ from "../public.js";
+
+ import type {
+   ApiFromModules,
+   FilterApi,
+   FunctionReference,
+ } from "convex/server";
+ import { anyApi, componentsGeneric } from "convex/server";
+
+ const fullApi: ApiFromModules<{
+   logger: typeof logger;
+   public: typeof public_;
+ }> = anyApi as any;
+
+ /**
+  * A utility for referencing Convex functions in your app's public API.
+  *
+  * Usage:
+  * ```js
+  * const myFunctionReference = api.myModule.myFunction;
+  * ```
+  */
+ export const api: FilterApi<
+   typeof fullApi,
+   FunctionReference<any, "public">
+ > = anyApi as any;
+
+ /**
+  * A utility for referencing Convex functions in your app's internal API.
+  *
+  * Usage:
+  * ```js
+  * const myFunctionReference = internal.myModule.myFunction;
+  * ```
+  */
+ export const internal: FilterApi<
+   typeof fullApi,
+   FunctionReference<any, "internal">
+ > = anyApi as any;
+
+ export const components = componentsGeneric() as unknown as {};