@trestleinc/replicate 0.1.0 → 1.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +356 -420
- package/dist/client/collection.d.ts +78 -76
- package/dist/client/errors.d.ts +59 -0
- package/dist/client/index.d.ts +22 -18
- package/dist/client/logger.d.ts +0 -1
- package/dist/client/merge.d.ts +77 -0
- package/dist/client/persistence/adapters/index.d.ts +8 -0
- package/dist/client/persistence/adapters/opsqlite.d.ts +46 -0
- package/dist/client/persistence/adapters/sqljs.d.ts +83 -0
- package/dist/client/persistence/index.d.ts +49 -0
- package/dist/client/persistence/indexeddb.d.ts +17 -0
- package/dist/client/persistence/memory.d.ts +16 -0
- package/dist/client/persistence/sqlite-browser.d.ts +51 -0
- package/dist/client/persistence/sqlite-level.d.ts +63 -0
- package/dist/client/persistence/sqlite-rn.d.ts +36 -0
- package/dist/client/persistence/sqlite.d.ts +47 -0
- package/dist/client/persistence/types.d.ts +42 -0
- package/dist/client/prose.d.ts +56 -0
- package/dist/client/replicate.d.ts +40 -0
- package/dist/client/services/checkpoint.d.ts +18 -0
- package/dist/client/services/reconciliation.d.ts +24 -0
- package/dist/component/_generated/api.d.ts +35 -0
- package/dist/component/_generated/api.js +3 -3
- package/dist/component/_generated/component.d.ts +89 -0
- package/dist/component/_generated/component.js +0 -0
- package/dist/component/_generated/dataModel.d.ts +45 -0
- package/dist/component/_generated/dataModel.js +0 -0
- package/{src → dist}/component/_generated/server.d.ts +9 -38
- package/dist/component/convex.config.d.ts +2 -2
- package/dist/component/convex.config.js +2 -1
- package/dist/component/logger.d.ts +8 -0
- package/dist/component/logger.js +30 -0
- package/dist/component/public.d.ts +36 -61
- package/dist/component/public.js +232 -58
- package/dist/component/schema.d.ts +32 -8
- package/dist/component/schema.js +19 -6
- package/dist/index.js +1553 -308
- package/dist/server/builder.d.ts +94 -0
- package/dist/server/index.d.ts +14 -17
- package/dist/server/schema.d.ts +17 -63
- package/dist/server/storage.d.ts +80 -0
- package/dist/server.js +268 -83
- package/dist/shared/index.d.ts +5 -0
- package/dist/shared/index.js +2 -0
- package/dist/shared/types.d.ts +50 -0
- package/dist/shared/types.js +6 -0
- package/dist/shared.js +6 -0
- package/package.json +59 -49
- package/src/client/collection.ts +877 -450
- package/src/client/errors.ts +45 -0
- package/src/client/index.ts +52 -26
- package/src/client/logger.ts +2 -28
- package/src/client/merge.ts +374 -0
- package/src/client/persistence/adapters/index.ts +8 -0
- package/src/client/persistence/adapters/opsqlite.ts +54 -0
- package/src/client/persistence/adapters/sqljs.ts +128 -0
- package/src/client/persistence/index.ts +54 -0
- package/src/client/persistence/indexeddb.ts +110 -0
- package/src/client/persistence/memory.ts +61 -0
- package/src/client/persistence/sqlite-browser.ts +107 -0
- package/src/client/persistence/sqlite-level.ts +407 -0
- package/src/client/persistence/sqlite-rn.ts +44 -0
- package/src/client/persistence/sqlite.ts +161 -0
- package/src/client/persistence/types.ts +49 -0
- package/src/client/prose.ts +369 -0
- package/src/client/replicate.ts +80 -0
- package/src/client/services/checkpoint.ts +86 -0
- package/src/client/services/reconciliation.ts +108 -0
- package/src/component/_generated/api.ts +52 -0
- package/src/component/_generated/component.ts +103 -0
- package/src/component/_generated/{dataModel.d.ts → dataModel.ts} +1 -1
- package/src/component/_generated/server.ts +161 -0
- package/src/component/convex.config.ts +3 -1
- package/src/component/logger.ts +36 -0
- package/src/component/public.ts +364 -111
- package/src/component/schema.ts +18 -5
- package/src/env.d.ts +31 -0
- package/src/server/builder.ts +85 -0
- package/src/server/index.ts +9 -24
- package/src/server/schema.ts +20 -76
- package/src/server/storage.ts +313 -0
- package/src/shared/index.ts +5 -0
- package/src/shared/types.ts +52 -0
- package/LICENSE.package +0 -201
- package/dist/client/storage.d.ts +0 -143
- package/dist/server/replication.d.ts +0 -122
- package/dist/server/ssr.d.ts +0 -79
- package/dist/ssr.js +0 -19
- package/src/client/storage.ts +0 -206
- package/src/component/_generated/api.d.ts +0 -95
- package/src/component/_generated/api.js +0 -23
- package/src/component/_generated/server.js +0 -90
- package/src/server/replication.ts +0 -244
- package/src/server/ssr.ts +0 -106
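The client is reorganized in 1.1.0: the old `storage.ts`, `replication.ts`, and `ssr.ts` modules are gone, replaced by a pluggable persistence layer (`persistence/` with memory, IndexedDB, sql.js, op-sqlite, and SQLite/Level adapters) plus new `merge`, `prose`, and `replicate` client modules. The actual `Persistence` contract lives in `src/client/persistence/types.ts`, which is not expanded in this diff; the sketch below reflects only what the `collection.ts` diff further down exercises, so anything beyond those call sites is an assumption.

```typescript
import type * as Y from 'yjs';

// Shape inferred from usage in collection.ts below (persistence.kv.set(...),
// persistence.createDocPersistence(collection, ydoc), docPersistence.whenSynced,
// docPersistence.destroy()); the real declarations in persistence/types.ts may differ.
interface KvStore {
  set(key: string, value: unknown): Promise<void>;
  get(key: string): Promise<unknown>; // assumed counterpart used by the checkpoint service
}

interface PersistenceProvider {
  /** Resolves once the Y.Doc has been hydrated from local storage. */
  whenSynced: Promise<unknown>;
  destroy(): void;
}

interface Persistence {
  kv: KvStore;
  createDocPersistence(collection: string, ydoc: Y.Doc): PersistenceProvider;
}
```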
package/src/client/collection.ts
CHANGED
@@ -1,550 +1,977 @@
 import * as Y from 'yjs';
-import {
-
-  NonRetriableError,
-  type OfflineExecutor,
-} from '@tanstack/offline-transactions';
+import { createMutex } from 'lib0/mutex';
+import type { Persistence, PersistenceProvider } from '$/client/persistence/types.js';
 import type { ConvexClient } from 'convex/browser';
 import type { FunctionReference } from 'convex/server';
 import type { CollectionConfig, Collection } from '@tanstack/db';
-import {
+import { Effect, Layer } from 'effect';
+import { getLogger } from '$/client/logger.js';
+import { ProseError, NonRetriableError } from '$/client/errors.js';
+import { Checkpoint, createCheckpointLayer } from '$/client/services/checkpoint.js';
+import { Reconciliation, ReconciliationLive } from '$/client/services/reconciliation.js';
+import { createReplicateOps, type BoundReplicateOps } from '$/client/replicate.js';
+import {
+  createYjsDocument,
+  getYMap,
+  transactWithDelta,
+  applyUpdate,
+  extractItems,
+  extractItem,
+  isDoc,
+  fragmentFromJSON,
+  serializeYMapValue,
+  getFragmentFromYMap,
+} from '$/client/merge.js';
+import * as prose from '$/client/prose.js';
+
+/** Origin markers for Yjs transactions */
+enum YjsOrigin {
+  Local = 'local',
+  Fragment = 'fragment',
+  Server = 'server',
+}
+import type { ProseFields, XmlFragmentJSON } from '$/shared/types.js';
 
-const logger = getLogger(['
+const logger = getLogger(['replicate', 'collection']);
 
-
-
-
- */
-export interface ConvexCollectionOptionsConfig<T extends object> {
-  /** Function to extract unique key from items */
-  getKey: (item: T) => string | number;
+interface HttpError extends Error {
+  status?: number;
+}
 
-
-
+/** Mutation data passed by TanStack DB transaction handlers */
+interface CollectionMutation<T> {
+  key: string | number;
+  modified: T;
+  original?: T | Record<string, never>;
+  changes?: Partial<T>;
+  metadata?: unknown;
+}
 
-
-
+/** Metadata for content sync operations */
+interface ContentSyncMetadata {
+  crdtBytes: ArrayBuffer;
+  materializedDoc: unknown;
+}
 
-
+/** Transaction wrapper containing mutations array */
+interface CollectionTransaction<T> {
+  transaction: {
+    mutations: CollectionMutation<T>[];
+  };
+}
+
+function handleMutationError(
+  error: unknown,
+  operation: 'Insert' | 'Update' | 'Delete',
+  collection: string
+): never {
+  const httpError = error as HttpError;
+  logger.error(`${operation} failed`, {
+    collection,
+    error: httpError?.message,
+    status: httpError?.status,
+  });
+
+  if (httpError?.status === 401 || httpError?.status === 403) {
+    throw new NonRetriableError('Authentication failed');
+  }
+  if (httpError?.status === 422) {
+    throw new NonRetriableError('Validation error');
+  }
+  throw error;
+}
+
+const cleanupFunctions = new Map<string, () => void>();
+
+/** Server-rendered material data for SSR hydration */
+export type Materialized<T> = {
+  documents: ReadonlyArray<T>;
+  checkpoint?: { lastModified: number };
+  count?: number;
+  crdtBytes?: ArrayBuffer;
+};
+
+/** Configuration for creating a Convex-backed collection */
+export interface ConvexCollectionOptionsConfig<T extends object> {
+  getKey: (item: T) => string | number;
+  material?: Materialized<T>;
+  convexClient: ConvexClient;
   api: {
-    stream: FunctionReference<'query'>;
-
-
-
+    stream: FunctionReference<'query'>;
+    insert: FunctionReference<'mutation'>;
+    update: FunctionReference<'mutation'>;
+    remove: FunctionReference<'mutation'>;
+    recovery: FunctionReference<'query'>;
+    material?: FunctionReference<'query'>;
+    [key: string]: any;
   };
+  collection: string;
+  /** Fields that contain prose (rich text) content stored as Y.XmlFragment */
+  prose: Array<ProseFields<T>>;
+  /** Undo capture timeout in ms. Changes within this window merge into one undo. Default: 500 */
+  undoCaptureTimeout?: number;
+  /** Persistence provider for Y.Doc and key-value storage */
+  persistence: Persistence;
+}
+
+/** Editor binding for BlockNote/TipTap collaboration */
+export interface EditorBinding {
+  /** The Y.XmlFragment bound to the editor */
+  readonly fragment: Y.XmlFragment;
+
+  /** Provider stub for BlockNote compatibility */
+  readonly provider: { readonly awareness: null };
+
+  /** Current sync state - true if unsent changes exist */
+  readonly pending: boolean;
 
-  /**
-
+  /** Subscribe to pending state changes. Returns unsubscribe function. */
+  onPendingChange(callback: (pending: boolean) => void): () => void;
+
+  /** Undo the last content edit */
+  undo(): void;
+
+  /** Redo the last undone edit */
+  redo(): void;
+
+  /** Check if undo is available */
+  canUndo(): boolean;
+
+  /** Check if redo is available */
+  canRedo(): boolean;
+}
+
+/** Utilities exposed on collection.utils */
+interface ConvexCollectionUtils<T extends object> {
+  /**
+   * Get an editor binding for a prose field.
+   * Waits for Y.Doc to be ready (IndexedDB loaded) before returning.
+   * @param documentId - The document ID
+   * @param field - The prose field name (must be in `prose` config)
+   * @returns Promise resolving to EditorBinding
+   */
+  prose(documentId: string, field: ProseFields<T>): Promise<EditorBinding>;
+}
+
+/** Extended collection with prose field utilities */
+export interface ConvexCollection<T extends object> extends Collection<T> {
+  /** Utilities for prose field operations */
+  utils: ConvexCollectionUtils<T>;
+}
+
+// Module-level storage for Y.Doc and Y.Map instances
+const collectionDocs = new Map<string, { ydoc: Y.Doc; ymap: Y.Map<unknown> }>();
+
+// Module-level storage for undo configuration per collection
+const collectionUndoConfig = new Map<
+  string,
+  { captureTimeout: number; trackedOrigins: Set<unknown> }
+>();
+
+// Default undo capture timeout
+const DEFAULT_UNDO_CAPTURE_TIMEOUT = 500;
+
+// Default debounce time for prose sync
+const DEFAULT_DEBOUNCE_MS = 1000;
+
+// Mutex per collection for thread-safe updates
+const collectionMutex = new Map<string, ReturnType<typeof createMutex>>();
+
+// Fragment undo managers: "collection:documentId:field" -> UndoManager
+const fragmentUndoManagers = new Map<string, Y.UndoManager>();
+
+// Debounce config per collection
+const debounceConfig = new Map<string, number>();
+
+// Collection references - set in sync.sync() callback, used by utils.prose()
+const collectionRefs = new Map<string, Collection<any>>();
+
+// Server state vectors for recovery sync
+const serverStateVectors = new Map<string, Uint8Array>();
+
+// ============================================================================
+// Mutex Management
+// ============================================================================
+
+/**
+ * Get or create mutex for a collection.
+ */
+function getOrCreateMutex(collection: string): ReturnType<typeof createMutex> {
+  let mux = collectionMutex.get(collection);
+  if (!mux) {
+    mux = createMutex();
+    collectionMutex.set(collection, mux);
+  }
+  return mux;
 }
 
+// ============================================================================
+// Fragment UndoManager (scoped to content field only)
+// ============================================================================
+
 /**
- *
- *
+ * Get or create an UndoManager scoped to a fragment field.
+ * This tracks only content edits, not document-level changes like title.
  */
-
+function getOrCreateFragmentUndoManager(
+  collection: string,
+  documentId: string,
+  field: string,
+  fragment: Y.XmlFragment
+): Y.UndoManager {
+  const key = `${collection}:${documentId}:${field}`;
+
+  let um = fragmentUndoManagers.get(key);
+  if (um) return um;
+
+  const config = collectionUndoConfig.get(collection);
+
+  um = new Y.UndoManager([fragment], {
+    captureTimeout: config?.captureTimeout ?? DEFAULT_UNDO_CAPTURE_TIMEOUT,
+    // Only track local fragment edits, not server syncs
+    trackedOrigins: new Set([YjsOrigin.Fragment]),
+  });
+
+  fragmentUndoManagers.set(key, um);
+  return um;
+}
 
 /**
- *
- *
- * This implements the CORRECT pattern:
- * - Uses onInsert/onUpdate/onDelete handlers (not custom wrapper)
- * - Yjs Y.Doc with 'update' event for delta encoding
- * - Stores Y.Map instances (not plain objects) for field-level CRDT
- * - Uses ydoc.transact() to batch changes into single 'update' event
+ * Create TanStack DB collection options with Convex + Yjs replication.
  *
  * @example
  * ```typescript
- *
- *
- *
- *
- *
- *
- *
- *   collectionName: 'tasks',
- *   getKey: (task) => task.id,
- *   initialData,
- * })
- * )
+ * const options = convexCollectionOptions<Task>({
+ *   getKey: (t) => t.id,
+ *   convexClient,
+ *   api: { stream: api.tasks.stream, insert: api.tasks.insert, ... },
+ *   collection: 'tasks',
+ * });
+ * const collection = createCollection(options);
 * ```
 */
 export function convexCollectionOptions<T extends object>({
   getKey,
-
+  material,
   convexClient,
   api,
-
+  collection,
+  prose: proseFields,
+  undoCaptureTimeout = 500,
+  persistence,
 }: ConvexCollectionOptionsConfig<T>): CollectionConfig<T> & {
   _convexClient: ConvexClient;
-
+  _collection: string;
+  _proseFields: Array<ProseFields<T>>;
+  _persistence: Persistence;
+  utils: ConvexCollectionUtils<T>;
 } {
-  //
-  const
-
-
-
-
-
-
-
-
-
-
-
-
-
+  // Create a Set for O(1) lookup of prose fields
+  const proseFieldSet = new Set<string>(proseFields as string[]);
+
+  // Create utils object - prose() waits for Y.Doc to be ready via collectionDocs
+  const utils: ConvexCollectionUtils<T> = {
+    async prose(documentId: string, field: ProseFields<T>): Promise<EditorBinding> {
+      const fieldStr = field as string;
+
+      // Validate field is in prose config
+      if (!proseFieldSet.has(fieldStr)) {
+        throw new ProseError({
+          documentId,
+          field: fieldStr,
+          collection,
+        });
+      }
+
+      // Wait for collection to be ready (Y.Doc initialized from persistence)
+      let docs = collectionDocs.get(collection);
+
+      if (!docs) {
+        // Poll until ready - Y.Doc initialization is async
+        await new Promise<void>((resolve, reject) => {
+          const maxWait = 10000; // 10 second timeout
+          const startTime = Date.now();
+          const check = setInterval(() => {
+            if (collectionDocs.has(collection)) {
+              clearInterval(check);
+              resolve();
+            } else if (Date.now() - startTime > maxWait) {
+              clearInterval(check);
+              reject(
+                new ProseError({
+                  documentId,
+                  field: fieldStr,
+                  collection,
+                })
+              );
+            }
+          }, 10);
+        });
+        docs = collectionDocs.get(collection);
+      }
+
+      if (!docs) {
+        throw new ProseError({
+          documentId,
+          field: fieldStr,
+          collection,
+        });
+      }
+
+      const fragment = getFragmentFromYMap(docs.ymap, documentId, fieldStr);
+      if (!fragment) {
+        throw new ProseError({
+          documentId,
+          field: fieldStr,
+          collection,
+        });
+      }
+
+      // Setup fragment observer via prose module (handles debounced sync)
+      const collectionRef = collectionRefs.get(collection);
+      if (collectionRef) {
+        prose.observeFragment({
+          collection,
+          documentId,
+          field: fieldStr,
+          fragment,
+          ydoc: docs.ydoc,
+          ymap: docs.ymap,
+          collectionRef,
+          debounceMs: debounceConfig.get(collection) ?? DEFAULT_DEBOUNCE_MS,
+        });
+      }
+
+      const undoManager = getOrCreateFragmentUndoManager(
+        collection,
+        documentId,
+        fieldStr,
+        fragment
+      );
+
+      // Return EditorBinding with reactive pending state from prose module
+      return {
+        fragment,
+        provider: { awareness: null },
+
+        get pending() {
+          return prose.isPending(collection, documentId);
+        },
+
+        onPendingChange(callback: (pending: boolean) => void) {
+          return prose.subscribePending(collection, documentId, callback);
+        },
+
+        undo() {
+          undoManager.undo();
+        },
+
+        redo() {
+          undoManager.redo();
+        },
+
+        canUndo() {
+          return undoManager.canUndo();
+        },
+
+        canRedo() {
+          return undoManager.canRedo();
+        },
+      } satisfies EditorBinding;
+    },
+  };
+
+  let ydoc: Y.Doc = null as any;
+  let ymap: Y.Map<unknown> = null as any;
+  let docPersistence: PersistenceProvider = null as any;
+
+  // Bound replicate operations - set during sync initialization
+  // Used by onDelete and other handlers that need to sync with TanStack DB
+  let ops: BoundReplicateOps<T> = null as any;
+
+  // Create services layer with the persistence KV store
+  const checkpointLayer = createCheckpointLayer(persistence.kv);
+  const servicesLayer = Layer.mergeAll(checkpointLayer, ReconciliationLive);
+
+  let resolvePersistenceReady: (() => void) | undefined;
+  const persistenceReadyPromise = new Promise<void>((resolve) => {
+    resolvePersistenceReady = resolve;
   });
 
-
-
-
+  let resolveOptimisticReady: (() => void) | undefined;
+  const optimisticReadyPromise = new Promise<void>((resolve) => {
+    resolveOptimisticReady = resolve;
+  });
 
-
-
-
+  const reconcile = (ops: BoundReplicateOps<T>) =>
+    Effect.gen(function* () {
+      if (!api.material) return;
+
+      const materialApi = api.material;
+      const reconciliation = yield* Reconciliation;
 
-
-
-
-        collectionName,
-        mutationCount: transaction.mutations.length,
+      const serverResponse = yield* Effect.tryPromise({
+        try: () => convexClient.query(materialApi, {}),
+        catch: (error) => new Error(`Reconciliation query failed: ${error}`),
       });
 
-
-
-
-        transaction.mutations.forEach((mut: any) => {
-          // Store as Y.Map for field-level CRDT conflict resolution
-          const itemYMap = new Y.Map();
-          Object.entries(mut.modified as Record<string, unknown>).forEach(([k, v]) => {
-            itemYMap.set(k, v);
-          });
-          ymap.set(String(mut.key), itemYMap);
-        });
-      }, 'insert');
-
-      // Send DELTA to Convex (not full state)
-      if (pendingUpdate) {
-        logger.debug('Sending insert delta to Convex', {
-          collectionName,
-          documentId: String(transaction.mutations[0].key),
-          deltaSize: pendingUpdate.length,
-        });
+      const serverDocs = Array.isArray(serverResponse)
+        ? serverResponse
+        : ((serverResponse as any).documents as T[] | undefined) || [];
 
-
-
-
-
-
-
-
+      const removedItems = yield* reconciliation.reconcile(
+        ydoc,
+        ymap,
+        collection,
+        serverDocs,
+        (doc: T) => String(getKey(doc))
+      );
 
-
-
-
-
-
-
-
-
-
-
-          status: error?.status,
-        });
+      if (removedItems.length > 0) {
+        ops.delete(removedItems);
+      }
+    }).pipe(
+      Effect.catchAll((error) =>
+        Effect.gen(function* () {
+          yield* Effect.logError('Reconciliation failed', { collection, error });
+        })
+      )
+    );
 
-
-
-
-
-
-
-
+  /**
+   * Recovery sync using state vectors.
+   * Fetches missing data from server based on local state vector.
+   */
+  const recoverSync = async (): Promise<void> => {
+    if (!api.recovery) {
+      logger.debug('No recovery API configured, skipping recovery sync', { collection });
+      return;
+    }
+
+    try {
+      // Encode local state vector
+      const localStateVector = Y.encodeStateVector(ydoc);
+
+      logger.debug('Starting recovery sync', {
+        collection,
+        localVectorSize: localStateVector.byteLength,
+      });
+
+      // Query server for diff
+      const response = await convexClient.query(api.recovery, {
+        clientStateVector: localStateVector.buffer as ArrayBuffer,
+      });
+
+      // Apply diff if any
+      if (response.diff) {
+        const mux = getOrCreateMutex(collection);
+        mux(() => {
+          applyUpdate(ydoc, new Uint8Array(response.diff), YjsOrigin.Server);
+        });
 
-
-
+        logger.info('Recovery sync applied diff', {
+          collection,
+          diffSize: response.diff.byteLength,
+        });
+      } else {
+        logger.debug('Recovery sync - no diff needed', { collection });
       }
-    },
 
-
-
-
-
-
+      // Store server state vector for future reference
+      if (response.serverStateVector) {
+        serverStateVectors.set(collection, new Uint8Array(response.serverStateVector));
+      }
+    } catch (error) {
+      logger.error('Recovery sync failed', {
+        collection,
+        error: String(error),
       });
+      // Don't throw - recovery is best-effort, subscription will catch up
+    }
+  };
 
-
-
-
-
-
-
-
-
-
-
+  const applyYjsInsert = (mutations: CollectionMutation<T>[]): Uint8Array => {
+    const { delta } = transactWithDelta(
+      ydoc,
+      () => {
+        mutations.forEach((mut) => {
+          const itemYMap = new Y.Map();
+          // First, set the itemYMap in ymap so fragments are bound to the document
+          ymap.set(String(mut.key), itemYMap);
+          Object.entries(mut.modified as Record<string, unknown>).forEach(([k, v]) => {
+            // Check if this is a prose field (auto-detect from config)
+            if (proseFieldSet.has(k) && isDoc(v)) {
+              const fragment = new Y.XmlFragment();
+              // Add fragment to map FIRST (binds it to the Y.Doc)
+              itemYMap.set(k, fragment);
+              // THEN populate content (now it's part of the document)
+              fragmentFromJSON(fragment, v as XmlFragmentJSON);
             } else {
-
-              const newYMap = new Y.Map();
-              Object.entries(mut.modified as Record<string, unknown>).forEach(([k, v]) => {
-                newYMap.set(k, v);
-              });
-              ymap.set(String(mut.key), newYMap);
+              itemYMap.set(k, v);
             }
           });
-        }
-
-
-
-
-
-          documentId: String(transaction.mutations[0].key),
-          deltaSize: pendingUpdate.length,
-        });
+        });
+      },
+      YjsOrigin.Local
+    );
+    return delta;
+  };
 
-
-
-
-
-
-
-
+  const applyYjsUpdate = (mutations: CollectionMutation<T>[]): Uint8Array => {
+    const { delta } = transactWithDelta(
+      ydoc,
+      () => {
+        mutations.forEach((mut) => {
+          const itemYMap = ymap.get(String(mut.key)) as Y.Map<unknown> | undefined;
+          if (itemYMap) {
+            const modifiedFields = mut.modified as Record<string, unknown>;
+            if (!modifiedFields) {
+              logger.warn('mut.modified is null/undefined', { collection, key: String(mut.key) });
+              return;
+            }
+            Object.entries(modifiedFields).forEach(([k, v]) => {
+              const existingValue = itemYMap.get(k);
+
+              // ALWAYS skip prose fields - they are managed by Y.XmlFragment directly
+              // User edits go: BlockNote → Y.XmlFragment → observer → debounce → server
+              // Server sync goes: subscription → applyUpdate(ydoc) → CRDT merge
+              // Writing serialized JSON back would corrupt the CRDT state
+              if (proseFieldSet.has(k)) {
+                logger.debug('Skipping prose field in applyYjsUpdate', { field: k });
+                return;
+              }
 
-
-
-
-
-
-
-
-
-
-
-
+              // Also skip if existing value is a Y.XmlFragment (defensive check)
+              if (existingValue instanceof Y.XmlFragment) {
+                logger.debug('Preserving live fragment field', { field: k });
+                return;
+              }
+
+              // Regular field update
+              itemYMap.set(k, v);
+            });
+          } else {
+            logger.error('Update attempted on non-existent item', {
+              collection,
+              key: String(mut.key),
+            });
+          }
         });
+      },
+      YjsOrigin.Local
+    );
+    return delta;
+  };
 
-
-
-
-
-
-
-  }
+  const applyYjsDelete = (mutations: CollectionMutation<T>[]): Uint8Array => {
+    const { delta } = transactWithDelta(
+      ydoc,
+      () => {
+        mutations.forEach((mut) => {
+          ymap.delete(String(mut.key));
+        });
+      },
+      YjsOrigin.Local
+    );
+    return delta;
+  };
 
-
+  return {
+    id: collection,
+    getKey,
+    _convexClient: convexClient,
+    _collection: collection,
+    _proseFields: proseFields,
+    _persistence: persistence,
+    utils,
+
+    onInsert: async ({ transaction }: CollectionTransaction<T>) => {
+      try {
+        await Promise.all([persistenceReadyPromise, optimisticReadyPromise]);
+        const delta = applyYjsInsert(transaction.mutations);
+        if (delta.length > 0) {
+          const documentKey = String(transaction.mutations[0].key);
+          const itemYMap = ymap.get(documentKey) as Y.Map<unknown>;
+          // Use serializeYMapValue to convert Y.XmlFragment → XmlFragmentJSON (same as onUpdate)
+          const materializedDoc = itemYMap
+            ? serializeYMapValue(itemYMap)
+            : transaction.mutations[0].modified;
+          await convexClient.mutation(api.insert, {
+            documentId: documentKey,
+            crdtBytes: delta.slice().buffer,
+            materializedDoc,
+          });
+        }
+      } catch (error) {
+        handleMutationError(error, 'Insert', collection);
       }
     },
 
-
-    onDelete: async ({ transaction }: any) => {
-      logger.debug('onDelete handler called', {
-        collectionName,
-        mutationCount: transaction.mutations.length,
-      });
-
+    onUpdate: async ({ transaction }: CollectionTransaction<T>) => {
       try {
-
-
-
-
-
-
-
-
-
-        logger.debug('Sending delete delta to Convex', {
-          collectionName,
-          documentId: String(transaction.mutations[0].key),
-          deltaSize: pendingUpdate.length,
-        });
+        const mutation = transaction.mutations[0];
+        const documentKey = String(mutation.key);
+
+        // Skip if this update originated from server (prevents echo loops)
+        // Now checks DOCUMENT-level flag, not collection-level
+        if (prose.isApplyingFromServer(collection, documentKey)) {
+          logger.debug('Skipping onUpdate - data from server', { collection, documentKey });
+          return;
+        }
 
-
-
-
-
-          version: Date.now(),
-        });
+        await Promise.all([persistenceReadyPromise, optimisticReadyPromise]);
+
+        // Metadata is on mutation, not transaction (TanStack DB API)
+        const metadata = mutation.metadata as { contentSync?: ContentSyncMetadata } | undefined;
 
-
-
-
-
+        // Check if this is a content sync from utils.prose()
+        if (metadata?.contentSync) {
+          const { crdtBytes, materializedDoc } = metadata.contentSync;
+
+          await convexClient.mutation(api.update, {
+            documentId: documentKey,
+            crdtBytes,
+            materializedDoc,
           });
+          return;
         }
-      } catch (error: any) {
-        logger.error('Delete failed', {
-          collectionName,
-          error: error?.message,
-          status: error?.status,
-        });
 
-
-
-
-
-
+        // Regular update - apply to Y.Doc and generate delta
+        const delta = applyYjsUpdate(transaction.mutations);
+        if (delta.length > 0) {
+          const itemYMap = ymap.get(documentKey) as Y.Map<unknown>;
+          // Use serializeYMapValue to properly handle XmlFragment fields
+          const fullDoc = itemYMap ? serializeYMapValue(itemYMap) : mutation.modified;
+          await convexClient.mutation(api.update, {
+            documentId: documentKey,
+            crdtBytes: delta.slice().buffer,
+            materializedDoc: fullDoc,
+          });
        }
+      } catch (error) {
+        handleMutationError(error, 'Update', collection);
+      }
+    },
 
-
+    onDelete: async ({ transaction }: CollectionTransaction<T>) => {
+      try {
+        await Promise.all([persistenceReadyPromise, optimisticReadyPromise]);
+        const delta = applyYjsDelete(transaction.mutations);
+        const itemsToDelete = transaction.mutations
+          .map((mut) => mut.original)
+          .filter((item): item is T => item !== undefined && Object.keys(item).length > 0);
+        ops.delete(itemsToDelete);
+        if (delta.length > 0) {
+          const documentKey = String(transaction.mutations[0].key);
+          await convexClient.mutation(api.remove, {
+            documentId: documentKey,
+            crdtBytes: delta.slice().buffer,
+          });
+        }
+      } catch (error) {
+        handleMutationError(error, 'Delete', collection);
      }
    },
 
-    // Sync function for pulling data from server
    sync: {
+      rowUpdateMode: 'partial',
      sync: (params: any) => {
-        const {
-
-        // Step 1: Write initial SSR data to BOTH Yjs AND TanStack DB
-        if (initialData && initialData.length > 0) {
-          // Sync to Yjs first (for CRDT state)
-          ydoc.transact(() => {
-            for (const item of initialData) {
-              const key = getKey(item);
-              const itemYMap = new Y.Map();
-              Object.entries(item as Record<string, unknown>).forEach(([k, v]) => {
-                itemYMap.set(k, v);
-              });
-              ymap.set(String(key), itemYMap);
-            }
-          }, 'ssr-init');
+        const { markReady, collection: collectionInstance } = params;
 
-
-
-          for (const item of initialData) {
-            write({ type: 'insert', value: item });
-          }
-          commit();
-          logger.debug('Initialized with SSR data', {
-            collectionName,
-            count: initialData.length,
-          });
-        }
+        // Store collection reference for utils.prose() to access
+        collectionRefs.set(collection, collectionInstance);
 
-
-
+        const existingCleanup = cleanupFunctions.get(collection);
+        if (existingCleanup) {
+          existingCleanup();
+          cleanupFunctions.delete(collection);
+        }
 
-
-
-
+        let subscription: (() => void) | null = null;
+        const ssrDocuments = material?.documents;
+        const ssrCheckpoint = material?.checkpoint;
+        const ssrCRDTBytes = material?.crdtBytes;
+        const docs: T[] = ssrDocuments ? [...ssrDocuments] : [];
 
-
+        (async () => {
          try {
-
-
-
+            ydoc = await createYjsDocument(collection, persistence.kv);
+            ymap = getYMap<unknown>(ydoc, collection);
+
+            collectionDocs.set(collection, { ydoc, ymap });
+
+            // Store undo config for per-document undo managers
+            const trackedOrigins = new Set([YjsOrigin.Local]);
+            collectionUndoConfig.set(collection, {
+              captureTimeout: undoCaptureTimeout,
+              trackedOrigins,
            });
 
-
-
-
-
-
-
+            docPersistence = persistence.createDocPersistence(collection, ydoc);
+            docPersistence.whenSynced.then(() => {
+              logger.debug('Persistence synced', { collection });
+              resolvePersistenceReady?.();
+            });
+            await persistenceReadyPromise;
+            logger.info('Persistence ready', { collection, ymapSize: ymap.size });
 
-            //
-
-
-
-
-
+            // Create bound replicate operations for this collection
+            // These are tied to this collection's TanStack DB params
+            ops = createReplicateOps<T>(params);
+            resolveOptimisticReady?.();
+
+            // Note: Fragment sync is handled by utils.prose() debounce handler
+            // calling collection.update() with contentSync metadata
+
+            if (ssrCRDTBytes) {
+              applyUpdate(ydoc, new Uint8Array(ssrCRDTBytes), YjsOrigin.Server);
            }
 
-
-
-
-
-
+            // === LOCAL-FIRST FLOW WITH RECOVERY ===
+            // 1. Local data (IndexedDB/Yjs) is the source of truth
+            // 2. Recovery sync - get any missing data from server using state vectors
+            // 3. Push local+recovered data to TanStack DB with ops.replace
+            // 4. Reconcile phantom documents (hidden in loading state)
+            // 5. markReady() - UI renders DATA immediately
+            // 6. Subscription starts in background (replication)
+
+            // Step 1: Recovery sync - fetch missing server data
+            await recoverSync();
+
+            // Step 2: Push local+recovered data to TanStack DB
+            if (ymap.size > 0) {
+              const items = extractItems<T>(ymap);
+              ops.replace(items); // Atomic replace, not accumulative insert
+              logger.info('Data loaded to TanStack DB', {
+                collection,
+                itemCount: items.length,
              });
+            } else {
+              // No data - clear TanStack DB to avoid stale state
+              ops.replace([]);
+              logger.info('No data, cleared TanStack DB', { collection });
            }
 
-
+            // Step 3: Reconcile phantom documents (still in loading state)
+            logger.debug('Running reconciliation', { collection, ymapSize: ymap.size });
+            await Effect.runPromise(reconcile(ops).pipe(Effect.provide(servicesLayer)));
+            logger.debug('Reconciliation complete', { collection });
+
+            // Step 4: Mark ready - UI shows data immediately
+            markReady();
+            logger.info('Collection ready', { collection, ymapSize: ymap.size });
+
+            // Step 4: Load checkpoint for subscription (background replication)
+            const checkpoint =
+              ssrCheckpoint ||
+              (await Effect.runPromise(
+                Effect.gen(function* () {
+                  const checkpointSvc = yield* Checkpoint;
+                  return yield* checkpointSvc.loadCheckpoint(collection);
+                }).pipe(Effect.provide(checkpointLayer))
+              ));
+
+            logger.info('Checkpoint loaded', {
+              collection,
+              checkpoint,
+              source: ssrCheckpoint ? 'SSR' : 'IndexedDB',
+              ymapSize: ymap.size,
+            });
 
-            //
-
-
+            // Get mutex for thread-safe updates
+            const mux = getOrCreateMutex(collection);
+
+            const handleSnapshotChange = (crdtBytes: ArrayBuffer) => {
+              // Cancel all pending syncs - snapshot replaces everything
+              prose.cancelAllPending(collection);
+
+              mux(() => {
+                try {
+                  logger.debug('Applying snapshot', {
+                    collection,
+                    bytesLength: crdtBytes.byteLength,
+                  });
+                  applyUpdate(ydoc, new Uint8Array(crdtBytes), YjsOrigin.Server);
+                  const items = extractItems<T>(ymap);
+                  logger.debug('Snapshot applied', { collection, itemCount: items.length });
+                  ops.replace(items);
+                } catch (error) {
+                  logger.error('Error applying snapshot', { collection, error: String(error) });
+                  throw new Error(`Snapshot application failed: ${error}`);
+                }
+              });
+            };
+
+            const handleDeltaChange = (crdtBytes: ArrayBuffer, documentId: string | undefined) => {
+              // Cancel any pending sync for this document to avoid conflicts
+              if (documentId) {
+                prose.cancelPending(collection, documentId);
+                // Mark that we're applying server data to prevent echo loops (DOCUMENT-level)
+                prose.setApplyingFromServer(collection, documentId, true);
+              }
 
-
-
-
-
+              mux(() => {
+                try {
+                  logger.debug('Applying delta', {
+                    collection,
+                    documentId,
+                    bytesLength: crdtBytes.byteLength,
+                  });
+
+                  const itemBefore = documentId ? extractItem<T>(ymap, documentId) : null;
+                  applyUpdate(ydoc, new Uint8Array(crdtBytes), YjsOrigin.Server);
+
+                  if (!documentId) {
+                    logger.debug('Delta applied (no documentId)', { collection });
+                    return;
+                  }
+
+                  const itemAfter = extractItem<T>(ymap, documentId);
+                  if (itemAfter) {
+                    logger.debug('Upserting item after delta', { collection, documentId });
+                    ops.upsert([itemAfter]);
+                  } else if (itemBefore) {
+                    logger.debug('Deleting item after delta', { collection, documentId });
+                    ops.delete([itemBefore]);
+                  } else {
+                    logger.debug('No change detected after delta', { collection, documentId });
+                  }
+                } catch (error) {
+                  logger.error('Error applying delta', {
+                    collection,
+                    documentId,
+                    error: String(error),
+                  });
+                  throw new Error(`Delta application failed for ${documentId}: ${error}`);
+                } finally {
+                  // Clear document-level flag after delta processing
+                  if (documentId) {
+                    prose.setApplyingFromServer(collection, documentId, false);
+                  }
+                }
+              });
+            };
+
+            // Simple async subscription handler - bypasses Effect for reliability
+            const handleSubscriptionUpdate = async (response: any) => {
+              try {
+                // Validate response shape
+                if (!response || !Array.isArray(response.changes)) {
+                  logger.error('Invalid subscription response', { response });
+                  return;
+                }
+
+                const { changes, checkpoint: newCheckpoint } = response;
+
+                // Process each change
+                for (const change of changes) {
+                  const { operationType, crdtBytes, documentId } = change;
+                  if (!crdtBytes) {
+                    logger.warn('Skipping change with missing crdtBytes', { change });
+                    continue;
+                  }
+
+                  try {
+                    if (operationType === 'snapshot') {
+                      handleSnapshotChange(crdtBytes);
+                    } else {
+                      handleDeltaChange(crdtBytes, documentId);
+                    }
+                  } catch (changeError) {
+                    logger.error('Failed to apply change', {
+                      operationType,
+                      documentId,
+                      error: String(changeError),
+                    });
+                    // Continue processing other changes
+                  }
+                }
+
+                // Save checkpoint using persistence KV store
+                if (newCheckpoint) {
+                  try {
+                    const key = `checkpoint:${collection}`;
+                    await persistence.kv.set(key, newCheckpoint);
+                    logger.debug('Checkpoint saved', { collection, checkpoint: newCheckpoint });
+                  } catch (checkpointError) {
+                    logger.error('Failed to save checkpoint', {
+                      collection,
+                      error: String(checkpointError),
+                    });
+                  }
+                }
+              } catch (error) {
+                logger.error('Subscription handler error', { collection, error: String(error) });
+              }
+            };
 
-
-
-
+            logger.info('Establishing subscription', {
+              collection,
+              checkpoint,
+              limit: 1000,
+            });
 
-
-
-
-
-
-
-
+            subscription = convexClient.onUpdate(
+              api.stream,
+              { checkpoint, limit: 1000 },
+              (response: any) => {
+                logger.debug('Subscription received update', {
+                  collection,
+                  changesCount: response.changes?.length ?? 0,
+                  checkpoint: response.checkpoint,
+                  hasMore: response.hasMore,
                });
-
+
+                // Call async handler directly - no Effect wrapper
+                handleSubscriptionUpdate(response);
              }
-
+            );
+
+            // Note: markReady() was already called above (local-first)
+            // Subscription is background replication, not blocking
+            logger.info('Subscription established', { collection });
+          } catch (error) {
+            logger.error('Failed to set up collection', { error, collection });
+            // Still mark ready on error so UI isn't stuck loading
+            markReady();
+          }
+        })();
+
+        return {
+          material: docs,
+          cleanup: () => {
+            subscription?.();
 
-            //
-
-            const key = getKey(item as T);
+            // Clean up prose module state (debounce timers, pending state, observers)
+            prose.cleanup(collection);
 
-
-
-
-
+            const prefix = `${collection}:`;
+
+            // Destroy fragment undo managers
+            for (const [key, um] of fragmentUndoManagers) {
+              if (key.startsWith(prefix)) {
+                um.destroy();
+                fragmentUndoManagers.delete(key);
              }
            }
 
-
+            // Clean up mutex
+            collectionMutex.delete(collection);
 
-            //
-
+            // Clean up debounce config
+            debounceConfig.delete(collection);
 
-
-
-              deletedCount: deletedItems.length,
-            });
-          } catch (error: any) {
-            logger.error('Failed to sync items from subscription', {
-              error: error.message,
-              errorName: error.name,
-              stack: error?.stack,
-              collectionName,
-              itemCount: items.length,
-            });
-            throw error; // Re-throw to prevent silent failures
-          }
-        });
+            // Clean up collection references
+            collectionRefs.delete(collection);
 
-
-
-
-
-
-
+            collectionUndoConfig.delete(collection);
+            collectionDocs.delete(collection);
+            docPersistence?.destroy();
+            ydoc?.destroy();
+            cleanupFunctions.delete(collection);
+          },
        };
      },
    },
  };
 }
-
-/**
- * Step 2: Wrap collection with offline support.
- *
- * This implements the CORRECT pattern:
- * - Wraps collection ONCE with startOfflineExecutor
- * - Returns raw collection (NO CUSTOM WRAPPER)
- * - Uses beforeRetry filter for stale transactions
- * - Connects to Convex connection state for retry triggers
- *
- * Config is automatically extracted from the rawCollection!
- *
- * @example
- * ```typescript
- * import { createCollection } from '@tanstack/react-db'
- * import { convexCollectionOptions, createConvexCollection } from '@trestleinc/convex-replicate-core'
- *
- * // Step 1: Create raw collection with ALL config
- * const rawCollection = createCollection(
- *   convexCollectionOptions<Task>({
- *     convexClient,
- *     api: api.tasks,
- *     collectionName: 'tasks',
- *     getKey: (task) => task.id,
- *     initialData,
- *   })
- * )
- *
- * // Step 2: Wrap with offline support - params automatically extracted!
- * const collection = createConvexCollection(rawCollection)
- *
- * // Use like a normal TanStack DB collection
- * const tx = collection.insert({ id: '1', text: 'Buy milk', isCompleted: false })
- * await tx.isPersisted.promise // Built-in promise (not custom awaitReplication)
- * ```
- */
-export function createConvexCollection<T extends object>(
-  rawCollection: Collection<T>
-): ConvexCollection<T> {
-  // Extract config from rawCollection
-  const config = (rawCollection as any).config;
-  const convexClient = config._convexClient;
-  const collectionName = config._collectionName;
-
-  if (!convexClient || !collectionName) {
-    throw new Error(
-      'createConvexCollection requires a collection created with convexCollectionOptions. ' +
-        'Make sure you pass convexClient and collectionName to convexCollectionOptions.'
-    );
-  }
-
-  logger.info('Creating Convex collection with offline support', { collectionName });
-
-  // Create offline executor (wraps collection ONCE)
-  const offline: OfflineExecutor = startOfflineExecutor({
-    collections: { [collectionName]: rawCollection as any },
-
-    // Empty mutationFns - handlers in collection config will be used
-    mutationFns: {},
-
-    // Filter stale transactions before retry
-    beforeRetry: (transactions) => {
-      const cutoff = Date.now() - 24 * 60 * 60 * 1000; // 24 hours
-      const filtered = transactions.filter((tx) => {
-        const isRecent = tx.createdAt.getTime() > cutoff;
-        const notExhausted = tx.retryCount < 10;
-        return isRecent && notExhausted;
-      });
-
-      if (filtered.length < transactions.length) {
-        logger.warn('Filtered stale transactions', {
-          collectionName,
-          before: transactions.length,
-          after: filtered.length,
-        });
-      }
-
-      return filtered;
-    },
-
-    onLeadershipChange: (isLeader) => {
-      logger.info(isLeader ? 'Offline mode active' : 'Online-only mode', {
-        collectionName,
-      });
-    },
-
-    onStorageFailure: (diagnostic) => {
-      logger.warn('Storage failed - online-only mode', {
-        collectionName,
-        code: diagnostic.code,
-        message: diagnostic.message,
-      });
-    },
-  });
-
-  // Subscribe to Convex connection state for automatic retry trigger
-  if (convexClient.connectionState) {
-    const connectionState = convexClient.connectionState();
-    logger.debug('Initial connection state', {
-      collectionName,
-      isConnected: connectionState.isWebSocketConnected,
-    });
-  }
-
-  // Trigger retry when connection is restored
-  if (typeof window !== 'undefined') {
-    window.addEventListener('online', () => {
-      logger.info('Network online - notifying offline executor', { collectionName });
-      offline.notifyOnline();
-    });
-  }
-
-  logger.info('Offline support initialized', {
-    collectionName,
-    mode: offline.mode,
-  });
-
-  // Return collection directly - NO WRAPPER!
-  // Users call collection.insert/update/delete as normal
-  // Handlers run automatically, offline-transactions handles persistence
-  return rawCollection as ConvexCollection<T>;
-}
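For orientation, here is a minimal consumer sketch of the 1.1.0 API defined above. It sticks to names visible in this diff (`convexCollectionOptions`, the `api` shape, the `prose` config, `utils.prose()` returning an `EditorBinding`), and it assumes TanStack DB surfaces the config's `utils` on the created collection, as the `ConvexCollection` type above indicates. The package entry point, the Convex codegen module, and the persistence construction are assumptions, since those details live outside this file.

```typescript
import { createCollection } from '@tanstack/db';
import { ConvexClient } from 'convex/browser';
// Entry point is an assumption; the diff only shows internal '$/client/...' paths.
import { convexCollectionOptions } from '@trestleinc/replicate/client';
import { api } from './convex/_generated/api'; // hypothetical Convex codegen module

interface Task {
  id: string;
  title: string;
  body: unknown; // prose field, materialized as XmlFragmentJSON on the server
}

const convexClient = new ConvexClient(process.env.CONVEX_URL!); // URL source is illustrative

// Stand-in for a Persistence implementation ({ kv, createDocPersistence });
// construction lives in the new persistence/ adapters, not in this file.
declare const persistence: any;

const options = convexCollectionOptions<Task>({
  getKey: (t) => t.id,
  convexClient,
  api: {
    stream: api.tasks.stream,
    insert: api.tasks.insert,
    update: api.tasks.update,
    remove: api.tasks.remove,
    recovery: api.tasks.recovery,
    material: api.tasks.material,
  },
  collection: 'tasks',
  prose: ['body'],         // fields stored as Y.XmlFragment
  undoCaptureTimeout: 500, // merge rapid edits into one undo step
  persistence,
});

const tasks = createCollection(options);

// Bind a rich-text editor to a prose field; the binding exposes the Y.XmlFragment,
// a pending flag for unsynced edits, and content-scoped undo/redo.
async function bindEditor() {
  const binding = await tasks.utils.prose('task-1', 'body');
  const stop = binding.onPendingChange((pending) => console.log('unsynced edits:', pending));
  if (binding.canUndo()) binding.undo();
  stop();
}
```

Compared with 0.1.0, there is no `createConvexCollection` wrapper or offline executor; local edits are persisted through the Yjs document via the `persistence` adapter, and the `sync()` callback pushes local state into TanStack DB before the background subscription starts.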