@trestleinc/replicate 0.1.0 → 1.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (94) hide show
  1. package/README.md +356 -420
  2. package/dist/client/collection.d.ts +78 -76
  3. package/dist/client/errors.d.ts +59 -0
  4. package/dist/client/index.d.ts +22 -18
  5. package/dist/client/logger.d.ts +0 -1
  6. package/dist/client/merge.d.ts +77 -0
  7. package/dist/client/persistence/adapters/index.d.ts +8 -0
  8. package/dist/client/persistence/adapters/opsqlite.d.ts +46 -0
  9. package/dist/client/persistence/adapters/sqljs.d.ts +83 -0
  10. package/dist/client/persistence/index.d.ts +49 -0
  11. package/dist/client/persistence/indexeddb.d.ts +17 -0
  12. package/dist/client/persistence/memory.d.ts +16 -0
  13. package/dist/client/persistence/sqlite-browser.d.ts +51 -0
  14. package/dist/client/persistence/sqlite-level.d.ts +63 -0
  15. package/dist/client/persistence/sqlite-rn.d.ts +36 -0
  16. package/dist/client/persistence/sqlite.d.ts +47 -0
  17. package/dist/client/persistence/types.d.ts +42 -0
  18. package/dist/client/prose.d.ts +56 -0
  19. package/dist/client/replicate.d.ts +40 -0
  20. package/dist/client/services/checkpoint.d.ts +18 -0
  21. package/dist/client/services/reconciliation.d.ts +24 -0
  22. package/dist/component/_generated/api.d.ts +35 -0
  23. package/dist/component/_generated/api.js +3 -3
  24. package/dist/component/_generated/component.d.ts +89 -0
  25. package/dist/component/_generated/component.js +0 -0
  26. package/dist/component/_generated/dataModel.d.ts +45 -0
  27. package/dist/component/_generated/dataModel.js +0 -0
  28. package/{src → dist}/component/_generated/server.d.ts +9 -38
  29. package/dist/component/convex.config.d.ts +2 -2
  30. package/dist/component/convex.config.js +2 -1
  31. package/dist/component/logger.d.ts +8 -0
  32. package/dist/component/logger.js +30 -0
  33. package/dist/component/public.d.ts +36 -61
  34. package/dist/component/public.js +232 -58
  35. package/dist/component/schema.d.ts +32 -8
  36. package/dist/component/schema.js +19 -6
  37. package/dist/index.js +1553 -308
  38. package/dist/server/builder.d.ts +94 -0
  39. package/dist/server/index.d.ts +14 -17
  40. package/dist/server/schema.d.ts +17 -63
  41. package/dist/server/storage.d.ts +80 -0
  42. package/dist/server.js +268 -83
  43. package/dist/shared/index.d.ts +5 -0
  44. package/dist/shared/index.js +2 -0
  45. package/dist/shared/types.d.ts +50 -0
  46. package/dist/shared/types.js +6 -0
  47. package/dist/shared.js +6 -0
  48. package/package.json +59 -49
  49. package/src/client/collection.ts +877 -450
  50. package/src/client/errors.ts +45 -0
  51. package/src/client/index.ts +52 -26
  52. package/src/client/logger.ts +2 -28
  53. package/src/client/merge.ts +374 -0
  54. package/src/client/persistence/adapters/index.ts +8 -0
  55. package/src/client/persistence/adapters/opsqlite.ts +54 -0
  56. package/src/client/persistence/adapters/sqljs.ts +128 -0
  57. package/src/client/persistence/index.ts +54 -0
  58. package/src/client/persistence/indexeddb.ts +110 -0
  59. package/src/client/persistence/memory.ts +61 -0
  60. package/src/client/persistence/sqlite-browser.ts +107 -0
  61. package/src/client/persistence/sqlite-level.ts +407 -0
  62. package/src/client/persistence/sqlite-rn.ts +44 -0
  63. package/src/client/persistence/sqlite.ts +161 -0
  64. package/src/client/persistence/types.ts +49 -0
  65. package/src/client/prose.ts +369 -0
  66. package/src/client/replicate.ts +80 -0
  67. package/src/client/services/checkpoint.ts +86 -0
  68. package/src/client/services/reconciliation.ts +108 -0
  69. package/src/component/_generated/api.ts +52 -0
  70. package/src/component/_generated/component.ts +103 -0
  71. package/src/component/_generated/{dataModel.d.ts → dataModel.ts} +1 -1
  72. package/src/component/_generated/server.ts +161 -0
  73. package/src/component/convex.config.ts +3 -1
  74. package/src/component/logger.ts +36 -0
  75. package/src/component/public.ts +364 -111
  76. package/src/component/schema.ts +18 -5
  77. package/src/env.d.ts +31 -0
  78. package/src/server/builder.ts +85 -0
  79. package/src/server/index.ts +9 -24
  80. package/src/server/schema.ts +20 -76
  81. package/src/server/storage.ts +313 -0
  82. package/src/shared/index.ts +5 -0
  83. package/src/shared/types.ts +52 -0
  84. package/LICENSE.package +0 -201
  85. package/dist/client/storage.d.ts +0 -143
  86. package/dist/server/replication.d.ts +0 -122
  87. package/dist/server/ssr.d.ts +0 -79
  88. package/dist/ssr.js +0 -19
  89. package/src/client/storage.ts +0 -206
  90. package/src/component/_generated/api.d.ts +0 -95
  91. package/src/component/_generated/api.js +0 -23
  92. package/src/component/_generated/server.js +0 -90
  93. package/src/server/replication.ts +0 -244
  94. package/src/server/ssr.ts +0 -106
package/dist/index.js CHANGED
@@ -1,375 +1,1620 @@
1
- import { componentsGeneric } from "convex/server";
2
- import { NonRetriableError, startOfflineExecutor } from "@tanstack/offline-transactions";
1
+ import { Doc, Map as external_yjs_Map, UndoManager, XmlElement, XmlFragment, XmlText, applyUpdateV2, encodeStateAsUpdateV2, encodeStateVector } from "yjs";
2
+ import { createMutex } from "lib0/mutex";
3
+ import { Context, Data, Effect, Layer } from "effect";
3
4
  import { getLogger } from "@logtape/logtape";
4
- import * as __WEBPACK_EXTERNAL_MODULE_yjs__ from "yjs";
5
- componentsGeneric();
6
- class ReplicateStorage {
7
- component;
8
- collectionName;
9
- constructor(component, collectionName){
10
- this.component = component;
11
- this.collectionName = collectionName;
12
- }
13
- async insertDocument(ctx, documentId, crdtBytes, version) {
14
- return ctx.runMutation(this.component.public.insertDocument, {
15
- collectionName: this.collectionName,
16
- documentId,
17
- crdtBytes,
18
- version
5
+ import { IndexeddbPersistence } from "y-indexeddb";
6
+ import { BrowserLevel } from "browser-level";
7
+ import { LeveldbPersistence } from "y-leveldb";
8
+ import { AbstractIterator, AbstractKeyIterator, AbstractLevel, AbstractValueIterator } from "abstract-level";
9
+ function logger_getLogger(category) {
10
+ return getLogger([
11
+ 'replicate',
12
+ ...category
13
+ ]);
14
+ }
15
+ class NetworkError extends Data.TaggedError('NetworkError') {
16
+ }
17
+ class IDBError extends Data.TaggedError('IDBError') {
18
+ }
19
+ class IDBWriteError extends Data.TaggedError('IDBWriteError') {
20
+ }
21
+ class ReconciliationError extends Data.TaggedError('ReconciliationError') {
22
+ }
23
+ class ProseError extends Data.TaggedError('ProseError') {
24
+ }
25
+ class CollectionNotReadyError extends Data.TaggedError('CollectionNotReadyError') {
26
+ }
27
+ class NonRetriableError extends Error {
28
+ constructor(message){
29
+ super(message);
30
+ this.name = 'NonRetriableError';
31
+ }
32
+ }
33
+ class Checkpoint extends Context.Tag('Checkpoint')() {
34
+ }
35
+ function createCheckpointLayer(kv) {
36
+ return Layer.succeed(Checkpoint, Checkpoint.of({
37
+ loadCheckpoint: (collection)=>Effect.gen(function*(_) {
38
+ const key = `checkpoint:${collection}`;
39
+ const stored = yield* _(Effect.tryPromise({
40
+ try: ()=>kv.get(key),
41
+ catch: (cause)=>new IDBError({
42
+ operation: 'get',
43
+ key,
44
+ cause
45
+ })
46
+ }));
47
+ if (stored) {
48
+ yield* _(Effect.logDebug('Loaded checkpoint from storage', {
49
+ collection,
50
+ checkpoint: stored
51
+ }));
52
+ return stored;
53
+ }
54
+ yield* _(Effect.logDebug('No stored checkpoint, using default', {
55
+ collection
56
+ }));
57
+ return {
58
+ lastModified: 0
59
+ };
60
+ }),
61
+ saveCheckpoint: (collection, checkpoint)=>Effect.gen(function*(_) {
62
+ const key = `checkpoint:${collection}`;
63
+ yield* _(Effect.tryPromise({
64
+ try: ()=>kv.set(key, checkpoint),
65
+ catch: (cause)=>new IDBWriteError({
66
+ key,
67
+ value: checkpoint,
68
+ cause
69
+ })
70
+ }));
71
+ yield* _(Effect.logDebug('Checkpoint saved', {
72
+ collection,
73
+ checkpoint
74
+ }));
75
+ }),
76
+ clearCheckpoint: (collection)=>Effect.gen(function*(_) {
77
+ const key = `checkpoint:${collection}`;
78
+ yield* _(Effect.tryPromise({
79
+ try: ()=>kv.del(key),
80
+ catch: (cause)=>new IDBError({
81
+ operation: 'delete',
82
+ key,
83
+ cause
84
+ })
85
+ }));
86
+ yield* _(Effect.logDebug('Checkpoint cleared', {
87
+ collection
88
+ }));
89
+ })
90
+ }));
91
+ }
92
+ const logger = logger_getLogger([
93
+ 'replicate',
94
+ 'merge'
95
+ ]);
96
+ async function createYjsDocument(collection, kv) {
97
+ const clientIdKey = `yjsClientId:${collection}`;
98
+ let clientId = await kv.get(clientIdKey);
99
+ if (!clientId) {
100
+ clientId = Math.floor(2147483647 * Math.random());
101
+ await kv.set(clientIdKey, clientId);
102
+ logger.info('Generated new Yjs clientID', {
103
+ collection,
104
+ clientId
19
105
  });
20
106
  }
21
- async updateDocument(ctx, documentId, crdtBytes, version) {
22
- return ctx.runMutation(this.component.public.updateDocument, {
23
- collectionName: this.collectionName,
24
- documentId,
25
- crdtBytes,
26
- version
107
+ const ydoc = new Doc({
108
+ guid: collection,
109
+ clientID: clientId
110
+ });
111
+ logger.info('Created Yjs document', {
112
+ collection,
113
+ clientId
114
+ });
115
+ return ydoc;
116
+ }
117
+ function applyUpdate(doc, update, origin) {
118
+ applyUpdateV2(doc, update, origin);
119
+ }
120
+ function getYMap(doc, name) {
121
+ return doc.getMap(name);
122
+ }
123
+ function yjsTransact(doc, fn, origin) {
124
+ return doc.transact(fn, origin);
125
+ }
126
+ function transactWithDelta(doc, fn, origin) {
127
+ const beforeVector = encodeStateVector(doc);
128
+ const result = doc.transact(fn, origin);
129
+ const delta = encodeStateAsUpdateV2(doc, beforeVector);
130
+ return {
131
+ result,
132
+ delta
133
+ };
134
+ }
135
+ function isYjsAbstractType(value) {
136
+ if (null === value || 'object' != typeof value) return false;
137
+ const v = value;
138
+ return '_map' in v && '_eH' in v && 'doc' in v;
139
+ }
140
+ function isYMap(value) {
141
+ if (!isYjsAbstractType(value)) return false;
142
+ const v = value;
143
+ return 'function' == typeof v.keys && 'function' == typeof v.get;
144
+ }
145
+ function isYArray(value) {
146
+ if (!isYjsAbstractType(value)) return false;
147
+ const v = value;
148
+ return 'function' == typeof v.toArray && 'function' != typeof v.get;
149
+ }
150
+ function isYXmlFragment(value) {
151
+ if (!isYjsAbstractType(value)) return false;
152
+ const v = value;
153
+ return 'function' == typeof v.toArray && 'function' != typeof v.keys;
154
+ }
155
+ function serialize(value) {
156
+ if (null == value) return value;
157
+ if ('object' != typeof value) return value;
158
+ if (isYXmlFragment(value)) return fragmentToJSON(value);
159
+ if (isYMap(value)) {
160
+ const result = {};
161
+ const ymap = value;
162
+ ymap.forEach((v, k)=>{
163
+ result[k] = serialize(v);
27
164
  });
165
+ return result;
28
166
  }
29
- async deleteDocument(ctx, documentId, crdtBytes, version) {
30
- return ctx.runMutation(this.component.public.deleteDocument, {
31
- collectionName: this.collectionName,
32
- documentId,
33
- crdtBytes,
34
- version
167
+ if (isYArray(value)) return value.toArray().map(serialize);
168
+ return value;
169
+ }
170
+ function serializeYMap(ymap) {
171
+ return serialize(ymap);
172
+ }
173
+ function extractItems(ymap) {
174
+ const items = [];
175
+ ymap.forEach((value)=>{
176
+ if (isYMap(value)) items.push(serialize(value));
177
+ });
178
+ return items;
179
+ }
180
+ function extractItem(ymap, key) {
181
+ const value = ymap.get(key);
182
+ if (isYMap(value)) return serialize(value);
183
+ return null;
184
+ }
185
+ function isDoc(value) {
186
+ return 'object' == typeof value && null !== value && 'type' in value && 'doc' === value.type;
187
+ }
188
+ function fragmentToJSON(fragment) {
189
+ const content = [];
190
+ for (const child of fragment.toArray())if (child instanceof XmlElement) content.push(xmlElementToJSON(child));
191
+ else if (child instanceof XmlText) {
192
+ const textContent = xmlTextToJSON(child);
193
+ if (textContent.length > 0) content.push({
194
+ type: 'paragraph',
195
+ content: textContent
196
+ });
197
+ }
198
+ return {
199
+ type: 'doc',
200
+ content: content.length > 0 ? content : [
201
+ {
202
+ type: 'paragraph'
203
+ }
204
+ ]
205
+ };
206
+ }
207
+ function xmlElementToJSON(element) {
208
+ const result = {
209
+ type: element.nodeName
210
+ };
211
+ const attrs = element.getAttributes();
212
+ if (Object.keys(attrs).length > 0) result.attrs = attrs;
213
+ const content = [];
214
+ for (const child of element.toArray())if (child instanceof XmlElement) content.push(xmlElementToJSON(child));
215
+ else if (child instanceof XmlText) content.push(...xmlTextToJSON(child));
216
+ if (content.length > 0) result.content = content;
217
+ return result;
218
+ }
219
+ function xmlTextToJSON(text) {
220
+ const result = [];
221
+ const delta = text.toDelta();
222
+ for (const op of delta)if ('string' == typeof op.insert) {
223
+ const node = {
224
+ type: 'text',
225
+ text: op.insert
226
+ };
227
+ if (op.attributes && Object.keys(op.attributes).length > 0) node.marks = Object.entries(op.attributes).map(([type, attrs])=>({
228
+ type,
229
+ attrs: 'object' == typeof attrs ? attrs : void 0
230
+ }));
231
+ result.push(node);
232
+ }
233
+ return result;
234
+ }
235
+ function fragmentFromJSON(fragment, json) {
236
+ if (!json.content) return;
237
+ for (const node of json.content)appendNodeToFragment(fragment, node);
238
+ }
239
+ function extract(content) {
240
+ if (!content || 'object' != typeof content) return '';
241
+ const doc = content;
242
+ if (!doc.content || !Array.isArray(doc.content)) return '';
243
+ return doc.content.map((block)=>{
244
+ if (!block.content || !Array.isArray(block.content)) return '';
245
+ return block.content.map((node)=>node.text || '').join('');
246
+ }).join(' ');
247
+ }
248
+ function appendNodeToFragment(parent, node) {
249
+ if ('text' === node.type) {
250
+ const text = new XmlText();
251
+ if (node.text) {
252
+ const attrs = {};
253
+ if (node.marks) for (const mark of node.marks)attrs[mark.type] = mark.attrs ?? true;
254
+ text.insert(0, node.text, Object.keys(attrs).length > 0 ? attrs : void 0);
255
+ }
256
+ parent.insert(parent.length, [
257
+ text
258
+ ]);
259
+ } else {
260
+ const element = new XmlElement(node.type);
261
+ if (node.attrs) for (const [key, value] of Object.entries(node.attrs))element.setAttribute(key, value);
262
+ if (node.content) for (const child of node.content)appendNodeToFragment(element, child);
263
+ parent.insert(parent.length, [
264
+ element
265
+ ]);
266
+ }
267
+ }
268
+ function serializeYMapValue(value) {
269
+ return serialize(value);
270
+ }
271
+ function getFragmentFromYMap(ymap, documentId, field) {
272
+ const doc = ymap.get(documentId);
273
+ if (!isYMap(doc)) return null;
274
+ const fieldValue = doc.get(field);
275
+ if (isYXmlFragment(fieldValue)) return fieldValue;
276
+ return null;
277
+ }
278
+ class Reconciliation extends Context.Tag('Reconciliation')() {
279
+ }
280
+ const ReconciliationLive = Layer.succeed(Reconciliation, Reconciliation.of({
281
+ reconcile: (ydoc, ymap, collection, serverDocs, getKey)=>Effect.gen(function*(_) {
282
+ const serverDocIds = new Set(serverDocs.map(getKey));
283
+ const toDelete = [];
284
+ ymap.forEach((_, key)=>{
285
+ if (!serverDocIds.has(key)) toDelete.push(key);
286
+ });
287
+ if (0 === toDelete.length) {
288
+ yield* _(Effect.logDebug('No phantom documents found', {
289
+ collection
290
+ }));
291
+ return [];
292
+ }
293
+ yield* _(Effect.logWarning(`Found ${toDelete.length} phantom documents`, {
294
+ collection,
295
+ phantomDocs: toDelete.slice(0, 10)
296
+ }));
297
+ const removedItems = [];
298
+ for (const key of toDelete){
299
+ const itemYMap = ymap.get(key);
300
+ if (itemYMap instanceof external_yjs_Map) removedItems.push(serializeYMap(itemYMap));
301
+ }
302
+ yjsTransact(ydoc, ()=>{
303
+ for (const key of toDelete)ymap.delete(key);
304
+ }, 'reconciliation');
305
+ yield* _(Effect.logInfo('Reconciliation completed', {
306
+ collection,
307
+ deletedCount: removedItems.length
308
+ }));
309
+ return removedItems;
310
+ }).pipe(Effect.catchAll((cause)=>Effect.fail(new ReconciliationError({
311
+ collection,
312
+ reason: 'Reconciliation failed',
313
+ cause
314
+ }))))
315
+ }));
316
+ function createReplicateOps(params) {
317
+ return {
318
+ insert (items) {
319
+ params.begin();
320
+ for (const item of items)params.write({
321
+ type: 'insert',
322
+ value: item
323
+ });
324
+ params.commit();
325
+ },
326
+ delete (items) {
327
+ params.begin();
328
+ for (const item of items)params.write({
329
+ type: 'delete',
330
+ value: item
331
+ });
332
+ params.commit();
333
+ },
334
+ upsert (items) {
335
+ params.begin();
336
+ for (const item of items)params.write({
337
+ type: 'update',
338
+ value: item
339
+ });
340
+ params.commit();
341
+ },
342
+ replace (items) {
343
+ params.begin();
344
+ params.truncate();
345
+ for (const item of items)params.write({
346
+ type: 'insert',
347
+ value: item
348
+ });
349
+ params.commit();
350
+ }
351
+ };
352
+ }
353
+ const SERVER_ORIGIN = 'server';
354
+ const prose_logger = logger_getLogger([
355
+ 'replicate',
356
+ 'prose'
357
+ ]);
358
+ const DEFAULT_DEBOUNCE_MS = 1000;
359
+ const applyingFromServer = new Map();
360
+ const debounceTimers = new Map();
361
+ const lastSyncedVectors = new Map();
362
+ const pendingState = new Map();
363
+ const pendingListeners = new Map();
364
+ const fragmentObservers = new Map();
365
+ const failedSyncQueue = new Map();
366
+ function isApplyingFromServer(collection, documentId) {
367
+ const key = `${collection}:${documentId}`;
368
+ return applyingFromServer.get(key) ?? false;
369
+ }
370
+ function setApplyingFromServer(collection, documentId, value) {
371
+ const key = `${collection}:${documentId}`;
372
+ if (value) applyingFromServer.set(key, true);
373
+ else applyingFromServer.delete(key);
374
+ }
375
+ function setPendingInternal(key, value) {
376
+ const current = pendingState.get(key) ?? false;
377
+ if (current !== value) {
378
+ pendingState.set(key, value);
379
+ const listeners = pendingListeners.get(key);
380
+ if (listeners) for (const cb of listeners)try {
381
+ cb(value);
382
+ } catch (err) {
383
+ prose_logger.error('Pending listener error', {
384
+ key,
385
+ error: String(err)
386
+ });
387
+ }
388
+ }
389
+ }
390
+ function isPending(collection, documentId) {
391
+ return pendingState.get(`${collection}:${documentId}`) ?? false;
392
+ }
393
+ function subscribePending(collection, documentId, callback) {
394
+ const key = `${collection}:${documentId}`;
395
+ let listeners = pendingListeners.get(key);
396
+ if (!listeners) {
397
+ listeners = new Set();
398
+ pendingListeners.set(key, listeners);
399
+ }
400
+ listeners.add(callback);
401
+ return ()=>{
402
+ listeners?.delete(callback);
403
+ if (listeners?.size === 0) pendingListeners.delete(key);
404
+ };
405
+ }
406
+ function cancelPending(collection, documentId) {
407
+ const key = `${collection}:${documentId}`;
408
+ const timer = debounceTimers.get(key);
409
+ if (timer) {
410
+ clearTimeout(timer);
411
+ debounceTimers.delete(key);
412
+ setPendingInternal(key, false);
413
+ prose_logger.debug('Cancelled pending sync due to remote update', {
414
+ collection,
415
+ documentId
35
416
  });
36
417
  }
37
- async stream(ctx, checkpoint, limit) {
38
- return ctx.runQuery(this.component.public.stream, {
39
- collectionName: this.collectionName,
40
- checkpoint,
41
- limit
418
+ }
419
+ function cancelAllPending(collection) {
420
+ const prefix = `${collection}:`;
421
+ for (const [key, timer] of debounceTimers)if (key.startsWith(prefix)) {
422
+ clearTimeout(timer);
423
+ debounceTimers.delete(key);
424
+ setPendingInternal(key, false);
425
+ }
426
+ prose_logger.debug('Cancelled all pending syncs', {
427
+ collection
428
+ });
429
+ }
430
+ function observeFragment(config) {
431
+ const { collection, documentId, field, fragment, ydoc, ymap, collectionRef, debounceMs = DEFAULT_DEBOUNCE_MS } = config;
432
+ const key = `${collection}:${documentId}`;
433
+ const existingCleanup = fragmentObservers.get(key);
434
+ if (existingCleanup) {
435
+ prose_logger.debug('Fragment already being observed', {
436
+ collection,
437
+ documentId,
438
+ field
42
439
  });
440
+ return existingCleanup;
43
441
  }
442
+ const observerHandler = (_events, transaction)=>{
443
+ if (transaction.origin === SERVER_ORIGIN) return;
444
+ const existing = debounceTimers.get(key);
445
+ if (existing) clearTimeout(existing);
446
+ setPendingInternal(key, true);
447
+ const timer = setTimeout(async ()=>{
448
+ debounceTimers.delete(key);
449
+ const itemYMap = ymap.get(documentId);
450
+ if (!itemYMap) {
451
+ prose_logger.error('Document not found', {
452
+ collection,
453
+ documentId
454
+ });
455
+ setPendingInternal(key, false);
456
+ return;
457
+ }
458
+ try {
459
+ const lastVector = lastSyncedVectors.get(key);
460
+ const delta = lastVector ? encodeStateAsUpdateV2(ydoc, lastVector) : encodeStateAsUpdateV2(ydoc);
461
+ if (delta.length <= 2) {
462
+ prose_logger.debug('No changes to sync', {
463
+ collection,
464
+ documentId
465
+ });
466
+ setPendingInternal(key, false);
467
+ return;
468
+ }
469
+ const crdtBytes = delta.buffer;
470
+ const currentVector = encodeStateVector(ydoc);
471
+ prose_logger.debug('Syncing prose delta', {
472
+ collection,
473
+ documentId,
474
+ deltaSize: delta.byteLength
475
+ });
476
+ const materializedDoc = serializeYMapValue(itemYMap);
477
+ const result = collectionRef.update(documentId, {
478
+ metadata: {
479
+ contentSync: {
480
+ crdtBytes,
481
+ materializedDoc
482
+ }
483
+ }
484
+ }, (draft)=>{
485
+ draft.updatedAt = Date.now();
486
+ });
487
+ await result.isPersisted.promise;
488
+ lastSyncedVectors.set(key, currentVector);
489
+ failedSyncQueue.delete(key);
490
+ setPendingInternal(key, false);
491
+ prose_logger.debug('Prose sync completed', {
492
+ collection,
493
+ documentId
494
+ });
495
+ } catch (err) {
496
+ prose_logger.error('Prose sync failed, queued for retry', {
497
+ collection,
498
+ documentId,
499
+ error: String(err)
500
+ });
501
+ failedSyncQueue.set(key, true);
502
+ }
503
+ }, debounceMs);
504
+ debounceTimers.set(key, timer);
505
+ if (failedSyncQueue.has(key)) {
506
+ failedSyncQueue.delete(key);
507
+ prose_logger.debug('Retrying failed sync', {
508
+ collection,
509
+ documentId
510
+ });
511
+ }
512
+ };
513
+ fragment.observeDeep(observerHandler);
514
+ const cleanup = ()=>{
515
+ fragment.unobserveDeep(observerHandler);
516
+ cancelPending(collection, documentId);
517
+ fragmentObservers.delete(key);
518
+ lastSyncedVectors.delete(key);
519
+ prose_logger.debug('Fragment observer cleaned up', {
520
+ collection,
521
+ documentId,
522
+ field
523
+ });
524
+ };
525
+ fragmentObservers.set(key, cleanup);
526
+ prose_logger.debug('Fragment observer registered', {
527
+ collection,
528
+ documentId,
529
+ field
530
+ });
531
+ return cleanup;
44
532
  }
45
- function logger_getLogger(category) {
46
- return getLogger([
47
- 'convex-replicate',
48
- ...category
49
- ]);
533
+ function prose_cleanup(collection) {
534
+ const prefix = `${collection}:`;
535
+ for (const [key, timer] of debounceTimers)if (key.startsWith(prefix)) {
536
+ clearTimeout(timer);
537
+ debounceTimers.delete(key);
538
+ }
539
+ for (const key of pendingState.keys())if (key.startsWith(prefix)) pendingState.delete(key);
540
+ for (const key of pendingListeners.keys())if (key.startsWith(prefix)) pendingListeners.delete(key);
541
+ for (const key of applyingFromServer.keys())if (key.startsWith(prefix)) applyingFromServer.delete(key);
542
+ for (const key of lastSyncedVectors.keys())if (key.startsWith(prefix)) lastSyncedVectors.delete(key);
543
+ for (const [key, cleanupFn] of fragmentObservers)if (key.startsWith(prefix)) {
544
+ cleanupFn();
545
+ fragmentObservers.delete(key);
546
+ }
547
+ for (const key of failedSyncQueue.keys())if (key.startsWith(prefix)) failedSyncQueue.delete(key);
548
+ prose_logger.debug('Prose cleanup complete', {
549
+ collection
550
+ });
50
551
  }
51
- const logger = logger_getLogger([
52
- 'convex-replicate',
552
+ const collection_logger = logger_getLogger([
553
+ 'replicate',
53
554
  'collection'
54
555
  ]);
55
- function convexCollectionOptions({ getKey, initialData, convexClient, api, collectionName }) {
56
- const ydoc = new __WEBPACK_EXTERNAL_MODULE_yjs__.Doc({
57
- guid: collectionName
556
+ function handleMutationError(error, operation, collection) {
557
+ const httpError = error;
558
+ collection_logger.error(`${operation} failed`, {
559
+ collection,
560
+ error: httpError?.message,
561
+ status: httpError?.status
58
562
  });
59
- const ymap = ydoc.getMap(collectionName);
60
- let pendingUpdate = null;
61
- ydoc.on('update', (update, origin)=>{
62
- pendingUpdate = update;
63
- logger.debug('Yjs update event fired', {
64
- collectionName,
65
- updateSize: update.length,
66
- origin
67
- });
563
+ if (httpError?.status === 401 || httpError?.status === 403) throw new NonRetriableError('Authentication failed');
564
+ if (httpError?.status === 422) throw new NonRetriableError('Validation error');
565
+ throw error;
566
+ }
567
+ const cleanupFunctions = new Map();
568
+ const collectionDocs = new Map();
569
+ const collectionUndoConfig = new Map();
570
+ const DEFAULT_UNDO_CAPTURE_TIMEOUT = 500;
571
+ const collection_DEFAULT_DEBOUNCE_MS = 1000;
572
+ const collectionMutex = new Map();
573
+ const fragmentUndoManagers = new Map();
574
+ const debounceConfig = new Map();
575
+ const collectionRefs = new Map();
576
+ const serverStateVectors = new Map();
577
+ function getOrCreateMutex(collection) {
578
+ let mux = collectionMutex.get(collection);
579
+ if (!mux) {
580
+ mux = createMutex();
581
+ collectionMutex.set(collection, mux);
582
+ }
583
+ return mux;
584
+ }
585
+ function getOrCreateFragmentUndoManager(collection, documentId, field, fragment) {
586
+ const key = `${collection}:${documentId}:${field}`;
587
+ let um = fragmentUndoManagers.get(key);
588
+ if (um) return um;
589
+ const config = collectionUndoConfig.get(collection);
590
+ um = new UndoManager([
591
+ fragment
592
+ ], {
593
+ captureTimeout: config?.captureTimeout ?? DEFAULT_UNDO_CAPTURE_TIMEOUT,
594
+ trackedOrigins: new Set([
595
+ "fragment"
596
+ ])
597
+ });
598
+ fragmentUndoManagers.set(key, um);
599
+ return um;
600
+ }
601
+ function convexCollectionOptions({ getKey, material, convexClient, api, collection, prose: proseFields, undoCaptureTimeout = 500, persistence }) {
602
+ const proseFieldSet = new Set(proseFields);
603
+ const utils = {
604
+ async prose (documentId, field) {
605
+ const fieldStr = field;
606
+ if (!proseFieldSet.has(fieldStr)) throw new ProseError({
607
+ documentId,
608
+ field: fieldStr,
609
+ collection
610
+ });
611
+ let docs = collectionDocs.get(collection);
612
+ if (!docs) {
613
+ await new Promise((resolve, reject)=>{
614
+ const maxWait = 10000;
615
+ const startTime = Date.now();
616
+ const check = setInterval(()=>{
617
+ if (collectionDocs.has(collection)) {
618
+ clearInterval(check);
619
+ resolve();
620
+ } else if (Date.now() - startTime > maxWait) {
621
+ clearInterval(check);
622
+ reject(new ProseError({
623
+ documentId,
624
+ field: fieldStr,
625
+ collection
626
+ }));
627
+ }
628
+ }, 10);
629
+ });
630
+ docs = collectionDocs.get(collection);
631
+ }
632
+ if (!docs) throw new ProseError({
633
+ documentId,
634
+ field: fieldStr,
635
+ collection
636
+ });
637
+ const fragment = getFragmentFromYMap(docs.ymap, documentId, fieldStr);
638
+ if (!fragment) throw new ProseError({
639
+ documentId,
640
+ field: fieldStr,
641
+ collection
642
+ });
643
+ const collectionRef = collectionRefs.get(collection);
644
+ if (collectionRef) observeFragment({
645
+ collection,
646
+ documentId,
647
+ field: fieldStr,
648
+ fragment,
649
+ ydoc: docs.ydoc,
650
+ ymap: docs.ymap,
651
+ collectionRef,
652
+ debounceMs: debounceConfig.get(collection) ?? collection_DEFAULT_DEBOUNCE_MS
653
+ });
654
+ const undoManager = getOrCreateFragmentUndoManager(collection, documentId, fieldStr, fragment);
655
+ return {
656
+ fragment,
657
+ provider: {
658
+ awareness: null
659
+ },
660
+ get pending () {
661
+ return isPending(collection, documentId);
662
+ },
663
+ onPendingChange (callback) {
664
+ return subscribePending(collection, documentId, callback);
665
+ },
666
+ undo () {
667
+ undoManager.undo();
668
+ },
669
+ redo () {
670
+ undoManager.redo();
671
+ },
672
+ canUndo () {
673
+ return undoManager.canUndo();
674
+ },
675
+ canRedo () {
676
+ return undoManager.canRedo();
677
+ }
678
+ };
679
+ }
680
+ };
681
+ let ydoc = null;
682
+ let ymap = null;
683
+ let docPersistence = null;
684
+ let ops = null;
685
+ const checkpointLayer = createCheckpointLayer(persistence.kv);
686
+ const servicesLayer = Layer.mergeAll(checkpointLayer, ReconciliationLive);
687
+ let resolvePersistenceReady;
688
+ const persistenceReadyPromise = new Promise((resolve)=>{
689
+ resolvePersistenceReady = resolve;
690
+ });
691
+ let resolveOptimisticReady;
692
+ const optimisticReadyPromise = new Promise((resolve)=>{
693
+ resolveOptimisticReady = resolve;
68
694
  });
695
+ const reconcile = (ops)=>Effect.gen(function*() {
696
+ if (!api.material) return;
697
+ const materialApi = api.material;
698
+ const reconciliation = yield* Reconciliation;
699
+ const serverResponse = yield* Effect.tryPromise({
700
+ try: ()=>convexClient.query(materialApi, {}),
701
+ catch: (error)=>new Error(`Reconciliation query failed: ${error}`)
702
+ });
703
+ const serverDocs = Array.isArray(serverResponse) ? serverResponse : serverResponse.documents || [];
704
+ const removedItems = yield* reconciliation.reconcile(ydoc, ymap, collection, serverDocs, (doc)=>String(getKey(doc)));
705
+ if (removedItems.length > 0) ops.delete(removedItems);
706
+ }).pipe(Effect.catchAll((error)=>Effect.gen(function*() {
707
+ yield* Effect.logError('Reconciliation failed', {
708
+ collection,
709
+ error
710
+ });
711
+ })));
712
+ const recoverSync = async ()=>{
713
+ if (!api.recovery) return void collection_logger.debug('No recovery API configured, skipping recovery sync', {
714
+ collection
715
+ });
716
+ try {
717
+ const localStateVector = encodeStateVector(ydoc);
718
+ collection_logger.debug('Starting recovery sync', {
719
+ collection,
720
+ localVectorSize: localStateVector.byteLength
721
+ });
722
+ const response = await convexClient.query(api.recovery, {
723
+ clientStateVector: localStateVector.buffer
724
+ });
725
+ if (response.diff) {
726
+ const mux = getOrCreateMutex(collection);
727
+ mux(()=>{
728
+ applyUpdate(ydoc, new Uint8Array(response.diff), "server");
729
+ });
730
+ collection_logger.info('Recovery sync applied diff', {
731
+ collection,
732
+ diffSize: response.diff.byteLength
733
+ });
734
+ } else collection_logger.debug('Recovery sync - no diff needed', {
735
+ collection
736
+ });
737
+ if (response.serverStateVector) serverStateVectors.set(collection, new Uint8Array(response.serverStateVector));
738
+ } catch (error) {
739
+ collection_logger.error('Recovery sync failed', {
740
+ collection,
741
+ error: String(error)
742
+ });
743
+ }
744
+ };
745
+ const applyYjsInsert = (mutations)=>{
746
+ const { delta } = transactWithDelta(ydoc, ()=>{
747
+ mutations.forEach((mut)=>{
748
+ const itemYMap = new external_yjs_Map();
749
+ ymap.set(String(mut.key), itemYMap);
750
+ Object.entries(mut.modified).forEach(([k, v])=>{
751
+ if (proseFieldSet.has(k) && isDoc(v)) {
752
+ const fragment = new XmlFragment();
753
+ itemYMap.set(k, fragment);
754
+ fragmentFromJSON(fragment, v);
755
+ } else itemYMap.set(k, v);
756
+ });
757
+ });
758
+ }, "local");
759
+ return delta;
760
+ };
761
+ const applyYjsUpdate = (mutations)=>{
762
+ const { delta } = transactWithDelta(ydoc, ()=>{
763
+ mutations.forEach((mut)=>{
764
+ const itemYMap = ymap.get(String(mut.key));
765
+ if (itemYMap) {
766
+ const modifiedFields = mut.modified;
767
+ if (!modifiedFields) return void collection_logger.warn('mut.modified is null/undefined', {
768
+ collection,
769
+ key: String(mut.key)
770
+ });
771
+ Object.entries(modifiedFields).forEach(([k, v])=>{
772
+ const existingValue = itemYMap.get(k);
773
+ if (proseFieldSet.has(k)) return void collection_logger.debug('Skipping prose field in applyYjsUpdate', {
774
+ field: k
775
+ });
776
+ if (existingValue instanceof XmlFragment) return void collection_logger.debug('Preserving live fragment field', {
777
+ field: k
778
+ });
779
+ itemYMap.set(k, v);
780
+ });
781
+ } else collection_logger.error('Update attempted on non-existent item', {
782
+ collection,
783
+ key: String(mut.key)
784
+ });
785
+ });
786
+ }, "local");
787
+ return delta;
788
+ };
789
+ const applyYjsDelete = (mutations)=>{
790
+ const { delta } = transactWithDelta(ydoc, ()=>{
791
+ mutations.forEach((mut)=>{
792
+ ymap.delete(String(mut.key));
793
+ });
794
+ }, "local");
795
+ return delta;
796
+ };
69
797
  return {
70
- id: collectionName,
798
+ id: collection,
71
799
  getKey,
72
800
  _convexClient: convexClient,
73
- _collectionName: collectionName,
801
+ _collection: collection,
802
+ _proseFields: proseFields,
803
+ _persistence: persistence,
804
+ utils,
74
805
  onInsert: async ({ transaction })=>{
75
- logger.debug('onInsert handler called', {
76
- collectionName,
77
- mutationCount: transaction.mutations.length
78
- });
79
806
  try {
80
- ydoc.transact(()=>{
81
- transaction.mutations.forEach((mut)=>{
82
- const itemYMap = new __WEBPACK_EXTERNAL_MODULE_yjs__.Map();
83
- Object.entries(mut.modified).forEach(([k, v])=>{
84
- itemYMap.set(k, v);
85
- });
86
- ymap.set(String(mut.key), itemYMap);
87
- });
88
- }, 'insert');
89
- if (pendingUpdate) {
90
- logger.debug('Sending insert delta to Convex', {
91
- collectionName,
92
- documentId: String(transaction.mutations[0].key),
93
- deltaSize: pendingUpdate.length
94
- });
95
- await convexClient.mutation(api.insertDocument, {
96
- collectionName,
97
- documentId: String(transaction.mutations[0].key),
98
- crdtBytes: pendingUpdate.buffer,
99
- materializedDoc: transaction.mutations[0].modified,
100
- version: Date.now()
101
- });
102
- pendingUpdate = null;
103
- logger.info('Insert persisted to Convex', {
104
- collectionName,
105
- documentId: String(transaction.mutations[0].key)
807
+ await Promise.all([
808
+ persistenceReadyPromise,
809
+ optimisticReadyPromise
810
+ ]);
811
+ const delta = applyYjsInsert(transaction.mutations);
812
+ if (delta.length > 0) {
813
+ const documentKey = String(transaction.mutations[0].key);
814
+ const itemYMap = ymap.get(documentKey);
815
+ const materializedDoc = itemYMap ? serializeYMapValue(itemYMap) : transaction.mutations[0].modified;
816
+ await convexClient.mutation(api.insert, {
817
+ documentId: documentKey,
818
+ crdtBytes: delta.slice().buffer,
819
+ materializedDoc
106
820
  });
107
821
  }
108
822
  } catch (error) {
109
- logger.error('Insert failed', {
110
- collectionName,
111
- error: error?.message,
112
- status: error?.status
113
- });
114
- if (error?.status === 401 || error?.status === 403) throw new NonRetriableError('Authentication failed');
115
- if (error?.status === 422) throw new NonRetriableError('Validation error');
116
- throw error;
823
+ handleMutationError(error, 'Insert', collection);
117
824
  }
118
825
  },
119
826
  onUpdate: async ({ transaction })=>{
120
- logger.debug('onUpdate handler called', {
121
- collectionName,
122
- mutationCount: transaction.mutations.length
123
- });
124
827
  try {
125
- ydoc.transact(()=>{
126
- transaction.mutations.forEach((mut)=>{
127
- const itemYMap = ymap.get(String(mut.key));
128
- if (itemYMap) Object.entries(mut.modified || {}).forEach(([k, v])=>{
129
- itemYMap.set(k, v);
130
- });
131
- else {
132
- const newYMap = new __WEBPACK_EXTERNAL_MODULE_yjs__.Map();
133
- Object.entries(mut.modified).forEach(([k, v])=>{
134
- newYMap.set(k, v);
135
- });
136
- ymap.set(String(mut.key), newYMap);
137
- }
138
- });
139
- }, 'update');
140
- if (pendingUpdate) {
141
- logger.debug('Sending update delta to Convex', {
142
- collectionName,
143
- documentId: String(transaction.mutations[0].key),
144
- deltaSize: pendingUpdate.length
145
- });
146
- await convexClient.mutation(api.updateDocument, {
147
- collectionName,
148
- documentId: String(transaction.mutations[0].key),
149
- crdtBytes: pendingUpdate.buffer,
150
- materializedDoc: transaction.mutations[0].modified,
151
- version: Date.now()
828
+ const mutation = transaction.mutations[0];
829
+ const documentKey = String(mutation.key);
830
+ if (isApplyingFromServer(collection, documentKey)) return void collection_logger.debug('Skipping onUpdate - data from server', {
831
+ collection,
832
+ documentKey
833
+ });
834
+ await Promise.all([
835
+ persistenceReadyPromise,
836
+ optimisticReadyPromise
837
+ ]);
838
+ const metadata = mutation.metadata;
839
+ if (metadata?.contentSync) {
840
+ const { crdtBytes, materializedDoc } = metadata.contentSync;
841
+ await convexClient.mutation(api.update, {
842
+ documentId: documentKey,
843
+ crdtBytes,
844
+ materializedDoc
152
845
  });
153
- pendingUpdate = null;
154
- logger.info('Update persisted to Convex', {
155
- collectionName,
156
- documentId: String(transaction.mutations[0].key)
846
+ return;
847
+ }
848
+ const delta = applyYjsUpdate(transaction.mutations);
849
+ if (delta.length > 0) {
850
+ const itemYMap = ymap.get(documentKey);
851
+ const fullDoc = itemYMap ? serializeYMapValue(itemYMap) : mutation.modified;
852
+ await convexClient.mutation(api.update, {
853
+ documentId: documentKey,
854
+ crdtBytes: delta.slice().buffer,
855
+ materializedDoc: fullDoc
157
856
  });
158
857
  }
159
858
  } catch (error) {
160
- logger.error('Update failed', {
161
- collectionName,
162
- error: error?.message,
163
- status: error?.status
164
- });
165
- if (error?.status === 401 || error?.status === 403) throw new NonRetriableError('Authentication failed');
166
- if (error?.status === 422) throw new NonRetriableError('Validation error');
167
- throw error;
859
+ handleMutationError(error, 'Update', collection);
168
860
  }
169
861
  },
170
862
  onDelete: async ({ transaction })=>{
171
- logger.debug('onDelete handler called', {
172
- collectionName,
173
- mutationCount: transaction.mutations.length
174
- });
175
863
  try {
176
- ydoc.transact(()=>{
177
- transaction.mutations.forEach((mut)=>{
178
- ymap.delete(String(mut.key));
179
- });
180
- }, 'delete');
181
- if (pendingUpdate) {
182
- logger.debug('Sending delete delta to Convex', {
183
- collectionName,
184
- documentId: String(transaction.mutations[0].key),
185
- deltaSize: pendingUpdate.length
186
- });
187
- await convexClient.mutation(api.deleteDocument, {
188
- collectionName,
189
- documentId: String(transaction.mutations[0].key),
190
- crdtBytes: pendingUpdate.buffer,
191
- version: Date.now()
192
- });
193
- pendingUpdate = null;
194
- logger.info('Delete persisted to Convex', {
195
- collectionName,
196
- documentId: String(transaction.mutations[0].key)
864
+ await Promise.all([
865
+ persistenceReadyPromise,
866
+ optimisticReadyPromise
867
+ ]);
868
+ const delta = applyYjsDelete(transaction.mutations);
869
+ const itemsToDelete = transaction.mutations.map((mut)=>mut.original).filter((item)=>void 0 !== item && Object.keys(item).length > 0);
870
+ ops.delete(itemsToDelete);
871
+ if (delta.length > 0) {
872
+ const documentKey = String(transaction.mutations[0].key);
873
+ await convexClient.mutation(api.remove, {
874
+ documentId: documentKey,
875
+ crdtBytes: delta.slice().buffer
197
876
  });
198
877
  }
199
878
  } catch (error) {
200
- logger.error('Delete failed', {
201
- collectionName,
202
- error: error?.message,
203
- status: error?.status
204
- });
205
- if (error?.status === 401 || error?.status === 403) throw new NonRetriableError('Authentication failed');
206
- if (error?.status === 422) throw new NonRetriableError('Validation error');
207
- throw error;
879
+ handleMutationError(error, 'Delete', collection);
208
880
  }
209
881
  },
210
882
  sync: {
883
+ rowUpdateMode: 'partial',
211
884
  sync: (params)=>{
212
- const { begin, write, commit, markReady } = params;
213
- if (initialData && initialData.length > 0) {
214
- ydoc.transact(()=>{
215
- for (const item of initialData){
216
- const key = getKey(item);
217
- const itemYMap = new __WEBPACK_EXTERNAL_MODULE_yjs__.Map();
218
- Object.entries(item).forEach(([k, v])=>{
219
- itemYMap.set(k, v);
220
- });
221
- ymap.set(String(key), itemYMap);
222
- }
223
- }, 'ssr-init');
224
- begin();
225
- for (const item of initialData)write({
226
- type: 'insert',
227
- value: item
228
- });
229
- commit();
230
- logger.debug('Initialized with SSR data', {
231
- collectionName,
232
- count: initialData.length
233
- });
885
+ const { markReady, collection: collectionInstance } = params;
886
+ collectionRefs.set(collection, collectionInstance);
887
+ const existingCleanup = cleanupFunctions.get(collection);
888
+ if (existingCleanup) {
889
+ existingCleanup();
890
+ cleanupFunctions.delete(collection);
234
891
  }
235
- logger.debug("Setting up Convex subscription", {
236
- collectionName
237
- });
238
- let previousItems = new Map();
239
- const subscription = convexClient.onUpdate(api.stream, {}, async (items)=>{
892
+ let subscription = null;
893
+ const ssrDocuments = material?.documents;
894
+ const ssrCheckpoint = material?.checkpoint;
895
+ const ssrCRDTBytes = material?.crdtBytes;
896
+ const docs = ssrDocuments ? [
897
+ ...ssrDocuments
898
+ ] : [];
899
+ (async ()=>{
240
900
  try {
241
- logger.debug("Subscription update received", {
242
- collectionName,
243
- itemCount: items.length
901
+ ydoc = await createYjsDocument(collection, persistence.kv);
902
+ ymap = getYMap(ydoc, collection);
903
+ collectionDocs.set(collection, {
904
+ ydoc,
905
+ ymap
244
906
  });
245
- const currentItems = new Map();
246
- for (const item of items){
247
- const key = getKey(item);
248
- currentItems.set(key, item);
249
- }
250
- const deletedItems = [];
251
- for (const [prevId, prevItem] of previousItems)if (!currentItems.has(prevId)) deletedItems.push(prevItem);
252
- if (deletedItems.length > 0) logger.info('Detected remote hard deletes', {
253
- collectionName,
254
- deletedCount: deletedItems.length,
255
- deletedIds: deletedItems.map((item)=>getKey(item))
907
+ const trackedOrigins = new Set([
908
+ "local"
909
+ ]);
910
+ collectionUndoConfig.set(collection, {
911
+ captureTimeout: undoCaptureTimeout,
912
+ trackedOrigins
913
+ });
914
+ docPersistence = persistence.createDocPersistence(collection, ydoc);
915
+ docPersistence.whenSynced.then(()=>{
916
+ collection_logger.debug('Persistence synced', {
917
+ collection
918
+ });
919
+ resolvePersistenceReady?.();
256
920
  });
257
- begin();
258
- for (const deletedItem of deletedItems){
259
- const deletedId = getKey(deletedItem);
260
- ydoc.transact(()=>{
261
- ymap.delete(String(deletedId));
262
- }, 'remote-delete');
263
- write({
264
- type: 'delete',
265
- value: deletedItem
921
+ await persistenceReadyPromise;
922
+ collection_logger.info('Persistence ready', {
923
+ collection,
924
+ ymapSize: ymap.size
925
+ });
926
+ ops = createReplicateOps(params);
927
+ resolveOptimisticReady?.();
928
+ if (ssrCRDTBytes) applyUpdate(ydoc, new Uint8Array(ssrCRDTBytes), "server");
929
+ await recoverSync();
930
+ if (ymap.size > 0) {
931
+ const items = extractItems(ymap);
932
+ ops.replace(items);
933
+ collection_logger.info('Data loaded to TanStack DB', {
934
+ collection,
935
+ itemCount: items.length
936
+ });
937
+ } else {
938
+ ops.replace([]);
939
+ collection_logger.info('No data, cleared TanStack DB', {
940
+ collection
266
941
  });
267
942
  }
268
- ydoc.transact(()=>{
269
- for (const item of items){
270
- const key = getKey(item);
271
- const itemYMap = new __WEBPACK_EXTERNAL_MODULE_yjs__.Map();
272
- Object.entries(item).forEach(([k, v])=>{
273
- itemYMap.set(k, v);
943
+ collection_logger.debug('Running reconciliation', {
944
+ collection,
945
+ ymapSize: ymap.size
946
+ });
947
+ await Effect.runPromise(reconcile(ops).pipe(Effect.provide(servicesLayer)));
948
+ collection_logger.debug('Reconciliation complete', {
949
+ collection
950
+ });
951
+ markReady();
952
+ collection_logger.info('Collection ready', {
953
+ collection,
954
+ ymapSize: ymap.size
955
+ });
956
+ const checkpoint = ssrCheckpoint || await Effect.runPromise(Effect.gen(function*() {
957
+ const checkpointSvc = yield* Checkpoint;
958
+ return yield* checkpointSvc.loadCheckpoint(collection);
959
+ }).pipe(Effect.provide(checkpointLayer)));
960
+ collection_logger.info('Checkpoint loaded', {
961
+ collection,
962
+ checkpoint,
963
+ source: ssrCheckpoint ? 'SSR' : 'IndexedDB',
964
+ ymapSize: ymap.size
965
+ });
966
+ const mux = getOrCreateMutex(collection);
967
+ const handleSnapshotChange = (crdtBytes)=>{
968
+ cancelAllPending(collection);
969
+ mux(()=>{
970
+ try {
971
+ collection_logger.debug('Applying snapshot', {
972
+ collection,
973
+ bytesLength: crdtBytes.byteLength
974
+ });
975
+ applyUpdate(ydoc, new Uint8Array(crdtBytes), "server");
976
+ const items = extractItems(ymap);
977
+ collection_logger.debug('Snapshot applied', {
978
+ collection,
979
+ itemCount: items.length
980
+ });
981
+ ops.replace(items);
982
+ } catch (error) {
983
+ collection_logger.error('Error applying snapshot', {
984
+ collection,
985
+ error: String(error)
986
+ });
987
+ throw new Error(`Snapshot application failed: ${error}`);
988
+ }
989
+ });
990
+ };
991
+ const handleDeltaChange = (crdtBytes, documentId)=>{
992
+ if (documentId) {
993
+ cancelPending(collection, documentId);
994
+ setApplyingFromServer(collection, documentId, true);
995
+ }
996
+ mux(()=>{
997
+ try {
998
+ collection_logger.debug('Applying delta', {
999
+ collection,
1000
+ documentId,
1001
+ bytesLength: crdtBytes.byteLength
1002
+ });
1003
+ const itemBefore = documentId ? extractItem(ymap, documentId) : null;
1004
+ applyUpdate(ydoc, new Uint8Array(crdtBytes), "server");
1005
+ if (!documentId) return void collection_logger.debug('Delta applied (no documentId)', {
1006
+ collection
1007
+ });
1008
+ const itemAfter = extractItem(ymap, documentId);
1009
+ if (itemAfter) {
1010
+ collection_logger.debug('Upserting item after delta', {
1011
+ collection,
1012
+ documentId
1013
+ });
1014
+ ops.upsert([
1015
+ itemAfter
1016
+ ]);
1017
+ } else if (itemBefore) {
1018
+ collection_logger.debug('Deleting item after delta', {
1019
+ collection,
1020
+ documentId
1021
+ });
1022
+ ops.delete([
1023
+ itemBefore
1024
+ ]);
1025
+ } else collection_logger.debug('No change detected after delta', {
1026
+ collection,
1027
+ documentId
1028
+ });
1029
+ } catch (error) {
1030
+ collection_logger.error('Error applying delta', {
1031
+ collection,
1032
+ documentId,
1033
+ error: String(error)
1034
+ });
1035
+ throw new Error(`Delta application failed for ${documentId}: ${error}`);
1036
+ } finally{
1037
+ if (documentId) setApplyingFromServer(collection, documentId, false);
1038
+ }
1039
+ });
1040
+ };
1041
+ const handleSubscriptionUpdate = async (response)=>{
1042
+ try {
1043
+ if (!response || !Array.isArray(response.changes)) return void collection_logger.error("Invalid subscription response", {
1044
+ response
1045
+ });
1046
+ const { changes, checkpoint: newCheckpoint } = response;
1047
+ for (const change of changes){
1048
+ const { operationType, crdtBytes, documentId } = change;
1049
+ if (!crdtBytes) {
1050
+ collection_logger.warn('Skipping change with missing crdtBytes', {
1051
+ change
1052
+ });
1053
+ continue;
1054
+ }
1055
+ try {
1056
+ if ('snapshot' === operationType) handleSnapshotChange(crdtBytes);
1057
+ else handleDeltaChange(crdtBytes, documentId);
1058
+ } catch (changeError) {
1059
+ collection_logger.error('Failed to apply change', {
1060
+ operationType,
1061
+ documentId,
1062
+ error: String(changeError)
1063
+ });
1064
+ }
1065
+ }
1066
+ if (newCheckpoint) try {
1067
+ const key = `checkpoint:${collection}`;
1068
+ await persistence.kv.set(key, newCheckpoint);
1069
+ collection_logger.debug('Checkpoint saved', {
1070
+ collection,
1071
+ checkpoint: newCheckpoint
1072
+ });
1073
+ } catch (checkpointError) {
1074
+ collection_logger.error('Failed to save checkpoint', {
1075
+ collection,
1076
+ error: String(checkpointError)
1077
+ });
1078
+ }
1079
+ } catch (error) {
1080
+ collection_logger.error("Subscription handler error", {
1081
+ collection,
1082
+ error: String(error)
274
1083
  });
275
- ymap.set(String(key), itemYMap);
276
1084
  }
277
- }, "subscription-sync");
278
- for (const item of items){
279
- const key = getKey(item);
280
- params.collection.has(key) ? write({
281
- type: 'update',
282
- value: item
283
- }) : write({
284
- type: 'insert',
285
- value: item
1085
+ };
1086
+ collection_logger.info("Establishing subscription", {
1087
+ collection,
1088
+ checkpoint,
1089
+ limit: 1000
1090
+ });
1091
+ subscription = convexClient.onUpdate(api.stream, {
1092
+ checkpoint,
1093
+ limit: 1000
1094
+ }, (response)=>{
1095
+ collection_logger.debug("Subscription received update", {
1096
+ collection,
1097
+ changesCount: response.changes?.length ?? 0,
1098
+ checkpoint: response.checkpoint,
1099
+ hasMore: response.hasMore
286
1100
  });
287
- }
288
- commit();
289
- previousItems = currentItems;
290
- logger.debug('Successfully synced items to collection', {
291
- count: items.length,
292
- deletedCount: deletedItems.length
1101
+ handleSubscriptionUpdate(response);
1102
+ });
1103
+ collection_logger.info("Subscription established", {
1104
+ collection
293
1105
  });
294
1106
  } catch (error) {
295
- logger.error("Failed to sync items from subscription", {
296
- error: error.message,
297
- errorName: error.name,
298
- stack: error?.stack,
299
- collectionName,
300
- itemCount: items.length
1107
+ collection_logger.error('Failed to set up collection', {
1108
+ error,
1109
+ collection
301
1110
  });
302
- throw error;
1111
+ markReady();
1112
+ }
1113
+ })();
1114
+ return {
1115
+ material: docs,
1116
+ cleanup: ()=>{
1117
+ subscription?.();
1118
+ prose_cleanup(collection);
1119
+ const prefix = `${collection}:`;
1120
+ for (const [key, um] of fragmentUndoManagers)if (key.startsWith(prefix)) {
1121
+ um.destroy();
1122
+ fragmentUndoManagers.delete(key);
1123
+ }
1124
+ collectionMutex.delete(collection);
1125
+ debounceConfig.delete(collection);
1126
+ collectionRefs.delete(collection);
1127
+ collectionUndoConfig.delete(collection);
1128
+ collectionDocs.delete(collection);
1129
+ docPersistence?.destroy();
1130
+ ydoc?.destroy();
1131
+ cleanupFunctions.delete(collection);
303
1132
  }
304
- });
305
- markReady();
306
- return ()=>{
307
- logger.debug("Cleaning up Convex subscription", {
308
- collectionName
309
- });
310
- subscription();
311
1133
  };
312
1134
  }
313
1135
  }
314
1136
  };
315
1137
  }
316
- function createConvexCollection(rawCollection) {
317
- const config = rawCollection.config;
318
- const convexClient = config._convexClient;
319
- const collectionName = config._collectionName;
320
- if (!convexClient || !collectionName) throw new Error("createConvexCollection requires a collection created with convexCollectionOptions. Make sure you pass convexClient and collectionName to convexCollectionOptions.");
321
- logger.info('Creating Convex collection with offline support', {
322
- collectionName
323
- });
324
- const offline = startOfflineExecutor({
325
- collections: {
326
- [collectionName]: rawCollection
327
- },
328
- mutationFns: {},
329
- beforeRetry: (transactions)=>{
330
- const cutoff = Date.now() - 86400000;
331
- const filtered = transactions.filter((tx)=>{
332
- const isRecent = tx.createdAt.getTime() > cutoff;
333
- const notExhausted = tx.retryCount < 10;
334
- return isRecent && notExhausted;
335
- });
336
- if (filtered.length < transactions.length) logger.warn('Filtered stale transactions', {
337
- collectionName,
338
- before: transactions.length,
339
- after: filtered.length
340
- });
341
- return filtered;
342
- },
343
- onLeadershipChange: (isLeader)=>{
344
- logger.info(isLeader ? 'Offline mode active' : 'Online-only mode', {
345
- collectionName
346
- });
347
- },
348
- onStorageFailure: (diagnostic)=>{
349
- logger.warn('Storage failed - online-only mode', {
350
- collectionName,
351
- code: diagnostic.code,
352
- message: diagnostic.message
353
- });
1138
+ class BrowserLevelKeyValueStore {
1139
+ db;
1140
+ constructor(dbName){
1141
+ this.db = new BrowserLevel(dbName);
1142
+ }
1143
+ async get(key) {
1144
+ try {
1145
+ const value = await this.db.get(key);
1146
+ if (void 0 === value) return;
1147
+ return JSON.parse(value);
1148
+ } catch (err) {
1149
+ if ('LEVEL_NOT_FOUND' === err.code) return;
1150
+ throw err;
354
1151
  }
355
- });
356
- if (convexClient.connectionState) {
357
- const connectionState = convexClient.connectionState();
358
- logger.debug('Initial connection state', {
359
- collectionName,
360
- isConnected: connectionState.isWebSocketConnected
1152
+ }
1153
+ async set(key, value) {
1154
+ await this.db.put(key, JSON.stringify(value));
1155
+ }
1156
+ async del(key) {
1157
+ try {
1158
+ await this.db.del(key);
1159
+ } catch (err) {
1160
+ if ('LEVEL_NOT_FOUND' !== err.code) throw err;
1161
+ }
1162
+ }
1163
+ async close() {
1164
+ await this.db.close();
1165
+ }
1166
+ }
1167
+ class IndexedDBPersistenceProvider {
1168
+ persistence;
1169
+ whenSynced;
1170
+ constructor(collection, ydoc){
1171
+ this.persistence = new IndexeddbPersistence(collection, ydoc);
1172
+ this.whenSynced = new Promise((resolve)=>{
1173
+ if (this.persistence.synced) resolve();
1174
+ else this.persistence.once('synced', ()=>resolve());
1175
+ });
1176
+ }
1177
+ destroy() {
1178
+ this.persistence.destroy();
1179
+ }
1180
+ }
1181
+ function indexeddbPersistence(dbName = 'replicate-kv') {
1182
+ const kv = new BrowserLevelKeyValueStore(dbName);
1183
+ return {
1184
+ createDocPersistence: (collection, ydoc)=>new IndexedDBPersistenceProvider(collection, ydoc),
1185
+ kv
1186
+ };
1187
+ }
1188
+ class MemoryKeyValueStore {
1189
+ store = new Map();
1190
+ async get(key) {
1191
+ return this.store.get(key);
1192
+ }
1193
+ async set(key, value) {
1194
+ this.store.set(key, value);
1195
+ }
1196
+ async del(key) {
1197
+ this.store.delete(key);
1198
+ }
1199
+ }
1200
+ class MemoryPersistenceProvider {
1201
+ whenSynced = Promise.resolve();
1202
+ destroy() {}
1203
+ }
1204
+ function memoryPersistence() {
1205
+ const kv = new MemoryKeyValueStore();
1206
+ return {
1207
+ createDocPersistence: (_collection, _ydoc)=>new MemoryPersistenceProvider(),
1208
+ kv
1209
+ };
1210
+ }
1211
+ class SqliteLevel extends AbstractLevel {
1212
+ adapter = null;
1213
+ adapterFactory = null;
1214
+ constructor(_location, options){
1215
+ super({
1216
+ encodings: {
1217
+ utf8: true,
1218
+ buffer: true,
1219
+ view: true
1220
+ },
1221
+ seek: true,
1222
+ permanence: true,
1223
+ createIfMissing: true,
1224
+ errorIfExists: false,
1225
+ additionalMethods: {}
1226
+ }, {
1227
+ keyEncoding: options?.keyEncoding ?? 'utf8',
1228
+ valueEncoding: options?.valueEncoding ?? 'utf8'
361
1229
  });
1230
+ if (options?.adapter) this.adapter = options.adapter;
1231
+ }
1232
+ setAdapterFactory(factory) {
1233
+ this.adapterFactory = factory;
1234
+ }
1235
+ async _open() {
1236
+ if (!this.adapter) if (this.adapterFactory) this.adapter = await this.adapterFactory();
1237
+ else throw new Error('No SQLite adapter configured. Call setAdapterFactory() before open().');
1238
+ await this.adapter.execute(`
1239
+ CREATE TABLE IF NOT EXISTS entries (
1240
+ key BLOB PRIMARY KEY,
1241
+ value BLOB NOT NULL
1242
+ )
1243
+ `);
1244
+ await this.adapter.execute(`
1245
+ CREATE INDEX IF NOT EXISTS entries_key_idx ON entries (key)
1246
+ `);
1247
+ }
1248
+ async _close() {
1249
+ if (this.adapter) {
1250
+ this.adapter.close();
1251
+ this.adapter = null;
1252
+ }
1253
+ }
1254
+ async _get(key) {
1255
+ if (!this.adapter) throw new Error('Database not open');
1256
+ const keyBytes = this.encodeKey(key);
1257
+ const result = await this.adapter.execute('SELECT value FROM entries WHERE key = ?', [
1258
+ keyBytes
1259
+ ]);
1260
+ if (0 === result.rows.length) return;
1261
+ return this.decodeValue(result.rows[0].value);
1262
+ }
1263
+ async _put(key, value) {
1264
+ if (!this.adapter) throw new Error('Database not open');
1265
+ const keyBytes = this.encodeKey(key);
1266
+ const valueBytes = this.encodeValue(value);
1267
+ await this.adapter.execute('INSERT OR REPLACE INTO entries (key, value) VALUES (?, ?)', [
1268
+ keyBytes,
1269
+ valueBytes
1270
+ ]);
1271
+ }
1272
+ async _del(key) {
1273
+ if (!this.adapter) throw new Error('Database not open');
1274
+ const keyBytes = this.encodeKey(key);
1275
+ await this.adapter.execute('DELETE FROM entries WHERE key = ?', [
1276
+ keyBytes
1277
+ ]);
1278
+ }
1279
+ async _batch(operations) {
1280
+ if (!this.adapter) throw new Error('Database not open');
1281
+ await this.adapter.execute('BEGIN TRANSACTION');
1282
+ try {
1283
+ for (const op of operations)if ('put' === op.type) {
1284
+ const keyBytes = this.encodeKey(op.key);
1285
+ const valueBytes = this.encodeValue(op.value);
1286
+ await this.adapter.execute('INSERT OR REPLACE INTO entries (key, value) VALUES (?, ?)', [
1287
+ keyBytes,
1288
+ valueBytes
1289
+ ]);
1290
+ } else if ('del' === op.type) {
1291
+ const keyBytes = this.encodeKey(op.key);
1292
+ await this.adapter.execute('DELETE FROM entries WHERE key = ?', [
1293
+ keyBytes
1294
+ ]);
1295
+ }
1296
+ await this.adapter.execute('COMMIT');
1297
+ } catch (error) {
1298
+ await this.adapter.execute('ROLLBACK');
1299
+ throw error;
1300
+ }
1301
+ }
1302
+ async _clear() {
1303
+ if (!this.adapter) throw new Error('Database not open');
1304
+ await this.adapter.execute('DELETE FROM entries');
1305
+ }
1306
+ _iterator(options) {
1307
+ if (!this.adapter) throw new Error('Database not open');
1308
+ return new SqliteIterator(this, this.adapter, options);
1309
+ }
1310
+ _keys(options) {
1311
+ if (!this.adapter) throw new Error('Database not open');
1312
+ return new SqliteKeyIterator(this, this.adapter, options);
1313
+ }
1314
+ _values(options) {
1315
+ if (!this.adapter) throw new Error('Database not open');
1316
+ return new SqliteValueIterator(this, this.adapter, options);
1317
+ }
1318
+ encodeKey(key) {
1319
+ if (key instanceof Uint8Array) return key;
1320
+ if ('string' == typeof key) return new TextEncoder().encode(key);
1321
+ return new TextEncoder().encode(String(key));
1322
+ }
1323
+ encodeValue(value) {
1324
+ if (value instanceof Uint8Array) return value;
1325
+ if ('string' == typeof value) return new TextEncoder().encode(value);
1326
+ return new TextEncoder().encode(JSON.stringify(value));
1327
+ }
1328
+ decodeValue(bytes) {
1329
+ return new TextDecoder().decode(bytes);
1330
+ }
1331
+ }
1332
+ class SqliteIterator extends AbstractIterator {
1333
+ adapter;
1334
+ options;
1335
+ rows = null;
1336
+ index = 0;
1337
+ constructor(db, adapter, options){
1338
+ super(db, options);
1339
+ this.adapter = adapter;
1340
+ this.options = options;
1341
+ }
1342
+ async _next() {
1343
+ if (null === this.rows) await this.loadRows();
1344
+ if (this.rows && this.index < this.rows.length) {
1345
+ const row = this.rows[this.index++];
1346
+ const key = new TextDecoder().decode(row.key);
1347
+ const value = new TextDecoder().decode(row.value);
1348
+ return [
1349
+ key,
1350
+ value
1351
+ ];
1352
+ }
1353
+ }
1354
+ async _nextv(size) {
1355
+ if (null === this.rows) await this.loadRows();
1356
+ const result = [];
1357
+ while(this.rows && this.index < this.rows.length && result.length < size){
1358
+ const row = this.rows[this.index++];
1359
+ const key = new TextDecoder().decode(row.key);
1360
+ const value = new TextDecoder().decode(row.value);
1361
+ result.push([
1362
+ key,
1363
+ value
1364
+ ]);
1365
+ }
1366
+ return result;
1367
+ }
1368
+ async loadRows() {
1369
+ const { reverse, limit, gt, gte, lt, lte } = this.options;
1370
+ let sql = 'SELECT key, value FROM entries';
1371
+ const params = [];
1372
+ const conditions = [];
1373
+ if (void 0 !== gt) {
1374
+ conditions.push('key > ?');
1375
+ params.push(this.encodeKey(gt));
1376
+ }
1377
+ if (void 0 !== gte) {
1378
+ conditions.push('key >= ?');
1379
+ params.push(this.encodeKey(gte));
1380
+ }
1381
+ if (void 0 !== lt) {
1382
+ conditions.push('key < ?');
1383
+ params.push(this.encodeKey(lt));
1384
+ }
1385
+ if (void 0 !== lte) {
1386
+ conditions.push('key <= ?');
1387
+ params.push(this.encodeKey(lte));
1388
+ }
1389
+ if (conditions.length > 0) sql += ` WHERE ${conditions.join(' AND ')}`;
1390
+ sql += ` ORDER BY key ${reverse ? 'DESC' : 'ASC'}`;
1391
+ if (void 0 !== limit && limit >= 0) sql += ` LIMIT ${limit}`;
1392
+ const result = await this.adapter.execute(sql, params);
1393
+ this.rows = result.rows;
1394
+ }
1395
+ encodeKey(key) {
1396
+ if (key instanceof Uint8Array) return key;
1397
+ if ('string' == typeof key) return new TextEncoder().encode(key);
1398
+ return new TextEncoder().encode(String(key));
1399
+ }
1400
+ }
1401
+ class SqliteKeyIterator extends AbstractKeyIterator {
1402
+ adapter;
1403
+ options;
1404
+ rows = null;
1405
+ index = 0;
1406
+ constructor(db, adapter, options){
1407
+ super(db, options);
1408
+ this.adapter = adapter;
1409
+ this.options = options;
1410
+ }
1411
+ async _next() {
1412
+ if (null === this.rows) await this.loadRows();
1413
+ if (this.rows && this.index < this.rows.length) {
1414
+ const row = this.rows[this.index++];
1415
+ return new TextDecoder().decode(row.key);
1416
+ }
1417
+ }
1418
+ async loadRows() {
1419
+ const { reverse, limit } = this.options;
1420
+ let sql = 'SELECT key FROM entries';
1421
+ sql += ` ORDER BY key ${reverse ? 'DESC' : 'ASC'}`;
1422
+ if (void 0 !== limit && limit >= 0) sql += ` LIMIT ${limit}`;
1423
+ const result = await this.adapter.execute(sql);
1424
+ this.rows = result.rows;
362
1425
  }
363
- if ('undefined' != typeof window) window.addEventListener('online', ()=>{
364
- logger.info('Network online - notifying offline executor', {
365
- collectionName
1426
+ }
1427
+ class SqliteValueIterator extends AbstractValueIterator {
1428
+ adapter;
1429
+ options;
1430
+ rows = null;
1431
+ index = 0;
1432
+ constructor(db, adapter, options){
1433
+ super(db, options);
1434
+ this.adapter = adapter;
1435
+ this.options = options;
1436
+ }
1437
+ async _next() {
1438
+ if (null === this.rows) await this.loadRows();
1439
+ if (this.rows && this.index < this.rows.length) {
1440
+ const row = this.rows[this.index++];
1441
+ return new TextDecoder().decode(row.value);
1442
+ }
1443
+ }
1444
+ async loadRows() {
1445
+ const { reverse, limit } = this.options;
1446
+ let sql = 'SELECT value FROM entries';
1447
+ sql += ` ORDER BY key ${reverse ? 'DESC' : 'ASC'}`;
1448
+ if (void 0 !== limit && limit >= 0) sql += ` LIMIT ${limit}`;
1449
+ const result = await this.adapter.execute(sql);
1450
+ this.rows = result.rows;
1451
+ }
1452
+ }
1453
/**
 * Small JSON key/value store layered on a LevelDB-compatible database.
 * Keys are namespaced under a "kv:" prefix; values are JSON-serialized.
 */
class SqliteKeyValueStore {
    db;
    prefix = 'kv:';
    constructor(db){
        this.db = db;
    }
    /**
     * Read and parse a value.
     * Resolves undefined when the key is missing or the stored text
     * cannot be parsed (best-effort read, errors are swallowed).
     */
    async get(key) {
        try {
            const raw = await this.db.get(this.prefix + key);
            return raw === undefined ? undefined : JSON.parse(raw);
        } catch {
            return undefined;
        }
    }
    /** Serialize and store a value under the prefixed key. */
    async set(key, value) {
        await this.db.put(this.prefix + key, JSON.stringify(value));
    }
    /** Remove the prefixed key. */
    async del(key) {
        await this.db.del(this.prefix + key);
    }
}
1475
/**
 * Per-collection Yjs persistence provider that delegates to a shared
 * LeveldbPersistence instance.
 */
class SqlitePersistenceProvider {
    persistence;
    whenSynced;
    constructor(collection, _ydoc, leveldb){
        this.persistence = leveldb;
        // Resolves once the stored doc has been loaded.
        // NOTE(review): the bare `.store` access looks like a deliberate
        // touch of the lazy doc state — confirm against y-leveldb usage.
        this.whenSynced = leveldb.getYDoc(collection).then((storedDoc)=>{
            storedDoc.store;
        });
    }
    /** Tear down the shared LeveldbPersistence. */
    destroy() {
        this.persistence.destroy();
    }
}
1488
/**
 * Build the SQLite-backed persistence bundle: a Level-compatible database
 * wired to the caller-supplied adapter, a Yjs LeveldbPersistence on top of
 * it, and a JSON key/value store sharing the same database.
 */
async function sqlitePersistence(options) {
    const { adapter, dbName = 'replicate' } = options;
    const db = new SqliteLevel(dbName);
    db.setAdapterFactory(()=>Promise.resolve(adapter));
    await db.open();
    const leveldb = new LeveldbPersistence(dbName, { level: db });
    return {
        createDocPersistence: (collection, ydoc)=>new SqlitePersistenceProvider(collection, ydoc, leveldb),
        kv: new SqliteKeyValueStore(db)
    };
}
1502
/**
 * SqliteAdapter implementation for sql.js (WebAssembly SQLite).
 * Write/transaction statements are executed with db.run() and trigger a
 * persist() callback; read statements are prepared and fully materialized.
 */
class SqlJsAdapter {
    db;
    onPersist;
    // Statement verbs that mutate the database (or manage transactions)
    // and therefore must be followed by a persist().
    static #WRITE_VERBS = ['CREATE', 'INSERT', 'UPDATE', 'DELETE', 'BEGIN', 'COMMIT', 'ROLLBACK'];
    constructor(db, options = {}){
        this.db = db;
        this.onPersist = options.onPersist;
    }
    /**
     * Execute a SQL statement and return `{ rows }` (empty for writes).
     * Fix: normalize the statement head ONCE instead of re-running
     * trim().toUpperCase() for each of the seven verb checks.
     */
    async execute(sql, params) {
        const rows = [];
        const head = sql.trim().toUpperCase();
        if (SqlJsAdapter.#WRITE_VERBS.some((verb)=>head.startsWith(verb))) {
            this.db.run(sql, params);
            await this.persist();
            return { rows };
        }
        const stmt = this.db.prepare(sql);
        if (params && params.length > 0) stmt.bind(params);
        while (stmt.step()) rows.push(stmt.getAsObject());
        stmt.free();
        return { rows };
    }
    /** Close the underlying sql.js database. */
    close() {
        this.db.close();
    }
    /** Export the database image and hand it to the persist callback, if any. */
    async persist() {
        if (!this.onPersist) return;
        const data = this.db.export();
        await this.onPersist(new Uint8Array(data));
    }
}
1536
/**
 * Attempt to read a previously persisted SQLite image from OPFS.
 * Resolves null when the file is missing or OPFS is unavailable
 * (e.g. non-browser environments) — treated as "no snapshot".
 */
async function loadFromOPFS(dbName) {
    try {
        const root = await navigator.storage.getDirectory();
        const handle = await root.getFileHandle(`${dbName}.sqlite`);
        const file = await handle.getFile();
        return new Uint8Array(await file.arrayBuffer());
    } catch {
        return null;
    }
}
1547
/**
 * Build a persist callback that writes the SQLite image to OPFS.
 * Failures are swallowed: persistence is best-effort in environments
 * without OPFS support.
 */
function createOPFSSaver(dbName) {
    return async (data)=>{
        try {
            const root = await navigator.storage.getDirectory();
            const handle = await root.getFileHandle(`${dbName}.sqlite`, { create: true });
            const writable = await handle.createWritable();
            // Copy into a plain, exactly-sized ArrayBuffer before writing.
            const buffer = new ArrayBuffer(data.length);
            new Uint8Array(buffer).set(data);
            await writable.write(buffer);
            await writable.close();
        } catch {
            // Best-effort: ignore write failures.
        }
    };
}
1562
/**
 * Browser entry point: restore any OPFS snapshot into a sql.js database,
 * then wire it through SqlJsAdapter into the shared sqlitePersistence bundle.
 */
async function createBrowserSqlitePersistence(SQL, dbName) {
    const existingData = await loadFromOPFS(dbName);
    const db = existingData ? new SQL.Database(existingData) : new SQL.Database();
    const adapter = new SqlJsAdapter(db, { onPersist: createOPFSSaver(dbName) });
    return sqlitePersistence({ adapter, dbName });
}
375
- export { NonRetriableError, ReplicateStorage, __WEBPACK_EXTERNAL_MODULE_yjs__ as Y, convexCollectionOptions, createConvexCollection };
1573
/**
 * SqliteAdapter implementation for op-sqlite (React Native).
 * op-sqlite's execute() already produces row objects, so this is a thin
 * wrapper that normalizes the result shape to `{ rows }`.
 */
class OPSqliteAdapter {
    db;
    constructor(db){
        this.db = db;
    }
    /** Run a statement and normalize the result to `{ rows }` (never undefined). */
    async execute(sql, params) {
        const { rows } = await this.db.execute(sql, params);
        return { rows: rows || [] };
    }
    /** Close the underlying database handle. */
    close() {
        this.db.close();
    }
}
1588
/**
 * React Native entry point: wrap an op-sqlite database in OPSqliteAdapter
 * and hand it to the shared sqlitePersistence bundle.
 */
async function createReactNativeSqlitePersistence(db, dbName) {
    return sqlitePersistence({ adapter: new OPSqliteAdapter(db), dbName });
}
1595
+ const persistence_persistence = {
1596
+ indexeddb: indexeddbPersistence,
1597
+ memory: memoryPersistence,
1598
+ sqlite: {
1599
+ browser: createBrowserSqlitePersistence,
1600
+ native: createReactNativeSqlitePersistence,
1601
+ create: sqlitePersistence
1602
+ }
1603
+ };
1604
+ const errors = {
1605
+ Network: NetworkError,
1606
+ IDB: IDBError,
1607
+ IDBWrite: IDBWriteError,
1608
+ Reconciliation: ReconciliationError,
1609
+ Prose: ProseError,
1610
+ CollectionNotReady: CollectionNotReadyError,
1611
+ NonRetriable: NonRetriableError
1612
+ };
1613
+ const prose = {
1614
+ extract: extract
1615
+ };
1616
+ const adapters = {
1617
+ sqljs: SqlJsAdapter,
1618
+ opsqlite: OPSqliteAdapter
1619
+ };
1620
+ export { adapters, convexCollectionOptions, errors, persistence_persistence as persistence, prose };