@fatagnus/dink-convex 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +190 -0
- package/README.md +282 -0
- package/convex/convex.config.ts +23 -0
- package/convex/crons.ts +37 -0
- package/convex/http.ts +421 -0
- package/convex/index.ts +20 -0
- package/convex/install.ts +172 -0
- package/convex/outbox.ts +198 -0
- package/convex/outboxProcessor.ts +240 -0
- package/convex/schema.ts +97 -0
- package/convex/sync.ts +327 -0
- package/dist/component.d.ts +34 -0
- package/dist/component.d.ts.map +1 -0
- package/dist/component.js +35 -0
- package/dist/component.js.map +1 -0
- package/dist/crdt.d.ts +82 -0
- package/dist/crdt.d.ts.map +1 -0
- package/dist/crdt.js +134 -0
- package/dist/crdt.js.map +1 -0
- package/dist/factories.d.ts +80 -0
- package/dist/factories.d.ts.map +1 -0
- package/dist/factories.js +159 -0
- package/dist/factories.js.map +1 -0
- package/dist/http.d.ts +238 -0
- package/dist/http.d.ts.map +1 -0
- package/dist/http.js +222 -0
- package/dist/http.js.map +1 -0
- package/dist/httpFactory.d.ts +39 -0
- package/dist/httpFactory.d.ts.map +1 -0
- package/dist/httpFactory.js +128 -0
- package/dist/httpFactory.js.map +1 -0
- package/dist/index.d.ts +68 -0
- package/dist/index.d.ts.map +1 -0
- package/dist/index.js +73 -0
- package/dist/index.js.map +1 -0
- package/dist/schema.d.ts +217 -0
- package/dist/schema.d.ts.map +1 -0
- package/dist/schema.js +195 -0
- package/dist/schema.js.map +1 -0
- package/dist/syncFactories.d.ts +240 -0
- package/dist/syncFactories.d.ts.map +1 -0
- package/dist/syncFactories.js +623 -0
- package/dist/syncFactories.js.map +1 -0
- package/dist/triggers.d.ts +442 -0
- package/dist/triggers.d.ts.map +1 -0
- package/dist/triggers.js +705 -0
- package/dist/triggers.js.map +1 -0
- package/package.json +108 -0
- package/scripts/check-peer-deps.cjs +132 -0
package/dist/triggers.js
ADDED
|
@@ -0,0 +1,705 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Triggers setup for automatic sync on database writes.
|
|
3
|
+
*
|
|
4
|
+
* Uses convex-helpers Triggers to intercept database operations and
|
|
5
|
+
* automatically generate CRDT deltas for synced tables.
|
|
6
|
+
*
|
|
7
|
+
* @module triggers
|
|
8
|
+
*/
|
|
9
|
+
import * as Y from "yjs";
|
|
10
|
+
import { Triggers } from "convex-helpers/server/triggers";
|
|
11
|
+
/**
 * Set of table names that are registered for sync.
 * Populated by registerSyncedTable() and consulted by isSyncedTable() /
 * getSyncConfig() and the trigger handlers before generating deltas.
 * NOTE: module-level, in-memory state — it is rebuilt empty on each deploy.
 */
const syncedTables = new Set();
|
|
16
|
+
/**
 * Global sequence counter for delta ordering.
 * @deprecated Use getNextSeqAsync or createSequenceCounter for persisted sequences.
 * This in-memory counter resets to 0 on redeploy and is kept only for
 * backward compatibility.
 */
let globalSeq = 0;
/**
 * Get the next sequence number for delta ordering.
 * @deprecated Use getNextSeqAsync or createSequenceCounter for persisted sequences.
 * This synchronous version bumps the in-memory counter, which resets on redeploy.
 * @returns {number} The next sequence number (1, 2, 3, ... per process lifetime)
 */
export function getNextSeq() {
    globalSeq += 1;
    return globalSeq;
}
|
|
31
|
+
/**
 * Get the next sequence number by querying the database.
 * Looks up the highest existing seq in the sync_deltas table for the given
 * collection (via the "by_seq" index, descending) and returns max + 1.
 * An empty collection yields 1.
 *
 * @param db - Database context with query capability
 * @param collection - The collection name to get the next sequence for
 * @returns {Promise<number>} The next sequence number
 */
export async function getNextSeqAsync(db, collection) {
    const newest = await db
        .query("sync_deltas")
        .withIndex("by_seq", (q) => q.eq("collection", collection))
        .order("desc")
        .first();
    const maxSeq = newest?.seq ?? 0;
    return maxSeq + 1;
}
/**
 * Create a sequence counter with request-level caching.
 *
 * The first getNext() call per collection queries the database for the max
 * seq; subsequent calls for the same collection increment the cached value
 * locally. reset() clears the cache so the next call re-queries.
 *
 * NOTE(review): the cache lives as long as the counter object, not a single
 * request — if the counter outlives a mutation (e.g. held in module scope),
 * cached values can drift from the database. Confirm intended lifetime.
 *
 * @returns A counter object with async getNext(db, collection) and reset()
 *
 * @example
 * ```typescript
 * const counter = createSequenceCounter();
 * const seq1 = await counter.getNext(ctx.db, "tasks"); // Queries DB
 * const seq2 = await counter.getNext(ctx.db, "tasks"); // Uses cache
 * const seq3 = await counter.getNext(ctx.db, "users"); // Queries DB (different collection)
 * ```
 */
export function createSequenceCounter() {
    // collection name -> last sequence number handed out
    const cache = new Map();
    return {
        async getNext(db, collection) {
            const cached = cache.get(collection);
            // Cached values are always >= 1, so undefined means "not seen yet".
            if (cached !== undefined) {
                const next = cached + 1;
                cache.set(collection, next);
                return next;
            }
            const nextSeq = await getNextSeqAsync(db, collection);
            cache.set(collection, nextSeq);
            return nextSeq;
        },
        reset() {
            cache.clear();
        },
    };
}
|
|
86
|
+
/**
 * Register a table for sync.
 * Tables registered here will have triggers applied to generate CRDT deltas.
 *
 * @param tableName - The name of the table to register
 */
export function registerSyncedTable(tableName) {
    syncedTables.add(tableName);
}
/**
 * Check if a table is registered for sync.
 *
 * @param tableName - The name of the table to check
 * @returns {boolean} True if the table was registered via registerSyncedTable
 */
export function isSyncedTable(tableName) {
    return syncedTables.has(tableName);
}
/**
 * Get sync configuration for a table from the in-memory registry.
 * In production, checkSyncConfigEnabled queries the _sync_config table instead.
 *
 * @param tableName - The name of the table
 * @returns {{enabled: true} | null} Config object when registered, else null
 */
export function getSyncConfig(tableName) {
    return syncedTables.has(tableName) ? { enabled: true } : null;
}
|
|
117
|
+
/**
 * Check if sync is enabled for a collection by querying the sync_config table.
 * This is the database-backed counterpart to the in-memory getSyncConfig().
 *
 * @param db - Database context with query capability
 * @param collection - The collection name to check
 * @returns {Promise<boolean>} True only when a config row exists with enabled === true
 */
export async function checkSyncConfigEnabled(db, collection) {
    const row = await db
        .query("sync_config")
        .withIndex("by_collection", (q) => q.eq("collection", collection))
        .first();
    // Missing row or any non-true `enabled` value counts as disabled.
    return row?.enabled === true;
}
/**
 * Create a sync config checker function.
 * The returned checker memoizes results per collection, so the database is
 * queried at most once per collection for the checker's lifetime.
 *
 * @returns An async (db, collection) => Promise<boolean> checker
 */
export function createSyncConfigChecker() {
    // collection name -> cached enabled flag
    const cache = new Map();
    return async (db, collection) => {
        const hit = cache.get(collection);
        // Cached values are booleans, so undefined reliably means "not cached".
        if (hit !== undefined) {
            return hit;
        }
        const enabled = await checkSyncConfigEnabled(db, collection);
        cache.set(collection, enabled);
        return enabled;
    };
}
|
|
152
|
+
/**
 * Serialize a value to a format suitable for Yjs.
 * Recurses through arrays and plain objects; primitives, null, and undefined
 * pass through unchanged.
 *
 * NOTE(review): Dates, Uint8Arrays, Maps, etc. also hit the object branch and
 * are flattened via Object.entries — confirm synced documents only contain
 * plain JSON-like data. Circular references would recurse without bound.
 */
function serializeValue(value) {
    if (value === null || value === undefined) {
        return value;
    }
    if (Array.isArray(value)) {
        return value.map((item) => serializeValue(item));
    }
    if (typeof value === "object") {
        return Object.fromEntries(
            Object.entries(value).map(([key, nested]) => [key, serializeValue(nested)])
        );
    }
    return value;
}
|
|
172
|
+
/**
 * Generate a CRDT delta for a newly inserted document.
 * Builds a fresh Yjs document whose "root" map holds every user field of the
 * document (the Convex-internal _id and _creationTime are skipped), then
 * encodes the whole state as a single update.
 *
 * @param doc - The inserted document
 * @returns {Uint8Array} Encoded Yjs update representing the full document
 */
export function generateInsertDelta(doc) {
    const ydoc = new Y.Doc();
    const root = ydoc.getMap("root");
    for (const [field, fieldValue] of Object.entries(doc)) {
        // Convex bookkeeping fields are not part of the synced payload.
        if (field !== "_id" && field !== "_creationTime") {
            root.set(field, serializeValue(fieldValue));
        }
    }
    return Y.encodeStateAsUpdate(ydoc);
}
|
|
193
|
+
/**
 * Generate a CRDT delta for an updated document.
 * Sets only the fields whose JSON serialization differs between oldDoc and
 * newDoc, and deletes fields that disappeared, on a fresh Yjs document.
 *
 * NOTE(review): ymap.delete() of a key that was never set on this brand-new
 * Y.Doc likely encodes no operation, so field removals may not propagate to
 * consumers — confirm against the downstream merge logic.
 *
 * @param oldDoc - The document before update
 * @param newDoc - The document after update
 * @returns {Uint8Array} Encoded Yjs update containing the changed fields
 */
export function generateUpdateDelta(oldDoc, newDoc) {
    const ydoc = new Y.Doc();
    const ymap = ydoc.getMap("root");
    const isInternal = (key) => key === "_id" || key === "_creationTime";
    // Write every user field whose value changed (JSON-compare, which treats
    // structurally equal objects as unchanged).
    for (const [key, newValue] of Object.entries(newDoc)) {
        if (isInternal(key)) {
            continue;
        }
        if (JSON.stringify(oldDoc[key]) !== JSON.stringify(newValue)) {
            ymap.set(key, serializeValue(newValue));
        }
    }
    // Record fields that were removed in the new document.
    for (const key of Object.keys(oldDoc)) {
        if (!isInternal(key) && !(key in newDoc)) {
            ymap.delete(key);
        }
    }
    return Y.encodeStateAsUpdate(ydoc);
}
|
|
227
|
+
/**
 * Generate a tombstone CRDT delta for a deleted document.
 * Encodes a Yjs document whose "root" map carries a _deleted flag, the
 * document's syncId, and a _deletedAt wall-clock timestamp.
 *
 * @param doc - The deleted document (only doc.syncId is read)
 * @returns {Uint8Array} Encoded Yjs update representing the tombstone
 */
export function generateDeleteDelta(doc) {
    const tombstone = new Y.Doc();
    const root = tombstone.getMap("root");
    root.set("_deleted", true);
    root.set("syncId", doc.syncId);
    root.set("_deletedAt", Date.now());
    return Y.encodeStateAsUpdate(tombstone);
}
|
|
243
|
+
/**
 * Create a delta record for storage in the sync_deltas table.
 * Picks exactly collection/docId/bytes/seq from the input and stamps the
 * current wall-clock time; any extra input keys are ignored.
 *
 * @param input - { collection, docId, bytes, seq }
 * @returns Complete delta record with a `timestamp` field
 */
export function createDeltaRecord(input) {
    const { collection, docId, bytes, seq } = input;
    return { collection, docId, bytes, seq, timestamp: Date.now() };
}
/**
 * Create an outbox record for storage in the sync_outbox table.
 * Picks exactly collection/docId/delta/seq from the input and initializes the
 * processing state: status "pending", zero retries, createdAt = now.
 *
 * @param input - { collection, docId, delta, seq }
 * @returns Complete outbox record with status and timestamps
 */
export function createOutboxRecord(input) {
    const { collection, docId, delta, seq } = input;
    return {
        collection,
        docId,
        delta,
        seq,
        status: "pending",
        retries: 0,
        createdAt: Date.now(),
    };
}
|
|
275
|
+
/**
 * Create a sync trigger handler for a specific table.
 * The returned handler generates a CRDT delta for each insert/update/delete
 * and writes matching rows to sync_deltas and sync_outbox.
 *
 * @param tableName - The name of the table to handle
 * @returns Async trigger handler (ctx, change) => Promise<void>
 */
export function createSyncTriggerHandler(tableName) {
    // Derive the CRDT delta and sync document id for a change, or null when
    // the change carries no document to process.
    const buildDelta = (change) => {
        switch (change.operation) {
            case "insert":
                return change.newDoc
                    ? { delta: generateInsertDelta(change.newDoc), docId: change.newDoc.syncId }
                    : null;
            case "update":
                return change.oldDoc && change.newDoc
                    ? { delta: generateUpdateDelta(change.oldDoc, change.newDoc), docId: change.newDoc.syncId }
                    : null;
            case "delete":
                return change.oldDoc
                    ? { delta: generateDeleteDelta(change.oldDoc), docId: change.oldDoc.syncId }
                    : null;
            default:
                return null;
        }
    };
    return async function syncTriggerHandler(ctx, change) {
        // Only tables registered via registerSyncedTable are processed.
        if (!isSyncedTable(tableName)) {
            return;
        }
        // NOTE: the in-memory counter is consumed even when buildDelta bails
        // out below, matching the original call order.
        const seq = getNextSeq();
        const built = buildDelta(change);
        if (built === null) {
            return;
        }
        const { delta, docId } = built;
        const deltaRecord = createDeltaRecord({ collection: tableName, docId, bytes: delta, seq });
        const outboxRecord = createOutboxRecord({ collection: tableName, docId, delta, seq });
        // Convex v.bytes() expects an ArrayBuffer, so copy out the exact view.
        const bytesBuffer = delta.buffer.slice(delta.byteOffset, delta.byteOffset + delta.byteLength);
        await ctx.innerDb.insert("sync_deltas", { ...deltaRecord, bytes: bytesBuffer });
        await ctx.innerDb.insert("sync_outbox", { ...outboxRecord, delta: bytesBuffer });
    };
}
|
|
333
|
+
/**
 * Create a sync trigger handler that uses persisted sequence numbers.
 * Unlike createSyncTriggerHandler, the sequence is derived from the
 * sync_deltas table (via createSequenceCounter), so it survives redeploys.
 *
 * NOTE(review): the sequence counter's cache lives as long as this handler
 * (typically module lifetime), not per request — cached seq values could
 * drift if other writers insert deltas concurrently; confirm intended scope.
 *
 * @param tableName - The name of the table to handle
 * @returns Async trigger handler (ctx, change) => Promise<void>
 */
export function createSyncTriggerHandlerAsync(tableName) {
    // One counter per handler; first call per collection queries the DB.
    const sequenceCounter = createSequenceCounter();
    return async function syncTriggerHandlerAsync(ctx, change) {
        // Only tables registered via registerSyncedTable are processed.
        if (!isSyncedTable(tableName)) {
            return;
        }
        // Persisted sequence number (max seq in sync_deltas + 1, then cached).
        const seq = await sequenceCounter.getNext(ctx.db, tableName);
        let delta;
        let docId;
        // Build the CRDT delta for the operation; bail out if the expected
        // document snapshot is missing on the change.
        switch (change.operation) {
            case "insert":
                if (!change.newDoc)
                    return;
                delta = generateInsertDelta(change.newDoc);
                docId = change.newDoc.syncId;
                break;
            case "update":
                if (!change.oldDoc || !change.newDoc)
                    return;
                delta = generateUpdateDelta(change.oldDoc, change.newDoc);
                docId = change.newDoc.syncId;
                break;
            case "delete":
                if (!change.oldDoc)
                    return;
                delta = generateDeleteDelta(change.oldDoc);
                docId = change.oldDoc.syncId;
                break;
            default:
                return;
        }
        // Build the two persistence records (delta log + outbox entry).
        const deltaRecord = createDeltaRecord({
            collection: tableName,
            docId,
            bytes: delta,
            seq,
        });
        const outboxRecord = createOutboxRecord({
            collection: tableName,
            docId,
            delta,
            seq,
        });
        // Convert Uint8Array to ArrayBuffer for Convex v.bytes() compatibility.
        const bytesBuffer = delta.buffer.slice(delta.byteOffset, delta.byteOffset + delta.byteLength);
        await ctx.innerDb.insert("sync_deltas", { ...deltaRecord, bytes: bytesBuffer });
        await ctx.innerDb.insert("sync_outbox", { ...outboxRecord, delta: bytesBuffer });
    };
}
|
|
395
|
+
/**
 * Map-like wrapper that provides backward compatibility for registered triggers.
 * Wraps handler-array storage behind Map-style has()/get()/set() plus a
 * getOrCreate() that lazily initializes an empty handler list per table.
 */
class RegisteredTriggersMap {
    // Backing Map of tableName -> array of handlers.
    _storage;
    constructor() {
        this._storage = new Map();
    }
    /** @returns {boolean} Whether any handlers were stored for the table. */
    has(tableName) {
        return this._storage.has(tableName);
    }
    /** @returns The handler array for the table, or undefined. */
    get(tableName) {
        return this._storage.get(tableName);
    }
    /** Replace the handler array for a table. */
    set(tableName, handlers) {
        this._storage.set(tableName, handlers);
    }
    /** Return the table's handler array, creating an empty one on first use. */
    getOrCreate(tableName) {
        const existing = this._storage.get(tableName);
        if (existing !== undefined) {
            return existing;
        }
        const fresh = [];
        this._storage.set(tableName, fresh);
        return fresh;
    }
}
|
|
421
|
+
/**
 * Sync triggers wrapper that uses convex-helpers Triggers internally.
 * Keeps a Map-like `registered` view for backward compatibility while
 * delegating the real database wrapping to the Triggers implementation.
 */
class SyncTriggersWrapper {
    /**
     * Internal Triggers instance from convex-helpers (GenericDataModel for
     * flexibility across different Convex apps).
     */
    _triggers;
    /** Map-like storage kept for backward compatibility with existing tests. */
    registered;
    constructor() {
        this._triggers = new Triggers();
        this.registered = new RegisteredTriggersMap();
    }
    /**
     * Register a trigger for a table.
     * Records the handler in both the compatibility Map and the real
     * Triggers instance (cast needed since GenericDataModel has no
     * table-name typing).
     */
    register(tableName, handler) {
        this.registered.getOrCreate(tableName).push(handler);
        this._triggers.register(tableName, handler);
    }
    /**
     * Wrap a mutation ctx's DB so registered triggers fire on writes.
     * Delegates to the real Triggers.wrapDB from convex-helpers.
     */
    wrapDB(ctx) {
        return this._triggers.wrapDB(ctx);
    }
    /** Expose the underlying Triggers instance for advanced usage. */
    getTriggersInstance() {
        return this._triggers;
    }
}
|
|
468
|
+
/**
 * Shared sync triggers instance (module-level singleton).
 * Uses convex-helpers Triggers internally for real database wrapping while
 * maintaining backward compatibility with the Map-like `registered` property.
 * Consumed by customMutation() below.
 */
export const syncTriggers = new SyncTriggersWrapper();
|
|
474
|
+
/**
 * Create a new Triggers instance for sync operations.
 * Thin factory over the convex-helpers Triggers class; no handlers are
 * pre-registered (see createSyncTriggersWithHandlers for that).
 *
 * @returns A new Triggers instance with register and wrapDB methods
 *
 * @example
 * ```typescript
 * import { createSyncTriggers } from "@fatagnus/dink-convex";
 * import { customMutation } from "convex-helpers/server/customFunctions";
 * import { mutation as rawMutation } from "./_generated/server";
 *
 * const triggers = createSyncTriggers();
 * triggers.register("tasks", async (ctx, change) => {
 *   console.log("Task changed:", change);
 * });
 *
 * export const mutation = customMutation(rawMutation, customCtx(triggers.wrapDB));
 * ```
 */
export function createSyncTriggers() {
    return new Triggers();
}
|
|
498
|
+
/**
 * Create a Triggers instance with sync handlers pre-registered for the given
 * tables. For each table this registers it in the in-memory sync registry
 * (registerSyncedTable) and attaches a delta-generating handler
 * (createSyncTriggerHandler).
 *
 * @param tableNames - Array of table names to register sync handlers for
 * @returns A Triggers instance with sync handlers registered
 *
 * @example
 * ```typescript
 * import { createSyncTriggersWithHandlers } from "@fatagnus/dink-convex";
 * import { customMutation } from "convex-helpers/server/customFunctions";
 * import { mutation as rawMutation } from "./_generated/server";
 *
 * const triggers = createSyncTriggersWithHandlers(["tasks", "users"]);
 *
 * export const mutation = customMutation(rawMutation, customCtx(triggers.wrapDB));
 * ```
 */
export function createSyncTriggersWithHandlers(tableNames) {
    const triggers = new Triggers();
    tableNames.forEach((tableName) => {
        // Track the table so isSyncedTable() passes, then attach its handler.
        registerSyncedTable(tableName);
        triggers.register(tableName, createSyncTriggerHandler(tableName));
    });
    return triggers;
}
|
|
530
|
+
/**
 * Create a sync trigger handler that checks the sync_config table before
 * processing, instead of the in-memory registry.
 *
 * NOTE(review): despite being the "production" variant, this still uses the
 * deprecated in-memory getNextSeq() (resets on redeploy) rather than a
 * persisted sequence — confirm whether it should use createSequenceCounter.
 *
 * @param tableName - The name of the table to handle
 * @returns Async trigger handler (ctx, change) => Promise<void>
 */
export function createConfigAwareSyncTriggerHandler(tableName) {
    return async function configAwareSyncTriggerHandler(ctx, change) {
        // Skip entirely unless sync_config marks this collection enabled.
        const isEnabled = await checkSyncConfigEnabled(ctx.db, tableName);
        if (!isEnabled) {
            return;
        }
        // In-memory sequence; consumed even if the switch below bails out.
        const seq = getNextSeq();
        let delta;
        let docId;
        // Build the CRDT delta for the operation; bail out if the expected
        // document snapshot is missing on the change.
        switch (change.operation) {
            case "insert":
                if (!change.newDoc)
                    return;
                delta = generateInsertDelta(change.newDoc);
                docId = change.newDoc.syncId;
                break;
            case "update":
                if (!change.oldDoc || !change.newDoc)
                    return;
                delta = generateUpdateDelta(change.oldDoc, change.newDoc);
                docId = change.newDoc.syncId;
                break;
            case "delete":
                if (!change.oldDoc)
                    return;
                delta = generateDeleteDelta(change.oldDoc);
                docId = change.oldDoc.syncId;
                break;
            default:
                return;
        }
        // Build the two persistence records (delta log + outbox entry).
        const deltaRecord = createDeltaRecord({
            collection: tableName,
            docId,
            bytes: delta,
            seq,
        });
        const outboxRecord = createOutboxRecord({
            collection: tableName,
            docId,
            delta,
            seq,
        });
        // Convert Uint8Array to ArrayBuffer for Convex v.bytes() compatibility.
        const bytesBuffer = delta.buffer.slice(delta.byteOffset, delta.byteOffset + delta.byteLength);
        await ctx.innerDb.insert("sync_deltas", { ...deltaRecord, bytes: bytesBuffer });
        await ctx.innerDb.insert("sync_outbox", { ...outboxRecord, delta: bytesBuffer });
    };
}
|
|
589
|
+
/**
 * Schedule immediate outbox processing.
 *
 * Calls scheduler.runAfter(0, ...) to trigger immediate processing of the
 * outbox after an insert into sync_outbox, giving near real-time sync.
 * If scheduler is null/undefined this is a no-op (graceful degradation for
 * contexts where scheduling isn't available).
 *
 * The original implementation hard-coded a placeholder string as the function
 * reference and never awaited the scheduler call (a floating promise, so
 * scheduling failures were silently dropped). Callers can now pass the real
 * function reference (e.g. internal.outboxProcessor.processOutboxBatch); the
 * default keeps the legacy placeholder for backward compatibility.
 *
 * @param scheduler - The Convex scheduler object, or null/undefined
 * @param processFn - Function reference to schedule; defaults to the legacy
 *                    placeholder string "internal.outboxProcessor.processOutboxBatch"
 * @returns Promise that resolves once scheduling has completed
 */
export async function scheduleImmediateProcessing(scheduler, processFn = "internal.outboxProcessor.processOutboxBatch") {
    if (!scheduler) {
        return;
    }
    // Await so scheduling errors propagate to the caller instead of floating.
    await scheduler.runAfter(0, processFn, {});
}
|
|
611
|
+
/**
 * Create a sync trigger handler with immediate scheduling support.
 * Identical to createSyncTriggerHandler, plus a final call to
 * scheduleImmediateProcessing(ctx.scheduler) for near real-time outbox
 * processing.
 *
 * NOTE(review): scheduleImmediateProcessing passes a placeholder string as
 * the scheduled function reference by default — verify the scheduler accepts
 * it in this deployment.
 *
 * @param tableName - The name of the table to handle
 * @returns Async trigger handler (ctx, change) => Promise<void>
 */
export function createSyncTriggerHandlerWithScheduling(tableName) {
    return async function syncTriggerHandlerWithScheduling(ctx, change) {
        // Only tables registered via registerSyncedTable are processed.
        if (!isSyncedTable(tableName)) {
            return;
        }
        // In-memory sequence; consumed even if the switch below bails out.
        const seq = getNextSeq();
        let delta;
        let docId;
        // Build the CRDT delta for the operation; bail out if the expected
        // document snapshot is missing on the change.
        switch (change.operation) {
            case "insert":
                if (!change.newDoc)
                    return;
                delta = generateInsertDelta(change.newDoc);
                docId = change.newDoc.syncId;
                break;
            case "update":
                if (!change.oldDoc || !change.newDoc)
                    return;
                delta = generateUpdateDelta(change.oldDoc, change.newDoc);
                docId = change.newDoc.syncId;
                break;
            case "delete":
                if (!change.oldDoc)
                    return;
                delta = generateDeleteDelta(change.oldDoc);
                docId = change.oldDoc.syncId;
                break;
            default:
                return;
        }
        // Build the two persistence records (delta log + outbox entry).
        const deltaRecord = createDeltaRecord({
            collection: tableName,
            docId,
            bytes: delta,
            seq,
        });
        const outboxRecord = createOutboxRecord({
            collection: tableName,
            docId,
            delta,
            seq,
        });
        // Convert Uint8Array to ArrayBuffer for Convex v.bytes() compatibility.
        const bytesBuffer = delta.buffer.slice(delta.byteOffset, delta.byteOffset + delta.byteLength);
        await ctx.innerDb.insert("sync_deltas", { ...deltaRecord, bytes: bytesBuffer });
        await ctx.innerDb.insert("sync_outbox", { ...outboxRecord, delta: bytesBuffer });
        // Kick the outbox processor immediately (no-op when no scheduler).
        await scheduleImmediateProcessing(ctx.scheduler);
    };
}
|
|
673
|
+
/**
 * Custom mutation wrapper that includes trigger logic.
 *
 * Factory that wraps a mutation config so its handler runs against a ctx
 * whose db is wrapped by the shared syncTriggers instance. In production
 * this role is typically filled by customMutation from convex-helpers
 * combined with syncTriggers.wrapDB.
 *
 * @param config - { args, handler } mutation definition
 * @returns { args, handler } with the handler receiving a trigger-wrapped ctx
 *
 * @example
 * ```typescript
 * import { customMutation } from "@fatagnus/dink-convex";
 *
 * export const createTask = customMutation({
 *   args: { text: v.string() },
 *   handler: async (ctx, args) => {
 *     return await ctx.db.insert("tasks", {
 *       text: args.text,
 *       syncId: generateSyncId(),
 *     });
 *   },
 * });
 * ```
 */
export function customMutation(config) {
    const { args, handler } = config;
    return {
        args,
        handler: async (ctx, mutationArgs) => {
            // Run the user handler against a trigger-wrapped context.
            const wrappedCtx = syncTriggers.wrapDB(ctx);
            return handler(wrappedCtx, mutationArgs);
        },
    };
}
|
|
705
|
+
//# sourceMappingURL=triggers.js.map
|