@trestleinc/replicate 1.1.2 → 1.2.0-preview.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +40 -41
- package/package.json +3 -1
- package/src/client/collection.ts +334 -523
- package/src/client/errors.ts +1 -1
- package/src/client/index.ts +4 -7
- package/src/client/merge.ts +2 -2
- package/src/client/persistence/indexeddb.ts +10 -14
- package/src/client/prose.ts +147 -203
- package/src/client/services/awareness.ts +373 -0
- package/src/client/services/context.ts +114 -0
- package/src/client/services/seq.ts +78 -0
- package/src/client/services/session.ts +20 -0
- package/src/client/services/sync.ts +122 -0
- package/src/client/subdocs.ts +263 -0
- package/src/component/_generated/api.ts +2 -2
- package/src/component/_generated/component.ts +73 -28
- package/src/component/mutations.ts +734 -0
- package/src/component/schema.ts +31 -14
- package/src/server/collection.ts +98 -0
- package/src/server/index.ts +2 -2
- package/src/server/{storage.ts → replicate.ts} +214 -75
- package/dist/client/index.d.ts +0 -314
- package/dist/client/index.js +0 -4027
- package/dist/component/_generated/api.d.ts +0 -31
- package/dist/component/_generated/api.js +0 -25
- package/dist/component/_generated/component.d.ts +0 -91
- package/dist/component/_generated/component.js +0 -1
- package/dist/component/_generated/dataModel.d.ts +0 -42
- package/dist/component/_generated/dataModel.js +0 -1
- package/dist/component/_generated/server.d.ts +0 -117
- package/dist/component/_generated/server.js +0 -73
- package/dist/component/_virtual/rolldown_runtime.js +0 -18
- package/dist/component/convex.config.d.ts +0 -6
- package/dist/component/convex.config.js +0 -8
- package/dist/component/logger.d.ts +0 -12
- package/dist/component/logger.js +0 -27
- package/dist/component/public.d.ts +0 -83
- package/dist/component/public.js +0 -325
- package/dist/component/schema.d.ts +0 -54
- package/dist/component/schema.js +0 -29
- package/dist/component/shared/types.d.ts +0 -9
- package/dist/component/shared/types.js +0 -15
- package/dist/server/index.d.ts +0 -135
- package/dist/server/index.js +0 -368
- package/dist/shared/index.d.ts +0 -29
- package/dist/shared/index.js +0 -1
- package/src/client/prose-schema.ts +0 -55
- package/src/client/services/cursor.ts +0 -109
- package/src/component/public.ts +0 -453
- package/src/server/builder.ts +0 -98
- /package/src/client/{replicate.ts → ops.ts} +0 -0
|
@@ -0,0 +1,734 @@
|
|
|
1
|
+
import * as Y from "yjs";
|
|
2
|
+
import { v } from "convex/values";
|
|
3
|
+
import { mutation, query } from "$/component/_generated/server";
|
|
4
|
+
import { api } from "$/component/_generated/api";
|
|
5
|
+
import { getLogger } from "$/component/logger";
|
|
6
|
+
import { OperationType } from "$/shared/types";
|
|
7
|
+
|
|
8
|
+
export { OperationType };
|
|
9
|
+
|
|
10
|
+
// Delta count threshold matching y-indexeddb's PREFERRED_TRIM_SIZE
// Once a single document has accumulated this many delta rows, the `stream`
// query flags it for compaction (see the `compact` mutation below).
const DEFAULT_DELTA_COUNT_THRESHOLD = 500;
|
|
12
|
+
|
|
13
|
+
async function getNextSeq(ctx: any, collection: string): Promise<number> {
|
|
14
|
+
const latest = await ctx.db
|
|
15
|
+
.query("documents")
|
|
16
|
+
.withIndex("by_seq", (q: any) => q.eq("collection", collection))
|
|
17
|
+
.order("desc")
|
|
18
|
+
.first();
|
|
19
|
+
return (latest?.seq ?? 0) + 1;
|
|
20
|
+
}
|
|
21
|
+
|
|
22
|
+
/**
 * Record a document insert as a new entry in the collection's append-only
 * delta log.
 *
 * Every write appends a row to the `documents` table tagged with a
 * collection-wide sequence number, so clients can replay changes in order
 * via the `stream` query. The operation kind is not stored on the row;
 * presumably the Yjs payload in `bytes` carries the actual change — the
 * insert/update/delete mutations in this file have identical handlers and
 * differ only in name (TODO confirm the distinction is purely client-facing).
 *
 * Returns `{ success, seq }` where `seq` is the log position assigned.
 */
export const insertDocument = mutation({
  args: {
    collection: v.string(),
    document: v.string(),
    // Yjs update payload; `compact` merges these with Y.mergeUpdatesV2,
    // so they are expected to be v2-encoded updates.
    bytes: v.bytes(),
  },
  returns: v.object({
    success: v.boolean(),
    seq: v.number(),
  }),
  handler: async (ctx, args) => {
    // Serializable mutation execution makes read-then-increment safe here.
    const seq = await getNextSeq(ctx, args.collection);

    await ctx.db.insert("documents", {
      collection: args.collection,
      document: args.document,
      bytes: args.bytes,
      seq,
    });

    return { success: true, seq };
  },
});
|
|
45
|
+
|
|
46
|
+
/**
 * Record a document update as a new entry in the collection's append-only
 * delta log.
 *
 * Appends a row to `documents` with the next collection-wide sequence
 * number; clients replay these rows in seq order via `stream`. The handler
 * is identical to `insertDocument`/`deleteDocument` — the update semantics
 * presumably live in the Yjs payload, not in the table row (TODO confirm).
 *
 * Returns `{ success, seq }` where `seq` is the log position assigned.
 */
export const updateDocument = mutation({
  args: {
    collection: v.string(),
    document: v.string(),
    // Yjs v2-encoded update payload (merged later by Y.mergeUpdatesV2).
    bytes: v.bytes(),
  },
  returns: v.object({
    success: v.boolean(),
    seq: v.number(),
  }),
  handler: async (ctx, args) => {
    // Serializable mutation execution makes read-then-increment safe here.
    const seq = await getNextSeq(ctx, args.collection);

    await ctx.db.insert("documents", {
      collection: args.collection,
      document: args.document,
      bytes: args.bytes,
      seq,
    });

    return { success: true, seq };
  },
});
|
|
69
|
+
|
|
70
|
+
/**
 * Record a document deletion as a new entry in the collection's append-only
 * delta log.
 *
 * Note this does NOT remove any rows: like the other write mutations it
 * appends a delta row with the next collection-wide sequence number. The
 * deletion itself is presumably expressed inside the Yjs payload (TODO
 * confirm — the handler is byte-identical to `insertDocument`).
 *
 * Returns `{ success, seq }` where `seq` is the log position assigned.
 */
export const deleteDocument = mutation({
  args: {
    collection: v.string(),
    document: v.string(),
    // Yjs v2-encoded update payload (merged later by Y.mergeUpdatesV2).
    bytes: v.bytes(),
  },
  returns: v.object({
    success: v.boolean(),
    seq: v.number(),
  }),
  handler: async (ctx, args) => {
    // Serializable mutation execution makes read-then-increment safe here.
    const seq = await getNextSeq(ctx, args.collection);

    await ctx.db.insert("documents", {
      collection: args.collection,
      document: args.document,
      bytes: args.bytes,
      seq,
    });

    return { success: true, seq };
  },
});
|
|
93
|
+
|
|
94
|
+
// Default client heartbeat period in milliseconds. `mark` schedules an
// automatic `disconnect` at 2.5x this interval, so a client that misses
// two consecutive heartbeats is considered gone.
const DEFAULT_HEARTBEAT_INTERVAL = 10000;
|
|
95
|
+
|
|
96
|
+
/**
 * Heartbeat / presence upsert for a client session on one document.
 *
 * Each call:
 *  1. Looks up the client's session row (by collection/document/client).
 *  2. Cancels any previously scheduled auto-disconnect job.
 *  3. Schedules a fresh `disconnect` to fire after 2.5x the heartbeat
 *     interval (i.e. the session dies if ~2 heartbeats are missed).
 *  4. Patches the session with `seen`/`timeout`/`connected` plus any of the
 *     optional fields the client supplied, or inserts a new session row.
 *
 * `seq` is monotone: an existing session's seq is never moved backwards
 * (Math.max), so an out-of-order heartbeat cannot regress sync progress.
 */
export const mark = mutation({
  args: {
    collection: v.string(),
    document: v.string(),
    client: v.string(),
    // Yjs state vector of the client, used by `compact` to decide whether
    // deltas can be garbage-collected.
    vector: v.optional(v.bytes()),
    // Highest log seq the client has applied.
    seq: v.optional(v.number()),
    user: v.optional(v.string()),
    profile: v.optional(v.object({
      name: v.optional(v.string()),
      color: v.optional(v.string()),
      avatar: v.optional(v.string()),
    })),
    cursor: v.optional(v.object({
      anchor: v.any(),
      head: v.any(),
      field: v.optional(v.string()),
    })),
    // Client's heartbeat period in ms; defaults to DEFAULT_HEARTBEAT_INTERVAL.
    interval: v.optional(v.number()),
  },
  returns: v.null(),
  handler: async (ctx, args) => {
    const now = Date.now();
    const interval = args.interval ?? DEFAULT_HEARTBEAT_INTERVAL;

    const existing = await ctx.db
      .query("sessions")
      .withIndex("by_client", (q: any) =>
        q.eq("collection", args.collection)
          .eq("document", args.document)
          .eq("client", args.client),
      )
      .first();

    // Reset the dead-man's switch: drop the previous pending disconnect.
    if (existing?.timeout) {
      await ctx.scheduler.cancel(existing.timeout);
    }

    // Re-arm it: if no heartbeat arrives within 2.5 intervals, the client
    // is force-disconnected server-side.
    const timeout = await ctx.scheduler.runAfter(
      interval * 2.5,
      api.mutations.disconnect,
      {
        collection: args.collection,
        document: args.document,
        client: args.client,
      },
    );

    const updates: Record<string, unknown> = {
      seen: now,
      timeout,
      connected: true,
    };

    // Only overwrite fields the client actually sent; absent fields keep
    // their stored values.
    if (args.vector !== undefined) updates.vector = args.vector;
    if (args.seq !== undefined) {
      // seq is monotone — never regress an existing session's progress.
      updates.seq = existing ? Math.max(existing.seq, args.seq) : args.seq;
    }
    if (args.user !== undefined) updates.user = args.user;
    if (args.profile !== undefined) updates.profile = args.profile;
    if (args.cursor !== undefined) {
      updates.cursor = args.cursor;
    }

    if (existing) {
      await ctx.db.patch(existing._id, updates);
    }
    else {
      await ctx.db.insert("sessions", {
        collection: args.collection,
        document: args.document,
        client: args.client,
        vector: args.vector,
        connected: true,
        seq: args.seq ?? 0,
        seen: now,
        user: args.user,
        profile: args.profile,
        cursor: args.cursor,
        timeout,
      });
    }

    return null;
  },
});
|
|
182
|
+
|
|
183
|
+
/**
 * Fold a document's delta rows into a single snapshot and, when safe,
 * delete the deltas.
 *
 * Steps:
 *  1. Collect all deltas for the document; bail early if there are none.
 *  2. Merge (existing snapshot + deltas) into one Yjs v2 update and derive
 *     its state vector.
 *  3. Check every *connected* session: deltas may only be deleted if every
 *     session's reported state vector already covers the merged state
 *     (diffUpdateV2 result of <= 2 bytes, i.e. the empty v2 update).
 *  4. Upsert the snapshot (its `seq` becomes the max delta seq), then
 *     delete the deltas only if step 3 allowed it.
 *  5. Opportunistically delete *disconnected* sessions that are fully
 *     caught up (or have no vector at all).
 *
 * NOTE(review): when deltas are retained (canDeleteAll false), the snapshot
 * already contains their data; merging snapshot + deltas is idempotent for
 * CRDT updates, so re-running compaction later looks safe — confirm.
 * NOTE(review): `merged.buffer as ArrayBuffer` assumes the Uint8Array
 * exactly spans its backing buffer (byteOffset 0, full length). lib0's
 * encoders appear to allocate exact-size arrays, but verify — otherwise
 * stray bytes would be persisted.
 */
export const compact = mutation({
  args: {
    collection: v.string(),
    document: v.string(),
  },
  returns: v.object({
    success: v.boolean(),
    removed: v.number(),
    retained: v.number(),
    size: v.number(),
  }),
  handler: async (ctx, args) => {
    const logger = getLogger(["compaction"]);
    const now = Date.now();

    const deltas = await ctx.db
      .query("documents")
      .withIndex("by_document", (q: any) =>
        q.eq("collection", args.collection).eq("document", args.document),
      )
      .collect();

    if (deltas.length === 0) {
      // Nothing to fold; report an empty-but-successful compaction.
      return { success: true, removed: 0, retained: 0, size: 0 };
    }

    const existing = await ctx.db
      .query("snapshots")
      .withIndex("by_document", (q: any) =>
        q.eq("collection", args.collection).eq("document", args.document),
      )
      .first();

    // Merge order: prior snapshot first, then deltas (order is irrelevant
    // for Yjs merges, but kept explicit).
    const updates: Uint8Array[] = [];
    if (existing) {
      updates.push(new Uint8Array(existing.bytes));
    }
    updates.push(...deltas.map((d: any) => new Uint8Array(d.bytes)));

    const merged = Y.mergeUpdatesV2(updates);
    const vector = Y.encodeStateVectorFromUpdateV2(merged);

    const sessions = await ctx.db
      .query("sessions")
      .withIndex("by_document", (q: any) =>
        q.eq("collection", args.collection)
          .eq("document", args.document),
      )
      .filter((q: any) => q.eq(q.field("connected"), true))
      .collect();

    // Deltas may only be deleted once every connected session provably has
    // all of their data; otherwise a lagging client would lose updates.
    let canDeleteAll = true;
    for (const session of sessions) {
      if (!session.vector) {
        // No vector reported yet — we cannot prove the client is caught up.
        canDeleteAll = false;
        logger.warn("Session without vector, skipping full compaction", {
          client: session.client,
        });
        break;
      }

      const sessionVector = new Uint8Array(session.vector);
      const missing = Y.diffUpdateV2(merged, sessionVector);

      // An empty v2 update encodes to 2 bytes; anything larger means the
      // session still lacks data.
      if (missing.byteLength > 2) {
        canDeleteAll = false;
        logger.debug("Session still needs data", {
          client: session.client,
          missingSize: missing.byteLength,
        });
        break;
      }
    }

    // The snapshot's seq is the highest delta seq it absorbs, so `stream`
    // can hand it out as a resume point.
    const seq = Math.max(...deltas.map((d: any) => d.seq));

    if (existing) {
      await ctx.db.patch(existing._id, {
        bytes: merged.buffer as ArrayBuffer,
        vector: vector.buffer as ArrayBuffer,
        seq,
        created: now,
      });
    }
    else {
      await ctx.db.insert("snapshots", {
        collection: args.collection,
        document: args.document,
        bytes: merged.buffer as ArrayBuffer,
        vector: vector.buffer as ArrayBuffer,
        seq,
        created: now,
      });
    }

    let removed = 0;
    if (canDeleteAll) {
      for (const delta of deltas) {
        await ctx.db.delete(delta._id);
        removed++;
      }
      logger.info("Full compaction completed", {
        document: args.document,
        removed,
        size: merged.byteLength,
      });
    }
    else {
      logger.info("Snapshot created, deltas retained (clients still syncing)", {
        document: args.document,
        deltaCount: deltas.length,
        activeCount: sessions.length,
      });
    }

    // Garbage-collect disconnected sessions that are fully caught up (or
    // never reported a vector — nothing to preserve for them).
    const disconnected = await ctx.db
      .query("sessions")
      .withIndex("by_document", (q: any) =>
        q.eq("collection", args.collection)
          .eq("document", args.document),
      )
      .filter((q: any) => q.eq(q.field("connected"), false))
      .collect();

    let cleaned = 0;
    for (const session of disconnected) {
      if (!session.vector) {
        await ctx.db.delete(session._id);
        cleaned++;
        continue;
      }

      const sessionVector = new Uint8Array(session.vector);
      const missing = Y.diffUpdateV2(merged, sessionVector);

      if (missing.byteLength <= 2) {
        await ctx.db.delete(session._id);
        cleaned++;
      }
    }

    if (cleaned > 0) {
      logger.info("Cleaned up disconnected sessions", {
        document: args.document,
        cleaned,
      });
    }

    return {
      success: true,
      removed,
      retained: deltas.length - removed,
      size: merged.byteLength,
    };
  },
});
|
|
339
|
+
|
|
340
|
+
/**
 * Incremental change feed for a collection.
 *
 * Given the client's last-applied `seq`, returns up to `limit` delta rows
 * with a higher seq (type Delta). If there are no newer deltas but the
 * oldest stored delta is *ahead* of the client (its history was compacted
 * away), falls back to returning every snapshot in the collection (type
 * Snapshot). Otherwise the client is up to date and gets an empty page.
 *
 * Also piggybacks a compaction hint: any document whose delta count has
 * reached `threshold` is listed in `compact.documents`.
 *
 * NOTE(review): computing the per-document delta counts does a full
 * `.collect()` of EVERY delta row in the collection on every non-empty
 * page — O(collection) work per poll. A per-document count (or sampling
 * only the documents present in this page) would be much cheaper.
 *
 * NOTE(review): the disparity branch below looks unreachable — if
 * `args.seq < oldest.seq` then the by_seq query (`gt seq`) would have
 * returned `oldest` and we'd have taken the first branch. Consequence: a
 * client whose gap was compacted away (e.g. at seq 5, deltas 6..100
 * compacted, new delta 101 exists) receives delta 101 WITHOUT the snapshot
 * covering 6..100. Verify against the client's recovery path (`recovery`
 * query) before relying on this branch.
 */
export const stream = query({
  args: {
    collection: v.string(),
    // Highest seq the client has already applied.
    seq: v.number(),
    limit: v.optional(v.number()),
    // Per-document delta count that triggers a compaction hint.
    threshold: v.optional(v.number()),
  },
  returns: v.object({
    changes: v.array(
      v.object({
        document: v.string(),
        bytes: v.bytes(),
        seq: v.number(),
        type: v.string(),
      }),
    ),
    seq: v.number(),
    more: v.boolean(),
    compact: v.optional(v.object({
      documents: v.array(v.string()),
    })),
  }),
  handler: async (ctx, args) => {
    const limit = args.limit ?? 100;
    const threshold = args.threshold ?? DEFAULT_DELTA_COUNT_THRESHOLD;

    const documents = await ctx.db
      .query("documents")
      .withIndex("by_seq", (q: any) =>
        q.eq("collection", args.collection).gt("seq", args.seq),
      )
      .order("asc")
      .take(limit);

    if (documents.length > 0) {
      const changes = documents.map((doc: any) => ({
        document: doc.document,
        bytes: doc.bytes,
        seq: doc.seq,
        type: OperationType.Delta,
      }));

      // Resume point = seq of the last delta in this page.
      const newSeq = documents[documents.length - 1]?.seq ?? args.seq;

      // Count deltas per document to decide who needs compaction.
      // (Full-collection scan — see perf NOTE in the doc comment.)
      const allDocs = await ctx.db
        .query("documents")
        .withIndex("by_collection", (q: any) => q.eq("collection", args.collection))
        .collect();

      const countByDoc = new Map<string, number>();
      for (const doc of allDocs) {
        const current = countByDoc.get(doc.document) ?? 0;
        countByDoc.set(doc.document, current + 1);
      }

      const documentsNeedingCompaction: string[] = [];
      for (const [docId, count] of countByDoc) {
        if (count >= threshold) {
          documentsNeedingCompaction.push(docId);
        }
      }

      return {
        changes,
        seq: newSeq,
        // A full page means there may be more waiting; an exact-limit page
        // that happens to be the last yields one extra (empty) round-trip.
        more: documents.length === limit,
        compact: documentsNeedingCompaction.length > 0
          ? { documents: documentsNeedingCompaction }
          : undefined,
      };
    }

    // No newer deltas — check whether the client's position predates the
    // retained history (i.e. the deltas it needs were compacted away).
    const oldest = await ctx.db
      .query("documents")
      .withIndex("by_seq", (q: any) => q.eq("collection", args.collection))
      .order("asc")
      .first();

    if (oldest && args.seq < oldest.seq) {
      const snapshots = await ctx.db
        .query("snapshots")
        .withIndex("by_document", (q: any) => q.eq("collection", args.collection))
        .collect();

      if (snapshots.length === 0) {
        throw new Error(
          `Disparity detected but no snapshots available for collection: ${args.collection}. `
          + `Client seq: ${args.seq}, Oldest delta seq: ${oldest.seq}`,
        );
      }

      const changes = snapshots.map((s: any) => ({
        document: s.document,
        bytes: s.bytes,
        seq: s.seq,
        type: OperationType.Snapshot,
      }));

      const latestSeq = Math.max(...snapshots.map((s: any) => s.seq));

      return {
        changes,
        seq: latestSeq,
        more: false,
        compact: undefined,
      };
    }

    // Client is fully caught up.
    return {
      changes: [],
      seq: args.seq,
      more: false,
      compact: undefined,
    };
  },
});
|
|
456
|
+
|
|
457
|
+
export const recovery = query({
|
|
458
|
+
args: {
|
|
459
|
+
collection: v.string(),
|
|
460
|
+
document: v.string(),
|
|
461
|
+
vector: v.bytes(),
|
|
462
|
+
},
|
|
463
|
+
returns: v.object({
|
|
464
|
+
diff: v.optional(v.bytes()),
|
|
465
|
+
vector: v.bytes(),
|
|
466
|
+
}),
|
|
467
|
+
handler: async (ctx, args) => {
|
|
468
|
+
const logger = getLogger(["recovery"]);
|
|
469
|
+
|
|
470
|
+
const snapshot = await ctx.db
|
|
471
|
+
.query("snapshots")
|
|
472
|
+
.withIndex("by_document", (q: any) =>
|
|
473
|
+
q.eq("collection", args.collection).eq("document", args.document),
|
|
474
|
+
)
|
|
475
|
+
.first();
|
|
476
|
+
|
|
477
|
+
const deltas = await ctx.db
|
|
478
|
+
.query("documents")
|
|
479
|
+
.withIndex("by_document", (q: any) =>
|
|
480
|
+
q.eq("collection", args.collection).eq("document", args.document),
|
|
481
|
+
)
|
|
482
|
+
.collect();
|
|
483
|
+
|
|
484
|
+
if (!snapshot && deltas.length === 0) {
|
|
485
|
+
const emptyDoc = new Y.Doc();
|
|
486
|
+
const emptyVector = Y.encodeStateVector(emptyDoc);
|
|
487
|
+
emptyDoc.destroy();
|
|
488
|
+
return {
|
|
489
|
+
vector: emptyVector.buffer as ArrayBuffer,
|
|
490
|
+
};
|
|
491
|
+
}
|
|
492
|
+
|
|
493
|
+
const updates: Uint8Array[] = [];
|
|
494
|
+
|
|
495
|
+
if (snapshot) {
|
|
496
|
+
updates.push(new Uint8Array(snapshot.bytes));
|
|
497
|
+
}
|
|
498
|
+
|
|
499
|
+
for (const delta of deltas) {
|
|
500
|
+
updates.push(new Uint8Array(delta.bytes));
|
|
501
|
+
}
|
|
502
|
+
|
|
503
|
+
const merged = Y.mergeUpdatesV2(updates);
|
|
504
|
+
const clientVector = new Uint8Array(args.vector);
|
|
505
|
+
const diff = Y.diffUpdateV2(merged, clientVector);
|
|
506
|
+
const serverVector = Y.encodeStateVectorFromUpdateV2(merged);
|
|
507
|
+
|
|
508
|
+
logger.debug("Recovery sync computed", {
|
|
509
|
+
collection: args.collection,
|
|
510
|
+
document: args.document,
|
|
511
|
+
hasSnapshot: !!snapshot,
|
|
512
|
+
deltaCount: deltas.length,
|
|
513
|
+
diffSize: diff.byteLength,
|
|
514
|
+
hasDiff: diff.byteLength > 0,
|
|
515
|
+
});
|
|
516
|
+
|
|
517
|
+
return {
|
|
518
|
+
diff: diff.byteLength > 0 ? (diff.buffer as ArrayBuffer) : undefined,
|
|
519
|
+
vector: serverVector.buffer as ArrayBuffer,
|
|
520
|
+
};
|
|
521
|
+
},
|
|
522
|
+
});
|
|
523
|
+
|
|
524
|
+
export const getDocumentState = query({
|
|
525
|
+
args: {
|
|
526
|
+
collection: v.string(),
|
|
527
|
+
document: v.string(),
|
|
528
|
+
},
|
|
529
|
+
returns: v.union(
|
|
530
|
+
v.object({
|
|
531
|
+
bytes: v.bytes(),
|
|
532
|
+
seq: v.number(),
|
|
533
|
+
}),
|
|
534
|
+
v.null(),
|
|
535
|
+
),
|
|
536
|
+
handler: async (ctx, args) => {
|
|
537
|
+
const snapshot = await ctx.db
|
|
538
|
+
.query("snapshots")
|
|
539
|
+
.withIndex("by_document", (q: any) =>
|
|
540
|
+
q.eq("collection", args.collection).eq("document", args.document),
|
|
541
|
+
)
|
|
542
|
+
.first();
|
|
543
|
+
|
|
544
|
+
const deltas = await ctx.db
|
|
545
|
+
.query("documents")
|
|
546
|
+
.withIndex("by_document", (q: any) =>
|
|
547
|
+
q.eq("collection", args.collection).eq("document", args.document),
|
|
548
|
+
)
|
|
549
|
+
.collect();
|
|
550
|
+
|
|
551
|
+
if (!snapshot && deltas.length === 0) {
|
|
552
|
+
return null;
|
|
553
|
+
}
|
|
554
|
+
|
|
555
|
+
const updates: Uint8Array[] = [];
|
|
556
|
+
let latestSeq = 0;
|
|
557
|
+
|
|
558
|
+
if (snapshot) {
|
|
559
|
+
updates.push(new Uint8Array(snapshot.bytes));
|
|
560
|
+
latestSeq = Math.max(latestSeq, snapshot.seq);
|
|
561
|
+
}
|
|
562
|
+
|
|
563
|
+
for (const delta of deltas) {
|
|
564
|
+
updates.push(new Uint8Array(delta.bytes));
|
|
565
|
+
latestSeq = Math.max(latestSeq, delta.seq);
|
|
566
|
+
}
|
|
567
|
+
|
|
568
|
+
const merged = Y.mergeUpdatesV2(updates);
|
|
569
|
+
|
|
570
|
+
return {
|
|
571
|
+
bytes: merged.buffer as ArrayBuffer,
|
|
572
|
+
seq: latestSeq,
|
|
573
|
+
};
|
|
574
|
+
},
|
|
575
|
+
});
|
|
576
|
+
|
|
577
|
+
/**
 * List presence sessions for a document.
 *
 * Options:
 *  - connected: when set, only sessions in that connection state.
 *  - exclude:   drop a specific client (typically the caller itself).
 *  - group:     collapse multiple sessions per user (keyed by `user`,
 *               falling back to `client` for anonymous sessions) to the
 *               one most recently seen — useful when a user has the same
 *               document open in several tabs.
 */
export const sessions = query({
  args: {
    collection: v.string(),
    document: v.string(),
    connected: v.optional(v.boolean()),
    exclude: v.optional(v.string()),
    group: v.optional(v.boolean()),
  },
  returns: v.array(v.object({
    client: v.string(),
    document: v.string(),
    user: v.optional(v.string()),
    profile: v.optional(v.any()),
    cursor: v.optional(v.object({
      anchor: v.any(),
      head: v.any(),
      field: v.optional(v.string()),
    })),
    seen: v.number(),
  })),
  handler: async (ctx, args) => {
    let query = ctx.db
      .query("sessions")
      .withIndex("by_document", (q: any) =>
        q.eq("collection", args.collection)
          .eq("document", args.document),
      );

    // Post-index filter on connection state, only when the caller asked.
    if (args.connected !== undefined) {
      query = query.filter((q: any) => q.eq(q.field("connected"), args.connected));
    }

    const records = await query.collect();

    // Project down to the public session shape, dropping internals such as
    // vector/timeout/_id.
    let results = records
      .filter((p: any) => !args.exclude || p.client !== args.exclude)
      .map((p: any) => ({
        client: p.client,
        document: p.document,
        user: p.user,
        profile: p.profile,
        cursor: p.cursor,
        seen: p.seen,
      }));

    if (args.group) {
      // Keep only the most-recently-seen session per user (or per client
      // for sessions with no user).
      const byUser = new Map<string, typeof results[0]>();
      for (const p of results) {
        const key = p.user ?? p.client;
        const existing = byUser.get(key);
        if (!existing || p.seen > existing.seen) {
          byUser.set(key, p);
        }
      }
      results = Array.from(byUser.values());
    }

    return results;
  },
});
|
|
637
|
+
|
|
638
|
+
export const cursors = query({
|
|
639
|
+
args: {
|
|
640
|
+
collection: v.string(),
|
|
641
|
+
document: v.string(),
|
|
642
|
+
exclude: v.optional(v.string()),
|
|
643
|
+
},
|
|
644
|
+
returns: v.array(v.object({
|
|
645
|
+
client: v.string(),
|
|
646
|
+
user: v.optional(v.string()),
|
|
647
|
+
profile: v.optional(v.any()),
|
|
648
|
+
cursor: v.object({
|
|
649
|
+
anchor: v.any(),
|
|
650
|
+
head: v.any(),
|
|
651
|
+
field: v.optional(v.string()),
|
|
652
|
+
}),
|
|
653
|
+
})),
|
|
654
|
+
handler: async (ctx, args) => {
|
|
655
|
+
const records = await ctx.db
|
|
656
|
+
.query("sessions")
|
|
657
|
+
.withIndex("by_document", (q: any) =>
|
|
658
|
+
q.eq("collection", args.collection)
|
|
659
|
+
.eq("document", args.document),
|
|
660
|
+
)
|
|
661
|
+
.collect();
|
|
662
|
+
|
|
663
|
+
return records
|
|
664
|
+
.filter((p: any) => p.client !== args.exclude)
|
|
665
|
+
.filter((p: any) => p.cursor)
|
|
666
|
+
.map((p: any) => ({
|
|
667
|
+
client: p.client,
|
|
668
|
+
user: p.user,
|
|
669
|
+
profile: p.profile,
|
|
670
|
+
cursor: p.cursor,
|
|
671
|
+
}));
|
|
672
|
+
},
|
|
673
|
+
});
|
|
674
|
+
|
|
675
|
+
export const leave = mutation({
|
|
676
|
+
args: {
|
|
677
|
+
collection: v.string(),
|
|
678
|
+
document: v.string(),
|
|
679
|
+
client: v.string(),
|
|
680
|
+
},
|
|
681
|
+
returns: v.null(),
|
|
682
|
+
handler: async (ctx, args) => {
|
|
683
|
+
const existing = await ctx.db
|
|
684
|
+
.query("sessions")
|
|
685
|
+
.withIndex("by_client", (q: any) =>
|
|
686
|
+
q.eq("collection", args.collection)
|
|
687
|
+
.eq("document", args.document)
|
|
688
|
+
.eq("client", args.client),
|
|
689
|
+
)
|
|
690
|
+
.first();
|
|
691
|
+
|
|
692
|
+
if (existing) {
|
|
693
|
+
if (existing.timeout) {
|
|
694
|
+
await ctx.scheduler.cancel(existing.timeout);
|
|
695
|
+
}
|
|
696
|
+
await ctx.db.patch(existing._id, {
|
|
697
|
+
connected: false,
|
|
698
|
+
cursor: undefined,
|
|
699
|
+
timeout: undefined,
|
|
700
|
+
});
|
|
701
|
+
}
|
|
702
|
+
|
|
703
|
+
return null;
|
|
704
|
+
},
|
|
705
|
+
});
|
|
706
|
+
|
|
707
|
+
export const disconnect = mutation({
|
|
708
|
+
args: {
|
|
709
|
+
collection: v.string(),
|
|
710
|
+
document: v.string(),
|
|
711
|
+
client: v.string(),
|
|
712
|
+
},
|
|
713
|
+
returns: v.null(),
|
|
714
|
+
handler: async (ctx, args) => {
|
|
715
|
+
const existing = await ctx.db
|
|
716
|
+
.query("sessions")
|
|
717
|
+
.withIndex("by_client", (q: any) =>
|
|
718
|
+
q.eq("collection", args.collection)
|
|
719
|
+
.eq("document", args.document)
|
|
720
|
+
.eq("client", args.client),
|
|
721
|
+
)
|
|
722
|
+
.first();
|
|
723
|
+
|
|
724
|
+
if (existing) {
|
|
725
|
+
await ctx.db.patch(existing._id, {
|
|
726
|
+
connected: false,
|
|
727
|
+
cursor: undefined,
|
|
728
|
+
timeout: undefined,
|
|
729
|
+
});
|
|
730
|
+
}
|
|
731
|
+
|
|
732
|
+
return null;
|
|
733
|
+
},
|
|
734
|
+
});
|