@convex-dev/rag 0.1.7

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (113)
  1. package/LICENSE +201 -0
  2. package/README.md +371 -0
  3. package/dist/client/_generated/_ignore.d.ts +1 -0
  4. package/dist/client/_generated/_ignore.d.ts.map +1 -0
  5. package/dist/client/_generated/_ignore.js +3 -0
  6. package/dist/client/_generated/_ignore.js.map +1 -0
  7. package/dist/client/defaultChunker.d.ts +15 -0
  8. package/dist/client/defaultChunker.d.ts.map +1 -0
  9. package/dist/client/defaultChunker.js +148 -0
  10. package/dist/client/defaultChunker.js.map +1 -0
  11. package/dist/client/fileUtils.d.ts +24 -0
  12. package/dist/client/fileUtils.d.ts.map +1 -0
  13. package/dist/client/fileUtils.js +179 -0
  14. package/dist/client/fileUtils.js.map +1 -0
  15. package/dist/client/index.d.ts +442 -0
  16. package/dist/client/index.d.ts.map +1 -0
  17. package/dist/client/index.js +597 -0
  18. package/dist/client/index.js.map +1 -0
  19. package/dist/client/types.d.ts +29 -0
  20. package/dist/client/types.d.ts.map +1 -0
  21. package/dist/client/types.js +2 -0
  22. package/dist/client/types.js.map +1 -0
  23. package/dist/component/_generated/api.d.ts +439 -0
  24. package/dist/component/_generated/api.d.ts.map +1 -0
  25. package/dist/component/_generated/api.js +22 -0
  26. package/dist/component/_generated/api.js.map +1 -0
  27. package/dist/component/_generated/dataModel.d.ts +60 -0
  28. package/dist/component/_generated/server.d.ts +149 -0
  29. package/dist/component/_generated/server.d.ts.map +1 -0
  30. package/dist/component/_generated/server.js +74 -0
  31. package/dist/component/_generated/server.js.map +1 -0
  32. package/dist/component/chunks.d.ts +139 -0
  33. package/dist/component/chunks.d.ts.map +1 -0
  34. package/dist/component/chunks.js +413 -0
  35. package/dist/component/chunks.js.map +1 -0
  36. package/dist/component/convex.config.d.ts +3 -0
  37. package/dist/component/convex.config.d.ts.map +1 -0
  38. package/dist/component/convex.config.js +6 -0
  39. package/dist/component/convex.config.js.map +1 -0
  40. package/dist/component/embeddings/importance.d.ts +21 -0
  41. package/dist/component/embeddings/importance.d.ts.map +1 -0
  42. package/dist/component/embeddings/importance.js +67 -0
  43. package/dist/component/embeddings/importance.js.map +1 -0
  44. package/dist/component/embeddings/index.d.ts +23 -0
  45. package/dist/component/embeddings/index.d.ts.map +1 -0
  46. package/dist/component/embeddings/index.js +54 -0
  47. package/dist/component/embeddings/index.js.map +1 -0
  48. package/dist/component/embeddings/tables.d.ts +39 -0
  49. package/dist/component/embeddings/tables.d.ts.map +1 -0
  50. package/dist/component/embeddings/tables.js +53 -0
  51. package/dist/component/embeddings/tables.js.map +1 -0
  52. package/dist/component/entries.d.ts +167 -0
  53. package/dist/component/entries.d.ts.map +1 -0
  54. package/dist/component/entries.js +409 -0
  55. package/dist/component/entries.js.map +1 -0
  56. package/dist/component/filters.d.ts +46 -0
  57. package/dist/component/filters.d.ts.map +1 -0
  58. package/dist/component/filters.js +72 -0
  59. package/dist/component/filters.js.map +1 -0
  60. package/dist/component/namespaces.d.ts +131 -0
  61. package/dist/component/namespaces.d.ts.map +1 -0
  62. package/dist/component/namespaces.js +222 -0
  63. package/dist/component/namespaces.js.map +1 -0
  64. package/dist/component/schema.d.ts +1697 -0
  65. package/dist/component/schema.d.ts.map +1 -0
  66. package/dist/component/schema.js +88 -0
  67. package/dist/component/schema.js.map +1 -0
  68. package/dist/component/search.d.ts +20 -0
  69. package/dist/component/search.d.ts.map +1 -0
  70. package/dist/component/search.js +69 -0
  71. package/dist/component/search.js.map +1 -0
  72. package/dist/package.json +3 -0
  73. package/dist/react/index.d.ts +2 -0
  74. package/dist/react/index.d.ts.map +1 -0
  75. package/dist/react/index.js +6 -0
  76. package/dist/react/index.js.map +1 -0
  77. package/dist/shared.d.ts +479 -0
  78. package/dist/shared.d.ts.map +1 -0
  79. package/dist/shared.js +98 -0
  80. package/dist/shared.js.map +1 -0
  81. package/package.json +97 -0
  82. package/src/client/_generated/_ignore.ts +1 -0
  83. package/src/client/defaultChunker.test.ts +243 -0
  84. package/src/client/defaultChunker.ts +183 -0
  85. package/src/client/fileUtils.ts +179 -0
  86. package/src/client/index.test.ts +475 -0
  87. package/src/client/index.ts +1125 -0
  88. package/src/client/setup.test.ts +28 -0
  89. package/src/client/types.ts +69 -0
  90. package/src/component/_generated/api.d.ts +439 -0
  91. package/src/component/_generated/api.js +23 -0
  92. package/src/component/_generated/dataModel.d.ts +60 -0
  93. package/src/component/_generated/server.d.ts +149 -0
  94. package/src/component/_generated/server.js +90 -0
  95. package/src/component/chunks.test.ts +915 -0
  96. package/src/component/chunks.ts +555 -0
  97. package/src/component/convex.config.ts +7 -0
  98. package/src/component/embeddings/importance.test.ts +249 -0
  99. package/src/component/embeddings/importance.ts +75 -0
  100. package/src/component/embeddings/index.test.ts +482 -0
  101. package/src/component/embeddings/index.ts +99 -0
  102. package/src/component/embeddings/tables.ts +114 -0
  103. package/src/component/entries.test.ts +341 -0
  104. package/src/component/entries.ts +546 -0
  105. package/src/component/filters.ts +119 -0
  106. package/src/component/namespaces.ts +299 -0
  107. package/src/component/schema.ts +106 -0
  108. package/src/component/search.test.ts +445 -0
  109. package/src/component/search.ts +97 -0
  110. package/src/component/setup.test.ts +5 -0
  111. package/src/react/index.ts +7 -0
  112. package/src/shared.ts +247 -0
  113. package/src/vitest.config.ts +7 -0
package/src/component/entries.ts
@@ -0,0 +1,546 @@
+ import { assert, omit } from "convex-helpers";
+ import { createFunctionHandle, paginationOptsValidator } from "convex/server";
+ import { v, type Value } from "convex/values";
+ import type { ChunkerAction, EntryFilterValues, EntryId } from "../shared.js";
+ import {
+   statuses,
+   vActiveStatus,
+   vCreateChunkArgs,
+   vEntry,
+   vPaginationResult,
+   vStatus,
+   type Entry,
+ } from "../shared.js";
+ import { api, internal } from "./_generated/api.js";
+ import type { Doc, Id } from "./_generated/dataModel.js";
+ import {
+   internalMutation,
+   mutation,
+   query,
+   type MutationCtx,
+   type QueryCtx,
+ } from "./_generated/server.js";
+ import { deleteChunksPage, insertChunks } from "./chunks.js";
+ import schema, { type StatusWithOnComplete } from "./schema.js";
+ import { mergedStream } from "convex-helpers/server/stream";
+ import { stream } from "convex-helpers/server/stream";
+ import {
+   getCompatibleNamespaceHandler,
+   publicNamespace,
+   vNamespaceLookupArgs,
+ } from "./namespaces.js";
+ import type { OnComplete } from "../shared.js";
+ import {
+   vResultValidator,
+   vWorkIdValidator,
+   Workpool,
+ } from "@convex-dev/workpool";
+ import { components } from "./_generated/api.js";
+
+ const workpool = new Workpool(components.workpool, {
+   retryActionsByDefault: true,
+   defaultRetryBehavior: {
+     maxAttempts: 3,
+     initialBackoffMs: 1000,
+     base: 2,
+   },
+   maxParallelism: 10,
+ });
+
+ export const addAsync = mutation({
+   args: {
+     entry: v.object({
+       ...omit(schema.tables.entries.validator.fields, ["version", "status"]),
+     }),
+     onComplete: v.optional(v.string()),
+     chunker: v.string(),
+   },
+   returns: v.object({
+     entryId: v.id("entries"),
+     status: vActiveStatus,
+     created: v.boolean(),
+   }),
+   handler: async (ctx, args) => {
+     const { namespaceId, key } = args.entry;
+     const namespace = await ctx.db.get(namespaceId);
+     assert(namespace, `Namespace ${namespaceId} not found`);
+     // iterate through the latest versions of the entry
+     const existing = await findExistingEntry(ctx, namespaceId, key);
+     if (
+       existing?.status.kind === "ready" &&
+       entryIsSame(existing, args.entry)
+     ) {
+       return {
+         entryId: existing._id,
+         status: existing.status.kind,
+         created: false,
+       };
+     }
+     const version = existing ? existing.version + 1 : 0;
+     const status: StatusWithOnComplete = {
+       kind: "pending",
+       onComplete: args.onComplete,
+     };
+     const entryId = await ctx.db.insert("entries", {
+       ...args.entry,
+       version,
+       status,
+     });
+     const chunkerAction = args.chunker as unknown as ChunkerAction;
+     // TODO: Cancel any existing chunker actions for this entry?
+     await workpool.enqueueAction(
+       ctx,
+       chunkerAction,
+       {
+         namespace: publicNamespace(namespace),
+         entry: publicEntry({
+           ...args.entry,
+           _id: entryId,
+           status: status,
+         }),
+         insertChunks: await createFunctionHandle(api.chunks.insert),
+       },
+       {
+         name: workpoolName(namespace.namespace, args.entry.key, entryId),
+         onComplete: internal.entries.addAsyncOnComplete,
+         context: entryId,
+       }
+     );
+     return { entryId, status: status.kind, created: true };
+   },
+ });
+
+ function workpoolName(
+   namespace: string,
+   key: string | undefined,
+   entryId: Id<"entries">
+ ) {
+   return `async-chunker-${namespace}-${key ? key + "-" + entryId : entryId}`;
+ }
+
+ export const addAsyncOnComplete = internalMutation({
+   args: {
+     workId: vWorkIdValidator,
+     context: v.id("entries"),
+     result: vResultValidator,
+   },
+   returns: v.null(),
+   handler: async (ctx, args) => {
+     const entryId = args.context;
+     const entry = await ctx.db.get(args.context);
+     if (!entry) {
+       console.error(
+         `Entry ${args.context} not found when trying to complete chunker for async add`
+       );
+       return;
+     }
+     if (args.result.kind === "success") {
+       await promoteToReadyHandler(ctx, { entryId });
+     } else {
+       // await deleteAsyncHandler(ctx, { entryId, startOrder: 0 });
+       const namespace = await ctx.db.get(entry.namespaceId);
+       assert(namespace, `Namespace ${entry.namespaceId} not found`);
+       if (entry.status.kind === "pending" && entry.status.onComplete) {
+         await runOnComplete(
+           ctx,
+           entry.status.onComplete,
+           namespace,
+           entry,
+           null,
+           args.result.kind === "canceled" ? "Canceled" : args.result.error
+         );
+       }
+     }
+   },
+ });
+
+ type AddEntryArgs = Pick<
+   Doc<"entries">,
+   "key" | "contentHash" | "importance" | "filterValues"
+ >;
+
+ async function findExistingEntry(
+   ctx: MutationCtx,
+   namespaceId: Id<"namespaces">,
+   key: string | undefined
+ ) {
+   if (!key) {
+     return null;
+   }
+   const existing = await mergedStream(
+     statuses.map((status) =>
+       stream(ctx.db, schema)
+         .query("entries")
+         .withIndex("namespaceId_status_key_version", (q) =>
+           q
+             .eq("namespaceId", namespaceId)
+             .eq("status.kind", status)
+             .eq("key", key)
+         )
+         .order("desc")
+     ),
+     ["version"]
+   ).first();
+   return existing;
+ }
+
+ export const add = mutation({
+   args: {
+     entry: v.object({
+       ...omit(schema.tables.entries.validator.fields, ["version", "status"]),
+     }),
+     onComplete: v.optional(v.string()),
+     // If we can commit all chunks at the same time, the status is "ready"
+     allChunks: v.optional(v.array(vCreateChunkArgs)),
+   },
+   returns: v.object({
+     entryId: v.id("entries"),
+     status: vStatus,
+     created: v.boolean(),
+     replacedVersion: v.union(vEntry, v.null()),
+   }),
+   handler: async (ctx, args) => {
+     const { namespaceId, key } = args.entry;
+     const namespace = await ctx.db.get(namespaceId);
+     assert(namespace, `Namespace ${namespaceId} not found`);
+     // iterate through the latest versions of the entry
+     const existing = await findExistingEntry(ctx, namespaceId, key);
+     if (
+       existing?.status.kind === "ready" &&
+       entryIsSame(existing, args.entry)
+     ) {
+       return {
+         entryId: existing._id,
+         status: existing.status.kind,
+         created: false,
+         replacedVersion: null,
+       };
+     }
+     const version = existing ? existing.version + 1 : 0;
+     const entryId = await ctx.db.insert("entries", {
+       ...args.entry,
+       version,
+       status: { kind: "pending", onComplete: args.onComplete },
+     });
+     if (args.allChunks) {
+       await insertChunks(ctx, {
+         entryId,
+         startOrder: 0,
+         chunks: args.allChunks,
+       });
+       const { replacedVersion } = await promoteToReadyHandler(ctx, {
+         entryId,
+       });
+       return {
+         entryId,
+         status: "ready" as const,
+         created: true,
+         replacedVersion,
+       };
+     }
+     return {
+       entryId,
+       status: "pending" as const,
+       created: true,
+       replacedVersion: null,
+     };
+   },
+ });
+
+ async function runOnComplete(
+   ctx: MutationCtx,
+   onComplete: string,
+   namespace: Doc<"namespaces">,
+   entry: Doc<"entries">,
+   replacedEntry: Doc<"entries"> | null,
+   error?: string
+ ) {
+   await ctx.runMutation(onComplete as unknown as OnComplete, {
+     namespace: publicNamespace(namespace),
+     entry: publicEntry(entry),
+     replacedEntry: replacedEntry ? publicEntry(replacedEntry) : undefined,
+     error,
+   });
+ }
+
+ function entryIsSame(existing: Doc<"entries">, newEntry: AddEntryArgs) {
+   if (!existing.contentHash || !newEntry.contentHash) {
+     return false;
+   }
+   if (existing.contentHash !== newEntry.contentHash) {
+     return false;
+   }
+   if (existing.importance !== newEntry.importance) {
+     return false;
+   }
+   if (newEntry.filterValues.length !== existing.filterValues.length) {
+     return false;
+   }
+   if (
+     !existing.filterValues.every((filter) =>
+       newEntry.filterValues.some(
+         (f) => f.name === filter.name && f.value === filter.value
+       )
+     )
+   ) {
+     return false;
+   }
+   return true;
+ }
+
+ /**
+  * Lists entries in order of their most recent change
+  */
+ export const list = query({
+   args: {
+     namespaceId: v.id("namespaces"),
+     order: v.optional(v.union(v.literal("desc"), v.literal("asc"))),
+     status: vStatus,
+     paginationOpts: paginationOptsValidator,
+   },
+   returns: vPaginationResult(vEntry),
+   handler: async (ctx, args) => {
+     const results = await stream(ctx.db, schema)
+       .query("entries")
+       .withIndex("status_namespaceId", (q) =>
+         q
+           .eq("status.kind", args.status ?? "ready")
+           .eq("namespaceId", args.namespaceId)
+       )
+       .order(args.order ?? "asc")
+       .paginate(args.paginationOpts);
+     return {
+       ...results,
+       page: results.page.map(publicEntry),
+     };
+   },
+ });
+
+ /**
+  * Gets a entry by its id.
+  */
+ export const get = query({
+   args: { entryId: v.id("entries") },
+   returns: v.union(vEntry, v.null()),
+   handler: async (ctx, args) => {
+     const entry = await ctx.db.get(args.entryId);
+     if (!entry) {
+       return null;
+     }
+     return publicEntry(entry);
+   },
+ });
+
+ /**
+  * Finds a entry by its key and content hash.
+  */
+ export const findByContentHash = query({
+   args: {
+     ...vNamespaceLookupArgs,
+     key: v.string(),
+     contentHash: v.string(),
+   },
+   returns: v.union(vEntry, v.null()),
+   handler: async (ctx, args) => {
+     const namespace = await getCompatibleNamespaceHandler(ctx, args);
+     if (!namespace) {
+       return null;
+     }
+     let attempts = 0;
+     for await (const entry of mergedStream(
+       statuses.map((status) =>
+         stream(ctx.db, schema)
+           .query("entries")
+           .withIndex("namespaceId_status_key_version", (q) =>
+             q
+               .eq("namespaceId", namespace._id)
+               .eq("status.kind", status)
+               .eq("key", args.key)
+           )
+           .order("desc")
+       ),
+       ["version"]
+     )) {
+       attempts++;
+       if (attempts > 20) {
+         console.debug(
+           `Giving up after checking ${attempts} entries for ${args.key} content hash ${args.contentHash}, returning null`
+         );
+         return null;
+       }
+       if (
+         entryIsSame(entry, {
+           key: args.key,
+           contentHash: args.contentHash,
+           filterValues: entry.filterValues,
+           importance: entry.importance,
+         })
+       ) {
+         return publicEntry(entry);
+       }
+     }
+     return null;
+   },
+ });
+
+ /**
+  * Promotes a entry to ready, replacing any existing ready entry by key.
+  * It will also call the associated onComplete function if it was pending.
+  * Note: this will not replace the chunks automatically, so you should first
+  * call `replaceChunksPage` on all its chunks.
+  * Edge case: if the entry has already been replaced, it will return the
+  * same entry (replacedVersion.entryId === args.entryId).
+  */
+ export const promoteToReady = mutation({
+   args: v.object({
+     entryId: v.id("entries"),
+   }),
+   returns: v.object({
+     replacedVersion: v.union(vEntry, v.null()),
+   }),
+   handler: promoteToReadyHandler,
+ });
+
+ async function promoteToReadyHandler(
+   ctx: MutationCtx,
+   args: { entryId: Id<"entries"> }
+ ) {
+   const entry = await ctx.db.get(args.entryId);
+   assert(entry, `Entry ${args.entryId} not found`);
+   const namespace = await ctx.db.get(entry.namespaceId);
+   assert(namespace, `Namespace for ${entry.namespaceId} not found`);
+   if (entry.status.kind === "ready") {
+     console.debug(`Entry ${args.entryId} is already ready, skipping...`);
+     return { replacedVersion: null };
+   } else if (entry.status.kind === "replaced") {
+     console.debug(
+       `Entry ${args.entryId} is already replaced, returning the current version...`
+     );
+     return { replacedVersion: publicEntry(entry) };
+   }
+   const previousEntry = await getPreviousEntry(ctx, entry);
+   // First mark the previous entry as replaced,
+   // so there are never two "ready" entries.
+   if (previousEntry) {
+     previousEntry.status = { kind: "replaced", replacedAt: Date.now() };
+     await ctx.db.replace(previousEntry._id, previousEntry);
+   }
+   const previousStatus = entry.status;
+   entry.status = { kind: "ready" };
+   // Only then mark the current entry as ready,
+   // so there are never two "ready" entries.
+   await ctx.db.replace(args.entryId, entry);
+   // Then run the onComplete function where it can observe itself as "ready".
+   if (previousStatus.kind === "pending" && previousStatus.onComplete) {
+     await runOnComplete(
+       ctx,
+       previousStatus.onComplete,
+       namespace,
+       entry,
+       previousEntry
+     );
+   }
+   // Then mark all previous pending entries as replaced,
+   // so they can observe the new entry and onComplete side-effects.
+   if (entry.key) {
+     const previousPendingEntries = await ctx.db
+       .query("entries")
+       .withIndex("namespaceId_status_key_version", (q) =>
+         q
+           .eq("namespaceId", entry.namespaceId)
+           .eq("status.kind", "pending")
+           .eq("key", entry.key)
+           .lt("version", entry.version)
+       )
+       .collect();
+     await Promise.all(
+       previousPendingEntries.map(async (entry) => {
+         const previousStatus = entry.status;
+         entry.status = { kind: "replaced", replacedAt: Date.now() };
+         await ctx.db.replace(entry._id, entry);
+         if (previousStatus.kind === "pending" && previousStatus.onComplete) {
+           await runOnComplete(
+             ctx,
+             previousStatus.onComplete,
+             namespace,
+             entry,
+             null
+           );
+         }
+       })
+     );
+   }
+   return {
+     replacedVersion: previousEntry ? publicEntry(previousEntry) : null,
+   };
+ }
+
+ export async function getPreviousEntry(ctx: QueryCtx, entry: Doc<"entries">) {
+   if (!entry.key) {
+     return null;
+   }
+   const previousEntry = await ctx.db
+     .query("entries")
+     .withIndex("namespaceId_status_key_version", (q) =>
+       q
+         .eq("namespaceId", entry.namespaceId)
+         .eq("status.kind", "ready")
+         .eq("key", entry.key)
+     )
+     .unique();
+   if (previousEntry?._id === entry._id) return null;
+   return previousEntry;
+ }
+
+ export function publicEntry(entry: {
+   _id: Id<"entries">;
+   key?: string | undefined;
+   importance: number;
+   filterValues: EntryFilterValues[];
+   contentHash?: string | undefined;
+   title?: string | undefined;
+   metadata?: Record<string, Value> | undefined;
+   status: StatusWithOnComplete;
+ }): Entry {
+   const { key, importance, filterValues, contentHash, title, metadata } = entry;
+
+   return {
+     entryId: entry._id as unknown as EntryId,
+     key,
+     title,
+     metadata,
+     importance,
+     filterValues,
+     contentHash,
+     status: entry.status.kind,
+   };
+ }
+
+ export const deleteAsync = mutation({
+   args: v.object({
+     entryId: v.id("entries"),
+     startOrder: v.number(),
+   }),
+   returns: v.null(),
+   handler: deleteAsyncHandler,
+ });
+
+ async function deleteAsyncHandler(
+   ctx: MutationCtx,
+   args: { entryId: Id<"entries">; startOrder: number }
+ ) {
+   const { entryId, startOrder } = args;
+   const entry = await ctx.db.get(entryId);
+   if (!entry) {
+     throw new Error(`Entry ${entryId} not found`);
+   }
+   const status = await deleteChunksPage(ctx, { entryId, startOrder });
+   if (status.isDone) {
+     await ctx.db.delete(entryId);
+   } else {
+     await workpool.enqueueMutation(ctx, api.entries.deleteAsync, {
+       entryId,
+       startOrder: status.nextStartOrder,
+     });
+   }
+ }
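
Note on the onComplete plumbing above: `add` and `addAsync` accept the callback as a plain string handle, and `runOnComplete` later invokes it with the public namespace, the public entry, the replaced entry (if any), and an optional error. The sketch below is not part of the package and not its documented client API; it is a rough, app-side illustration of a mutation shaped to receive that payload. The name `onEntryComplete` and the loose `v.any()` validators are assumptions; the real argument validators for this callback live in `src/shared.ts`, which is not shown in this hunk.

// Hypothetical app-side handler (e.g. convex/rag.ts in a consuming app).
// The payload shape mirrors the ctx.runMutation call in runOnComplete above.
import { internalMutation } from "./_generated/server";
import { v } from "convex/values";

export const onEntryComplete = internalMutation({
  args: {
    namespace: v.any(), // publicNamespace(namespace) from the component
    entry: v.any(), // publicEntry(entry): entryId, key, status, metadata, ...
    replacedEntry: v.optional(v.any()), // the prior version, when one was replaced
    error: v.optional(v.string()), // set when the async chunker failed or was canceled
  },
  returns: v.null(),
  handler: async (_ctx, args) => {
    if (args.error) {
      console.warn(`Entry ${args.entry.entryId} failed: ${args.error}`);
      return null;
    }
    // React to the entry becoming "ready", e.g. mark it in an app table.
    return null;
  },
});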
package/src/component/filters.ts
@@ -0,0 +1,119 @@
+ import {
+   type GenericId,
+   type Infer,
+   v,
+   type Value,
+   type VAny,
+   type VArray,
+   type VId,
+ } from "convex/values";
+
+ export const vFilterFieldValue = v.array(v.any()) as unknown as VArray<
+   [GenericId<"namespaces">, Value],
+   VId<"namespaces"> | VAny
+ >;
+ export type FilterFieldValue = Infer<typeof vFilterFieldValue>;
+
+ export const filterFieldNames = [
+   "filter0" as const,
+   "filter1" as const,
+   "filter2" as const,
+   "filter3" as const,
+ ];
+ export type NamedFilterField = {
+   [K in (typeof filterFieldNames)[number]]?: FilterFieldValue;
+ };
+
+ export type NumberedFilter = Record<number, Value>;
+
+ export const vAllFilterFields = {
+   namespaceId: v.id("namespaces"),
+   filter0: v.optional(vFilterFieldValue),
+   filter1: v.optional(vFilterFieldValue),
+   filter2: v.optional(vFilterFieldValue),
+   filter3: v.optional(vFilterFieldValue),
+ };
+
+ export const allFilterFieldNames = [
+   "namespaceId" as const,
+   ...filterFieldNames,
+ ];
+
+ export const vNamedFilter = v.object({
+   name: v.string(),
+   value: v.any(),
+ });
+
+ export type NamedFilter<K extends string = string, V = Value> = {
+   name: K;
+   value: V;
+ };
+
+ /**
+  * { 1: "foo", 2: "bar" }
+  * -> { filter1: ["namespace", "foo"], filter2: ["namespace", "bar"] }
+  */
+ export function filterFieldsFromNumbers(
+   namespaceId: GenericId<"namespaces">,
+   filters: NumberedFilter | undefined
+ ): NamedFilterField {
+   const filterFields: NamedFilterField = {};
+   if (!filters) return filterFields;
+   for (const [i, filter] of Object.entries(filters)) {
+     const index = Number(i);
+     if (isNaN(index) || index < 0 || index >= filterFieldNames.length) {
+       console.warn(
+         `Unknown filter index: ${index} for value ${JSON.stringify(filter)}`
+       );
+       break;
+     }
+     filterFields[filterFieldNames[index]] = [namespaceId, filter];
+   }
+   return filterFields;
+ }
+
+ /**
+  * [{ name: "Foo", value: "foo" }, { name: "Baz", value: "baz" }]
+  * -> { 0: "foo", 2: "baz" }
+  */
+ export function numberedFilterFromNamedFilters(
+   namedFilters: Array<{ name: string; value: Value }>,
+   filterNames: string[]
+ ): NumberedFilter {
+   const numberedFilter: NumberedFilter = {};
+   for (const namedFilter of namedFilters) {
+     const index = filterNames.indexOf(namedFilter.name);
+     if (index === -1) {
+       throw new Error(
+         `Unknown filter name: ${namedFilter.name} for namespace with names ${filterNames.join(
+           ", "
+         )}`
+       );
+     }
+     numberedFilter[index] = namedFilter.value;
+   }
+   return numberedFilter;
+ }
+
+ /**
+  * [{ name: "Foo", value: "foo" }, { name: "Baz", value: "baz" }]
+  * -> [{ 0: "foo" }, { 2: "baz" }]
+  */
+ export function numberedFiltersFromNamedFilters(
+   filters: NamedFilter[],
+   filterNames: string[]
+ ): Array<NumberedFilter> {
+   const filterFields: Array<NumberedFilter> = [];
+   for (const filter of filters) {
+     const index = filterNames.indexOf(filter.name);
+     if (index === -1) {
+       throw new Error(
+         `Unknown filter name: ${filter.name} for namespace with names ${filterNames.join(
+           ", "
+         )}`
+       );
+     }
+     filterFields.push({ [index]: filter.value });
+   }
+   return filterFields;
+ }
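
A small usage sketch of the conversion helpers above, following their own JSDoc examples. This is not part of the package: the `filterNames` array and the ambient `namespaceId` constant are assumptions standing in for a namespace configured with filter names ["Foo", "Bar", "Baz"], and the import path assumes the file shown above.

// Round-tripping filters through the helpers defined above (hypothetical usage).
import {
  filterFieldsFromNumbers,
  numberedFilterFromNamedFilters,
} from "./filters";
import type { GenericId } from "convex/values";

declare const namespaceId: GenericId<"namespaces">; // assumed to come from the namespaces table
const filterNames = ["Foo", "Bar", "Baz"]; // the namespace's configured filter names

// Named -> numbered, keyed by position in filterNames: { 0: "foo", 2: "baz" }
const numbered = numberedFilterFromNamedFilters(
  [
    { name: "Foo", value: "foo" },
    { name: "Baz", value: "baz" },
  ],
  filterNames
);

// Numbered -> indexed table fields, each value scoped to the namespace:
// { filter0: [namespaceId, "foo"], filter2: [namespaceId, "baz"] }
const fields = filterFieldsFromNumbers(namespaceId, numbered);
console.log(numbered, fields);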