@convex-dev/rag 0.3.2 → 0.3.3-alpha.1

This diff shows the content of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registries.
@@ -1,10 +1,10 @@
 /// <reference types="vite/client" />

-import { describe, expect, test } from "vitest";
+import { afterEach, beforeEach, describe, expect, test, vi } from "vitest";
 import { convexTest, type TestConvex } from "convex-test";
 import schema from "./schema.js";
-import { api } from "./_generated/api.js";
-import { modules } from "./setup.test.js";
+import { api, internal } from "./_generated/api.js";
+import { initConvexTest } from "./setup.test.js";
 import type { Id } from "./_generated/dataModel.js";

 type ConvexTest = TestConvex<typeof schema>;
@@ -21,6 +21,13 @@ describe("entries", () => {
     return namespace.namespaceId;
   }

+  beforeEach(async () => {
+    vi.useFakeTimers();
+  });
+  afterEach(() => {
+    vi.useRealTimers();
+  });
+
   function testEntryArgs(namespaceId: Id<"namespaces">, key = "test-entry") {
     return {
       namespaceId,
@@ -33,7 +40,7 @@ describe("entries", () => {
   }

   test("add creates a new entry when none exists", async () => {
-    const t = convexTest(schema, modules);
+    const t = initConvexTest();
     const namespaceId = await setupTestNamespace(t);

     const entry = testEntryArgs(namespaceId);
@@ -60,7 +67,7 @@ describe("entries", () => {
   });

   test("add returns existing entry when adding identical content", async () => {
-    const t = convexTest(schema, modules);
+    const t = initConvexTest();
     const namespaceId = await setupTestNamespace(t);

     const entry = testEntryArgs(namespaceId);
@@ -104,7 +111,7 @@ describe("entries", () => {
   });

   test("add creates new version when content hash changes", async () => {
-    const t = convexTest(schema, modules);
+    const t = initConvexTest();
     const namespaceId = await setupTestNamespace(t);

     const entry = testEntryArgs(namespaceId);
@@ -157,7 +164,7 @@ describe("entries", () => {
   });

   test("add creates new version when importance changes", async () => {
-    const t = convexTest(schema, modules);
+    const t = initConvexTest();
     const namespaceId = await setupTestNamespace(t);

     const entry = testEntryArgs(namespaceId);
@@ -195,7 +202,7 @@ describe("entries", () => {
   });

   test("add creates new version when filter values change", async () => {
-    const t = convexTest(schema, modules);
+    const t = initConvexTest();
     const namespaceId = await setupTestNamespace(t, ["category"]); // Add filter name

     const entry = testEntryArgs(namespaceId);
@@ -235,7 +242,7 @@ describe("entries", () => {
   });

   test("add without allChunks creates pending entry", async () => {
-    const t = convexTest(schema, modules);
+    const t = initConvexTest();
     const namespaceId = await setupTestNamespace(t);

     const entry = testEntryArgs(namespaceId);
@@ -258,7 +265,7 @@ describe("entries", () => {
   });

   test("multiple entries with different keys can coexist", async () => {
-    const t = convexTest(schema, modules);
+    const t = initConvexTest();
     const namespaceId = await setupTestNamespace(t);

     const entry1 = testEntryArgs(namespaceId, "doc1");
@@ -294,7 +301,7 @@ describe("entries", () => {
   });

   test("pending to ready transition populates replacedEntry", async () => {
-    const t = convexTest(schema, modules);
+    const t = initConvexTest();
     const namespaceId = await setupTestNamespace(t);

     const entry = testEntryArgs(namespaceId);
@@ -338,4 +345,404 @@ describe("entries", () => {
     });
     expect(firstDoc!.status.kind).toBe("replaced");
   });
+
+  test("deleteAsync deletes entry and all chunks", async () => {
+    const t = initConvexTest();
+    const namespaceId = await setupTestNamespace(t);
+
+    const entry = testEntryArgs(namespaceId);
+
+    // Create entry with chunks
+    const testChunks = [
+      {
+        content: { text: "chunk 1 content", metadata: { type: "text" } },
+        embedding: Array.from({ length: 128 }, () => Math.random()),
+        searchableText: "chunk 1 content",
+      },
+      {
+        content: { text: "chunk 2 content", metadata: { type: "text" } },
+        embedding: Array.from({ length: 128 }, () => Math.random()),
+        searchableText: "chunk 2 content",
+      },
+    ];
+
+    const result = await t.mutation(api.entries.add, {
+      entry,
+      allChunks: testChunks,
+    });
+
+    expect(result.created).toBe(true);
+    expect(result.status).toBe("ready");
+
+    // Verify entry and chunks exist before deletion
+    const entryBefore = await t.run(async (ctx) => {
+      return ctx.db.get(result.entryId);
+    });
+    expect(entryBefore).toBeDefined();
+
+    const chunksBefore = await t.run(async (ctx) => {
+      return ctx.db
+        .query("chunks")
+        .filter((q) => q.eq(q.field("entryId"), result.entryId))
+        .collect();
+    });
+    expect(chunksBefore).toHaveLength(2);
+
+    // Delete the entry
+    await t.mutation(api.entries.deleteAsync, {
+      entryId: result.entryId,
+      startOrder: 0,
+    });
+
+    // Wait for async deletion to complete by repeatedly checking
+    await t.finishInProgressScheduledFunctions();
+
+    // Verify entry is deleted
+    const entryAfter = await t.run(async (ctx) => {
+      return ctx.db.get(result.entryId);
+    });
+    expect(entryAfter).toBeNull();
+
+    // Verify chunks are deleted
+    const chunksAfter = await t.run(async (ctx) => {
+      return ctx.db
+        .query("chunks")
+        .filter((q) => q.eq(q.field("entryId"), result.entryId))
+        .collect();
+    });
+    expect(chunksAfter).toHaveLength(0);
+  });
+
+  test("deleteSync deletes entry and all chunks synchronously", async () => {
+    const t = initConvexTest();
+    const namespaceId = await setupTestNamespace(t);
+
+    const entry = testEntryArgs(namespaceId);
+
+    // Create entry with chunks
+    const testChunks = [
+      {
+        content: { text: "sync chunk 1", metadata: { type: "text" } },
+        embedding: Array.from({ length: 128 }, () => Math.random()),
+        searchableText: "sync chunk 1",
+      },
+      {
+        content: { text: "sync chunk 2", metadata: { type: "text" } },
+        embedding: Array.from({ length: 128 }, () => Math.random()),
+        searchableText: "sync chunk 2",
+      },
+    ];
+
+    const result = await t.mutation(api.entries.add, {
+      entry,
+      allChunks: testChunks,
+    });
+
+    expect(result.created).toBe(true);
+    expect(result.status).toBe("ready");
+
+    // Verify entry and chunks exist before deletion
+    const entryBefore = await t.run(async (ctx) => {
+      return ctx.db.get(result.entryId);
+    });
+    expect(entryBefore).toBeDefined();
+
+    const chunksBefore = await t.run(async (ctx) => {
+      return ctx.db
+        .query("chunks")
+        .filter((q) => q.eq(q.field("entryId"), result.entryId))
+        .collect();
+    });
+    expect(chunksBefore).toHaveLength(2);
+
+    // Delete the entry synchronously
+    await t.action(api.entries.deleteSync, {
+      entryId: result.entryId,
+    });
+
+    // Verify entry is deleted
+    const entryAfter = await t.run(async (ctx) => {
+      return ctx.db.get(result.entryId);
+    });
+    expect(entryAfter).toBeNull();
+
+    // Verify chunks are deleted
+    const chunksAfter = await t.run(async (ctx) => {
+      return ctx.db
+        .query("chunks")
+        .filter((q) => q.eq(q.field("entryId"), result.entryId))
+        .collect();
+    });
+    expect(chunksAfter).toHaveLength(0);
+  });
+
+  test("deleteByKeyAsync deletes all entries with the given key", async () => {
+    const t = initConvexTest();
+    const namespaceId = await setupTestNamespace(t);
+
+    const entry1 = testEntryArgs(namespaceId, "shared-key");
+    const entry2 = {
+      ...testEntryArgs(namespaceId, "shared-key"),
+      contentHash: "hash456",
+    };
+    const entry3 = testEntryArgs(namespaceId, "different-key");
+
+    // Create multiple entries with same key and one with different key
+    const result1 = await t.mutation(api.entries.add, {
+      entry: entry1,
+      allChunks: [
+        {
+          content: { text: "content 1" },
+          embedding: Array.from({ length: 128 }, () => Math.random()),
+        },
+      ],
+    });
+
+    const result2 = await t.mutation(api.entries.add, {
+      entry: entry2,
+      allChunks: [
+        {
+          content: { text: "content 2" },
+          embedding: Array.from({ length: 128 }, () => Math.random()),
+        },
+      ],
+    });
+
+    const result3 = await t.mutation(api.entries.add, {
+      entry: entry3,
+      allChunks: [
+        {
+          content: { text: "content 3" },
+          embedding: Array.from({ length: 128 }, () => Math.random()),
+        },
+      ],
+    });
+
+    // Verify all entries exist
+    const entriesBefore = await t.run(async (ctx) => {
+      return ctx.db
+        .query("entries")
+        .filter((q) => q.eq(q.field("namespaceId"), namespaceId))
+        .collect();
+    });
+    expect(entriesBefore).toHaveLength(3);
+    const sharedBefore = await t.query(
+      internal.entries.getEntriesForNamespaceByKey,
+      {
+        namespaceId,
+        key: "shared-key",
+      }
+    );
+    expect(sharedBefore).toHaveLength(2);
+
+    // Delete entries by key
+    await t.mutation(api.entries.deleteByKeyAsync, {
+      namespaceId,
+      key: "shared-key",
+    });
+
+    // Wait for async deletion to complete
+    await t.finishAllScheduledFunctions(vi.runAllTimers);
+
+    // Verify only entries with "shared-key" are deleted
+    const entriesAfter = await t.run(async (ctx) => {
+      return ctx.db
+        .query("entries")
+        .filter((q) => q.eq(q.field("namespaceId"), namespaceId))
+        .collect();
+    });
+    expect(entriesAfter).toHaveLength(1);
+    expect(entriesAfter[0].key).toBe("different-key");
+    expect(entriesAfter[0]._id).toBe(result3.entryId);
+
+    const sharedAfter = await t.query(
+      internal.entries.getEntriesForNamespaceByKey,
+      { namespaceId, key: "shared-key" }
+    );
+    expect(sharedAfter).toHaveLength(0);
+
+    // Verify chunks from deleted entries are also deleted
+    const chunksAfter = await t.run(async (ctx) => {
+      return ctx.db.query("chunks").collect();
+    });
+    expect(chunksAfter).toHaveLength(1); // Only chunk from entry3 should remain
+  });
+
+  test("deleteByKeySync deletes all entries with the given key synchronously", async () => {
+    const t = initConvexTest();
+    const namespaceId = await setupTestNamespace(t);
+
+    const entry1 = testEntryArgs(namespaceId, "sync-key");
+    const entry2 = {
+      ...testEntryArgs(namespaceId, "sync-key"),
+      contentHash: "hash789",
+    };
+    const entry3 = testEntryArgs(namespaceId, "keep-key");
+
+    // Create multiple entries with same key and one with different key
+    const result1 = await t.mutation(api.entries.add, {
+      entry: entry1,
+      allChunks: [
+        {
+          content: { text: "sync content 1" },
+          embedding: Array.from({ length: 128 }, () => Math.random()),
+        },
+      ],
+    });
+
+    const result2 = await t.mutation(api.entries.add, {
+      entry: entry2,
+      allChunks: [
+        {
+          content: { text: "sync content 2" },
+          embedding: Array.from({ length: 128 }, () => Math.random()),
+        },
+      ],
+    });
+
+    const result3 = await t.mutation(api.entries.add, {
+      entry: entry3,
+      allChunks: [
+        {
+          content: { text: "sync content 3" },
+          embedding: Array.from({ length: 128 }, () => Math.random()),
+        },
+      ],
+    });
+
+    // Verify all entries exist
+    const entriesBefore = await t.run(async (ctx) => {
+      return ctx.db
+        .query("entries")
+        .filter((q) => q.eq(q.field("namespaceId"), namespaceId))
+        .collect();
+    });
+    expect(entriesBefore).toHaveLength(3);
+
+    // Delete entries by key synchronously
+    await t.action(api.entries.deleteByKeySync, {
+      namespaceId,
+      key: "sync-key",
+    });
+
+    // Verify only entries with "sync-key" are deleted
+    const entriesAfter = await t.run(async (ctx) => {
+      return ctx.db
+        .query("entries")
+        .filter((q) => q.eq(q.field("namespaceId"), namespaceId))
+        .collect();
+    });
+    expect(entriesAfter).toHaveLength(1);
+    expect(entriesAfter[0].key).toBe("keep-key");
+    expect(entriesAfter[0]._id).toBe(result3.entryId);
+
+    // Verify chunks from deleted entries are also deleted
+    const chunksAfter = await t.run(async (ctx) => {
+      return ctx.db.query("chunks").collect();
+    });
+    expect(chunksAfter).toHaveLength(1); // Only chunk from entry3 should remain
+  });
+
+  test("deleteByKeyAsync handles entries without key gracefully", async () => {
+    const t = initConvexTest();
+    const namespaceId = await setupTestNamespace(t);
+
+    const entryWithKey = testEntryArgs(namespaceId, "has-key");
+    const entryWithoutKey = { ...testEntryArgs(namespaceId), key: undefined };
+
+    // Create entries
+    const result1 = await t.mutation(api.entries.add, {
+      entry: entryWithKey,
+      allChunks: [],
+    });
+
+    const result2 = await t.mutation(api.entries.add, {
+      entry: entryWithoutKey,
+      allChunks: [],
+    });
+
+    // Delete by key - should only affect entries with that key
+    await t.mutation(api.entries.deleteByKeyAsync, {
+      namespaceId,
+      key: "has-key",
+    });
+
+    await t.finishAllScheduledFunctions(vi.runAllTimers);
+
+    // Verify only the entry with the specified key is deleted
+    const entriesAfter = await t.run(async (ctx) => {
+      return ctx.db
+        .query("entries")
+        .filter((q) => q.eq(q.field("namespaceId"), namespaceId))
+        .collect();
+    });
+    expect(entriesAfter).toHaveLength(1);
+    expect(entriesAfter[0]._id).toBe(result2.entryId);
+    expect(entriesAfter[0].key).toBeUndefined();
+  });
+
+  test("deleteByKeyAsync with beforeVersion parameter", async () => {
+    const t = initConvexTest();
+    const namespaceId = await setupTestNamespace(t);
+
+    const entry = testEntryArgs(namespaceId, "versioned-key");
+
+    // Create multiple versions of the same entry
+    const result1 = await t.mutation(api.entries.add, {
+      entry,
+      allChunks: [],
+    });
+
+    const result2 = await t.mutation(api.entries.add, {
+      entry: { ...entry, contentHash: "hash456" },
+      allChunks: [],
+    });
+
+    const result3 = await t.mutation(api.entries.add, {
+      entry: { ...entry, contentHash: "hash789" },
+      allChunks: [],
+    });
+
+    // Get the versions to understand ordering
+    const allEntries = await t.run(async (ctx) => {
+      return ctx.db
+        .query("entries")
+        .filter((q) =>
+          q.and(
+            q.eq(q.field("namespaceId"), namespaceId),
+            q.eq(q.field("key"), "versioned-key")
+          )
+        )
+        .collect();
+    });
+
+    const sortedEntries = allEntries.sort((a, b) => a.version - b.version);
+    expect(sortedEntries).toHaveLength(3);
+
+    // Delete entries before version 2 (should delete version 0 and 1)
+    await t.mutation(api.entries.deleteByKeyAsync, {
+      namespaceId,
+      key: "versioned-key",
+      beforeVersion: 2,
+    });
+
+    await t.finishAllScheduledFunctions(vi.runAllTimers);
+
+    // Should only have the latest version (version 2) remaining
+    const remainingEntries = await t.run(async (ctx) => {
+      return ctx.db
+        .query("entries")
+        .filter((q) =>
+          q.and(
+            q.eq(q.field("namespaceId"), namespaceId),
+            q.eq(q.field("key"), "versioned-key")
+          )
+        )
+        .collect();
+    });
+
+    expect(remainingEntries).toHaveLength(1);
+    expect(remainingEntries[0].version).toBe(2);
+    expect(remainingEntries[0]._id).toBe(result3.entryId);
+  });
 });
@@ -14,13 +14,15 @@ import {
 import { api, internal } from "./_generated/api.js";
 import type { Doc, Id } from "./_generated/dataModel.js";
 import {
+  action,
   internalMutation,
+  internalQuery,
   mutation,
   query,
   type MutationCtx,
   type QueryCtx,
 } from "./_generated/server.js";
-import { deleteChunksPage, insertChunks } from "./chunks.js";
+import { deleteChunksPageHandler, insertChunks } from "./chunks.js";
 import schema, { type StatusWithOnComplete } from "./schema.js";
 import { mergedStream } from "convex-helpers/server/stream";
 import { stream } from "convex-helpers/server/stream";
@@ -36,6 +38,7 @@ import {
   Workpool,
 } from "@convex-dev/workpool";
 import { components } from "./_generated/api.js";
+import { doc } from "convex-helpers/validators";

 const workpool = new Workpool(components.workpool, {
   retryActionsByDefault: true,
@@ -546,7 +549,7 @@ async function deleteAsyncHandler(
   if (!entry) {
     throw new Error(`Entry ${entryId} not found`);
   }
-  const status = await deleteChunksPage(ctx, { entryId, startOrder });
+  const status = await deleteChunksPageHandler(ctx, { entryId, startOrder });
   if (status.isDone) {
     await ctx.db.delete(entryId);
   } else {
@@ -556,3 +559,110 @@ async function deleteAsyncHandler(
     });
   }
 }
+
+export const deleteSync = action({
+  args: { entryId: v.id("entries") },
+  returns: v.null(),
+  handler: async (ctx, { entryId }) => {
+    let startOrder = 0;
+    while (true) {
+      const status = await ctx.runMutation(internal.chunks.deleteChunksPage, {
+        entryId,
+        startOrder,
+      });
+      if (status.isDone) {
+        await ctx.runMutation(internal.entries._del, { entryId });
+        break;
+      }
+      startOrder = status.nextStartOrder;
+    }
+  },
+});
+
+export const _del = internalMutation({
+  args: { entryId: v.id("entries") },
+  returns: v.null(),
+  handler: async (ctx, args) => {
+    await ctx.db.delete(args.entryId);
+  },
+});
+
+export const deleteByKeyAsync = mutation({
+  args: v.object({
+    namespaceId: v.id("namespaces"),
+    key: v.string(),
+    beforeVersion: v.optional(v.number()),
+  }),
+  returns: v.null(),
+  handler: async (ctx, args) => {
+    const entries = await getEntriesByKey(ctx, args);
+    for await (const entry of entries) {
+      await workpool.enqueueMutation(ctx, api.entries.deleteAsync, {
+        entryId: entry._id,
+        startOrder: 0,
+      });
+    }
+    if (entries.length === 100) {
+      await workpool.enqueueMutation(ctx, api.entries.deleteByKeyAsync, {
+        namespaceId: args.namespaceId,
+        key: args.key,
+        beforeVersion: entries[entries.length - 1].version,
+      });
+    }
+  },
+});
+
+async function getEntriesByKey(
+  ctx: QueryCtx,
+  args: { namespaceId: Id<"namespaces">; key: string; beforeVersion?: number }
+): Promise<Doc<"entries">[]> {
+  return mergedStream(
+    statuses.map((status) =>
+      stream(ctx.db, schema)
+        .query("entries")
+        .withIndex("namespaceId_status_key_version", (q) =>
+          q
+            .eq("namespaceId", args.namespaceId)
+            .eq("status.kind", status)
+            .eq("key", args.key)
+            .lt("version", args.beforeVersion ?? Infinity)
+        )
+        .order("desc")
+    ),
+    ["version"]
+  ).take(100);
+}
+
+export const getEntriesForNamespaceByKey = internalQuery({
+  args: {
+    namespaceId: v.id("namespaces"),
+    key: v.string(),
+    beforeVersion: v.optional(v.number()),
+  },
+  returns: v.array(doc(schema, "entries")),
+  handler: getEntriesByKey,
+});
+
+export const deleteByKeySync = action({
+  args: {
+    namespaceId: v.id("namespaces"),
+    key: v.string(),
+  },
+  returns: v.null(),
+  handler: async (ctx, args) => {
+    while (true) {
+      const entries: Doc<"entries">[] = await ctx.runQuery(
+        internal.entries.getEntriesForNamespaceByKey,
+        { namespaceId: args.namespaceId, key: args.key }
+      );
+      for await (const entry of entries) {
+        await ctx.runAction(api.entries.deleteSync, {
+          entryId: entry._id,
+        });
+      }
+      if (entries.length <= 100) {
+        break;
+      }
+    }
+  },
+});
@@ -1,5 +1,20 @@
 /// <reference types="vite/client" />
 import { test } from "vitest";
+import { convexTest } from "convex-test";
+import schema from "./schema.js";
 export const modules = import.meta.glob("./**/*.*s");

+// Sorry about everything
+import componentSchema from "../../node_modules/@convex-dev/workpool/src/component/schema.js";
+export { componentSchema };
+export const componentModules = import.meta.glob(
+  "../../node_modules/@convex-dev/workpool/src/component/**/*.ts"
+);
+
+export function initConvexTest() {
+  const t = convexTest(schema, modules);
+  t.registerComponent("workpool", componentSchema, componentModules);
+  return t;
+}
+
 test("setup", () => {});
@@ -0,0 +1 @@
+{"version":"3.2.4","results":[[":component/chunks.test.ts",{"duration":0,"failed":false}]]}