@kernl-sdk/pg 0.1.10 → 0.1.12
This diff reflects the changes between publicly released versions of the package as they appear in their public registry, and is provided for informational purposes only.
- package/.turbo/turbo-build.log +4 -5
- package/.turbo/turbo-check-types.log +36 -0
- package/CHANGELOG.md +41 -0
- package/README.md +124 -0
- package/dist/__tests__/integration.test.js +81 -1
- package/dist/__tests__/memory-integration.test.d.ts +2 -0
- package/dist/__tests__/memory-integration.test.d.ts.map +1 -0
- package/dist/__tests__/memory-integration.test.js +287 -0
- package/dist/__tests__/memory.test.d.ts +2 -0
- package/dist/__tests__/memory.test.d.ts.map +1 -0
- package/dist/__tests__/memory.test.js +357 -0
- package/dist/index.d.ts +5 -3
- package/dist/index.d.ts.map +1 -1
- package/dist/index.js +5 -3
- package/dist/memory/sql.d.ts +30 -0
- package/dist/memory/sql.d.ts.map +1 -0
- package/dist/memory/sql.js +100 -0
- package/dist/memory/store.d.ts +41 -0
- package/dist/memory/store.d.ts.map +1 -0
- package/dist/memory/store.js +114 -0
- package/dist/migrations.d.ts +1 -1
- package/dist/migrations.d.ts.map +1 -1
- package/dist/migrations.js +9 -3
- package/dist/pgvector/__tests__/handle.test.d.ts +2 -0
- package/dist/pgvector/__tests__/handle.test.d.ts.map +1 -0
- package/dist/pgvector/__tests__/handle.test.js +277 -0
- package/dist/pgvector/__tests__/hit.test.d.ts +2 -0
- package/dist/pgvector/__tests__/hit.test.d.ts.map +1 -0
- package/dist/pgvector/__tests__/hit.test.js +134 -0
- package/dist/pgvector/__tests__/integration/document.integration.test.d.ts +7 -0
- package/dist/pgvector/__tests__/integration/document.integration.test.d.ts.map +1 -0
- package/dist/pgvector/__tests__/integration/document.integration.test.js +587 -0
- package/dist/pgvector/__tests__/integration/edge.integration.test.d.ts +8 -0
- package/dist/pgvector/__tests__/integration/edge.integration.test.d.ts.map +1 -0
- package/dist/pgvector/__tests__/integration/edge.integration.test.js +663 -0
- package/dist/pgvector/__tests__/integration/filters.integration.test.d.ts +8 -0
- package/dist/pgvector/__tests__/integration/filters.integration.test.d.ts.map +1 -0
- package/dist/pgvector/__tests__/integration/filters.integration.test.js +609 -0
- package/dist/pgvector/__tests__/integration/lifecycle.integration.test.d.ts +8 -0
- package/dist/pgvector/__tests__/integration/lifecycle.integration.test.d.ts.map +1 -0
- package/dist/pgvector/__tests__/integration/lifecycle.integration.test.js +449 -0
- package/dist/pgvector/__tests__/integration/query.integration.test.d.ts +8 -0
- package/dist/pgvector/__tests__/integration/query.integration.test.d.ts.map +1 -0
- package/dist/pgvector/__tests__/integration/query.integration.test.js +544 -0
- package/dist/pgvector/__tests__/search.test.d.ts +2 -0
- package/dist/pgvector/__tests__/search.test.d.ts.map +1 -0
- package/dist/pgvector/__tests__/search.test.js +279 -0
- package/dist/pgvector/handle.d.ts +60 -0
- package/dist/pgvector/handle.d.ts.map +1 -0
- package/dist/pgvector/handle.js +213 -0
- package/dist/pgvector/hit.d.ts +10 -0
- package/dist/pgvector/hit.d.ts.map +1 -0
- package/dist/pgvector/hit.js +44 -0
- package/dist/pgvector/index.d.ts +7 -0
- package/dist/pgvector/index.d.ts.map +1 -0
- package/dist/pgvector/index.js +5 -0
- package/dist/pgvector/search.d.ts +60 -0
- package/dist/pgvector/search.d.ts.map +1 -0
- package/dist/pgvector/search.js +227 -0
- package/dist/pgvector/sql/__tests__/limit.test.d.ts +2 -0
- package/dist/pgvector/sql/__tests__/limit.test.d.ts.map +1 -0
- package/dist/pgvector/sql/__tests__/limit.test.js +161 -0
- package/dist/pgvector/sql/__tests__/order.test.d.ts +2 -0
- package/dist/pgvector/sql/__tests__/order.test.d.ts.map +1 -0
- package/dist/pgvector/sql/__tests__/order.test.js +218 -0
- package/dist/pgvector/sql/__tests__/query.test.d.ts +2 -0
- package/dist/pgvector/sql/__tests__/query.test.d.ts.map +1 -0
- package/dist/pgvector/sql/__tests__/query.test.js +392 -0
- package/dist/pgvector/sql/__tests__/select.test.d.ts +2 -0
- package/dist/pgvector/sql/__tests__/select.test.d.ts.map +1 -0
- package/dist/pgvector/sql/__tests__/select.test.js +293 -0
- package/dist/pgvector/sql/__tests__/where.test.d.ts +2 -0
- package/dist/pgvector/sql/__tests__/where.test.d.ts.map +1 -0
- package/dist/pgvector/sql/__tests__/where.test.js +488 -0
- package/dist/pgvector/sql/index.d.ts +7 -0
- package/dist/pgvector/sql/index.d.ts.map +1 -0
- package/dist/pgvector/sql/index.js +6 -0
- package/dist/pgvector/sql/limit.d.ts +8 -0
- package/dist/pgvector/sql/limit.d.ts.map +1 -0
- package/dist/pgvector/sql/limit.js +20 -0
- package/dist/pgvector/sql/order.d.ts +9 -0
- package/dist/pgvector/sql/order.d.ts.map +1 -0
- package/dist/pgvector/sql/order.js +47 -0
- package/dist/pgvector/sql/query.d.ts +46 -0
- package/dist/pgvector/sql/query.d.ts.map +1 -0
- package/dist/pgvector/sql/query.js +54 -0
- package/dist/pgvector/sql/schema.d.ts +16 -0
- package/dist/pgvector/sql/schema.d.ts.map +1 -0
- package/dist/pgvector/sql/schema.js +47 -0
- package/dist/pgvector/sql/select.d.ts +11 -0
- package/dist/pgvector/sql/select.d.ts.map +1 -0
- package/dist/pgvector/sql/select.js +87 -0
- package/dist/pgvector/sql/where.d.ts +8 -0
- package/dist/pgvector/sql/where.d.ts.map +1 -0
- package/dist/pgvector/sql/where.js +137 -0
- package/dist/pgvector/types.d.ts +20 -0
- package/dist/pgvector/types.d.ts.map +1 -0
- package/dist/pgvector/types.js +1 -0
- package/dist/pgvector/utils.d.ts +18 -0
- package/dist/pgvector/utils.d.ts.map +1 -0
- package/dist/pgvector/utils.js +22 -0
- package/dist/postgres.d.ts +19 -26
- package/dist/postgres.d.ts.map +1 -1
- package/dist/postgres.js +15 -27
- package/dist/storage.d.ts +62 -0
- package/dist/storage.d.ts.map +1 -1
- package/dist/storage.js +55 -10
- package/dist/thread/sql.d.ts +38 -0
- package/dist/thread/sql.d.ts.map +1 -0
- package/dist/thread/sql.js +112 -0
- package/dist/thread/store.d.ts +7 -3
- package/dist/thread/store.d.ts.map +1 -1
- package/dist/thread/store.js +46 -105
- package/package.json +8 -5
- package/src/__tests__/integration.test.ts +114 -15
- package/src/__tests__/memory-integration.test.ts +355 -0
- package/src/__tests__/memory.test.ts +428 -0
- package/src/index.ts +19 -3
- package/src/memory/sql.ts +141 -0
- package/src/memory/store.ts +166 -0
- package/src/migrations.ts +13 -3
- package/src/pgvector/README.md +50 -0
- package/src/pgvector/__tests__/handle.test.ts +335 -0
- package/src/pgvector/__tests__/hit.test.ts +165 -0
- package/src/pgvector/__tests__/integration/document.integration.test.ts +717 -0
- package/src/pgvector/__tests__/integration/edge.integration.test.ts +835 -0
- package/src/pgvector/__tests__/integration/filters.integration.test.ts +721 -0
- package/src/pgvector/__tests__/integration/lifecycle.integration.test.ts +570 -0
- package/src/pgvector/__tests__/integration/query.integration.test.ts +667 -0
- package/src/pgvector/__tests__/search.test.ts +366 -0
- package/src/pgvector/handle.ts +285 -0
- package/src/pgvector/hit.ts +56 -0
- package/src/pgvector/index.ts +7 -0
- package/src/pgvector/search.ts +330 -0
- package/src/pgvector/sql/__tests__/limit.test.ts +180 -0
- package/src/pgvector/sql/__tests__/order.test.ts +248 -0
- package/src/pgvector/sql/__tests__/query.test.ts +548 -0
- package/src/pgvector/sql/__tests__/select.test.ts +367 -0
- package/src/pgvector/sql/__tests__/where.test.ts +554 -0
- package/src/pgvector/sql/index.ts +14 -0
- package/src/pgvector/sql/limit.ts +29 -0
- package/src/pgvector/sql/order.ts +55 -0
- package/src/pgvector/sql/query.ts +112 -0
- package/src/pgvector/sql/schema.ts +61 -0
- package/src/pgvector/sql/select.ts +100 -0
- package/src/pgvector/sql/where.ts +152 -0
- package/src/pgvector/types.ts +21 -0
- package/src/pgvector/utils.ts +24 -0
- package/src/postgres.ts +31 -33
- package/src/storage.ts +102 -11
- package/src/thread/sql.ts +159 -0
- package/src/thread/store.ts +58 -127
- package/tsconfig.tsbuildinfo +1 -0
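
Taken together, the new `memory/*` and `pgvector/*` modules add a pgvector-backed memory store alongside the existing thread store. The sketch below is reconstructed from the new memory-integration tests shown further down, not from the package's documentation; the `@kernl-sdk/pg` import specifier, the `DATABASE_URL` variable, and the agent/namespace names are assumptions (the tests import from a relative path, read `KERNL_PG_TEST_URL`, and register a stub model).

```ts
import { Pool } from "pg";
import { Kernl, Agent } from "kernl";
import type { LanguageModel } from "@kernl-sdk/protocol";
import { postgres, pgvector } from "@kernl-sdk/pg"; // the tests import these from "../index"
import "@kernl-sdk/ai/openai"; // registers the OpenAI embedding provider, as in the tests

const pool = new Pool({ connectionString: process.env.DATABASE_URL });

const kernl = new Kernl({
  storage: {
    db: postgres({ pool }),     // relational store: threads, memories, migrations
    vector: pgvector({ pool }), // pgvector-backed search index
  },
  memory: {
    embeddingModel: "openai/text-embedding-3-small",
    dimensions: 1536,
  },
});

// The tests register an agent before initializing storage; this stub mirrors
// their fixture (a real LanguageModel would come from a provider package).
const model = {
  spec: "1.0" as const,
  provider: "test",
  modelId: "test-model",
} as unknown as LanguageModel;

kernl.register(
  new Agent({
    id: "docs-agent",
    name: "Docs Agent",
    instructions: () => "You answer questions.",
    model,
  }),
);

await kernl.storage.init(); // creates the "kernl" schema and tables

await kernl.memories.create({
  id: "m1",
  scope: { namespace: "docs", agentId: "docs-agent" },
  kind: "semantic",
  collection: "facts",
  content: { text: "The user loves TypeScript programming" },
});

const hits = await kernl.memories.search({
  query: "programming languages",
  filter: { scope: { namespace: "docs" } },
  limit: 5,
});
console.log(hits.map((h) => h.document?.id)); // expected to include "m1"
```

The new auto-initialization test in the same diff notes that every store method calls an internal `ensureInit()` before touching the database, so the explicit `init()` call above should be optional.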
package/src/__tests__/integration.test.ts

@@ -10,11 +10,110 @@ import {
   type ModelRegistry,
 } from "kernl";
 import { Thread } from "kernl/internal";
+import type { LanguageModel } from "@kernl-sdk/protocol";
 
 const TEST_DB_URL = process.env.KERNL_PG_TEST_URL;
 
-
-
+describe.sequential("PGStorage auto-initialization", () => {
+  if (!TEST_DB_URL) {
+    it.skip("requires KERNL_PG_TEST_URL to be set", () => {});
+    return;
+  }
+
+  /**
+   * Verifies that ALL store methods auto-initialize without explicit init() call.
+   *
+   * This is critical for DX - users should not need to remember to call init().
+   * Each method must internally call ensureInit() before any DB operation.
+   *
+   * Methods covered: get, list, insert, update, delete, history, append
+   */
+  it("auto-initializes on first store operation (no explicit init required)", async () => {
+    const pool = new Pool({ connectionString: TEST_DB_URL });
+    const storage = new PGStorage({ pool });
+
+    // Clean slate - drop schema to prove init runs automatically
+    await pool.query('DROP SCHEMA IF EXISTS "kernl" CASCADE');
+
+    // Bind minimal registries
+    const model = {
+      spec: "1.0" as const,
+      provider: "test",
+      modelId: "auto-init-model",
+    } as unknown as LanguageModel;
+
+    const agent = new Agent({
+      id: "auto-init-agent",
+      name: "Auto Init Agent",
+      instructions: () => "test",
+      model,
+    });
+
+    const agents: AgentRegistry = new Map([["auto-init-agent", agent]]);
+    const models: ModelRegistry = new Map([
+      ["test/auto-init-model", model],
+    ]) as unknown as ModelRegistry;
+
+    storage.bind({ agents, models });
+    const store = storage.threads;
+    const tid = "auto-init-thread";
+
+    // 1) list() - should auto-init
+    const threads = await store.list();
+    expect(threads).toEqual([]);
+
+    // 2) get() - should work (returns null for non-existent)
+    const got = await store.get(tid);
+    expect(got).toBeNull();
+
+    // 3) insert() - should work
+    const inserted = await store.insert({
+      id: tid,
+      namespace: "kernl",
+      agentId: "auto-init-agent",
+      model: "test/auto-init-model",
+    });
+    expect(inserted.tid).toBe(tid);
+
+    // 4) update() - should work
+    await store.update(tid, { tick: 1 });
+    const tickResult = await pool.query<{ tick: number }>(
+      `SELECT tick FROM "kernl"."threads" WHERE id = $1`,
+      [tid],
+    );
+    expect(tickResult.rows[0]?.tick).toBe(1);
+
+    // 5) history() - should work (empty)
+    const hist = await store.history(tid);
+    expect(hist).toEqual([]);
+
+    // 6) append() - should work
+    await store.append([
+      {
+        id: "evt-1",
+        tid,
+        seq: 0,
+        kind: "message",
+        timestamp: new Date(),
+        data: { role: "user", text: "test" },
+        metadata: null,
+      } as any,
+    ]);
+
+    // 7) delete() - should work
+    await store.delete(tid);
+    const afterDelete = await store.get(tid);
+    expect(afterDelete).toBeNull();
+
+    // Verify schema was created
+    const schemaResult = await pool.query(
+      `SELECT schema_name FROM information_schema.schemata WHERE schema_name = 'kernl'`,
+    );
+    expect(schemaResult.rows).toHaveLength(1);
+
+    await storage.close();
+  });
+});
 
 describe.sequential("PGStorage integration", () => {
   if (!TEST_DB_URL) {
@@ -71,7 +170,7 @@ describe.sequential("PGStorage integration", () => {
       `SELECT id FROM "kernl".migrations ORDER BY applied_at ASC`,
     );
     const appliedMigrationIds = migrationsResult.rows.map((row) => row.id);
-    expect(appliedMigrationIds).toEqual(["
+    expect(appliedMigrationIds).toEqual(["001_threads", "002_memories"]);
 
     // ---- verify indexes created by table definitions ----
     const indexesResult = await pool.query<{
@@ -206,7 +305,7 @@ describe.sequential("PGStorage integration", () => {
       spec: "1.0" as const,
       provider: "test",
       modelId: "test-model",
-    } as unknown as
+    } as unknown as LanguageModel;
 
     const agent = new Agent({
       id: "agent-1",
@@ -216,7 +315,7 @@ describe.sequential("PGStorage integration", () => {
     });
 
     const agents: AgentRegistry = new Map<string, Agent>([["agent-1", agent]]);
-    const models: ModelRegistry = new Map<string,
+    const models: ModelRegistry = new Map<string, LanguageModel>([
       ["provider/model", model],
     ]) as unknown as ModelRegistry;
 
@@ -402,7 +501,7 @@ describe.sequential("PGStorage integration", () => {
       spec: "1.0" as const,
       provider: "test",
       modelId: "test-model",
-    } as unknown as
+    } as unknown as LanguageModel;
 
     const agent = new Agent({
       id: "agent-1",
@@ -412,7 +511,7 @@ describe.sequential("PGStorage integration", () => {
     });
 
     const agents: AgentRegistry = new Map<string, Agent>([["agent-1", agent]]);
-    const models: ModelRegistry = new Map<string,
+    const models: ModelRegistry = new Map<string, LanguageModel>([
       ["provider/model", model],
     ]) as unknown as ModelRegistry;
 
@@ -475,7 +574,7 @@ describe.sequential("PGStorage integration", () => {
       modelId: "test-model",
       // generate/stream are not used in this test - we only advance stream
      // far enough to trigger the first checkpoint.
-    } as unknown as
+    } as unknown as LanguageModel;
 
     const agent = new Agent({
       id: "agent-1",
@@ -485,7 +584,7 @@ describe.sequential("PGStorage integration", () => {
     });
 
     const agents: AgentRegistry = new Map<string, Agent>([["agent-1", agent]]);
-    const models: ModelRegistry = new Map<string,
+    const models: ModelRegistry = new Map<string, LanguageModel>([
       ["provider/model", model],
     ]) as unknown as ModelRegistry;
 
@@ -533,7 +632,7 @@ describe.sequential("PGStorage integration", () => {
       spec: "1.0" as const,
       provider: "test",
       modelId: "test-model",
-    } as unknown as
+    } as unknown as LanguageModel;
 
     const agent = new Agent({
       id: "agent-1",
@@ -543,7 +642,7 @@ describe.sequential("PGStorage integration", () => {
     });
 
     const agents: AgentRegistry = new Map<string, Agent>([["agent-1", agent]]);
-    const models: ModelRegistry = new Map<string,
+    const models: ModelRegistry = new Map<string, LanguageModel>([
      ["provider/model", model],
     ]) as unknown as ModelRegistry;
 
@@ -595,7 +694,7 @@ describe.sequential("PGStorage integration", () => {
       spec: "1.0" as const,
       provider: "test",
       modelId: "test-model",
-    } as unknown as
+    } as unknown as LanguageModel;
 
     const agent1 = new Agent({
       id: "agent-1",
@@ -615,7 +714,7 @@ describe.sequential("PGStorage integration", () => {
       ["agent-1", agent1],
       ["agent-2", agent2],
     ]);
-    const models: ModelRegistry = new Map<string,
+    const models: ModelRegistry = new Map<string, LanguageModel>([
       ["provider/model", model],
     ]) as unknown as ModelRegistry;
 
@@ -719,7 +818,7 @@ describe.sequential("PGStorage integration", () => {
       spec: "1.0" as const,
       provider: "test",
       modelId: "test-model",
-    } as unknown as
+    } as unknown as LanguageModel;
 
     const agent = new Agent({
       id: "agent-1",
@@ -729,7 +828,7 @@ describe.sequential("PGStorage integration", () => {
     });
 
     const agents: AgentRegistry = new Map<string, Agent>([["agent-1", agent]]);
-    const models: ModelRegistry = new Map<string,
+    const models: ModelRegistry = new Map<string, LanguageModel>([
       ["provider/model", model],
     ]) as unknown as ModelRegistry;
 
package/src/__tests__/memory-integration.test.ts (new file)

@@ -0,0 +1,355 @@
+import { describe, it, expect, beforeAll, afterAll, beforeEach } from "vitest";
+import { Pool } from "pg";
+import { Kernl, Agent } from "kernl";
+import type { LanguageModel } from "@kernl-sdk/protocol";
+import "@kernl-sdk/ai/openai"; // Register OpenAI embedding provider
+
+import { postgres, pgvector } from "../index";
+
+const TEST_DB_URL = process.env.KERNL_PG_TEST_URL;
+
+describe.sequential(
+  "Memory Integration with PGVector",
+  { timeout: 30000 },
+  () => {
+    if (!TEST_DB_URL) {
+      it.skip("requires KERNL_PG_TEST_URL environment variable", () => {});
+      return;
+    }
+
+    if (!process.env.OPENAI_API_KEY) {
+      it.skip("requires OPENAI_API_KEY environment variable", () => {});
+      return;
+    }
+
+    let pool: Pool;
+    let kernl: Kernl;
+
+    beforeAll(async () => {
+      pool = new Pool({ connectionString: TEST_DB_URL });
+
+      // Clean slate
+      await pool.query('DROP SCHEMA IF EXISTS "kernl" CASCADE');
+
+      // Create Kernl with PG + pgvector
+      kernl = new Kernl({
+        storage: {
+          db: postgres({ pool }),
+          vector: pgvector({ pool }),
+        },
+        memory: {
+          embeddingModel: "openai/text-embedding-3-small",
+          dimensions: 1536,
+        },
+      });
+
+      // Register a dummy agent for test scope
+      const model = {
+        spec: "1.0" as const,
+        provider: "test",
+        modelId: "test-model",
+      } as unknown as LanguageModel;
+
+      const agent = new Agent({
+        id: "test-agent",
+        name: "Test Agent",
+        instructions: () => "test instructions",
+        model,
+      });
+
+      kernl.register(agent);
+
+      // Initialize storage (creates "kernl" schema and tables)
+      await kernl.storage.init();
+    });
+
+    afterAll(async () => {
+      if (kernl) {
+        await kernl.storage.close();
+      }
+    });
+
+    beforeEach(async () => {
+      // Clean memories between tests
+      await pool.query('DELETE FROM "kernl"."memories"');
+
+      // Vector index table may not exist yet (created lazily on first memory operation)
+      try {
+        await pool.query('DELETE FROM "kernl"."memories_sindex"');
+      } catch (err: any) {
+        if (!err.message?.includes("does not exist")) {
+          throw err;
+        }
+      }
+    });
+
+    it("creates memory and indexes it in pgvector on first operation", async () => {
+      const memory = await kernl.memories.create({
+        id: "m1",
+        scope: { namespace: "test", agentId: "test-agent" },
+        kind: "semantic",
+        collection: "facts",
+        content: { text: "The user loves TypeScript programming" },
+      });
+
+      expect(memory.id).toBe("m1");
+      expect(memory.content.text).toBe("The user loves TypeScript programming");
+
+      // Verify memory exists in DB
+      const dbResult = await pool.query(
+        'SELECT * FROM "kernl"."memories" WHERE id = $1',
+        ["m1"],
+      );
+      expect(dbResult.rows).toHaveLength(1);
+
+      // Verify memory was indexed in pgvector
+      const vectorResult = await pool.query(
+        'SELECT * FROM "kernl"."memories_sindex" WHERE id = $1',
+        ["m1"],
+      );
+      expect(vectorResult.rows).toHaveLength(1);
+      expect(vectorResult.rows[0].text).toBe(
+        "The user loves TypeScript programming",
+      );
+      expect(vectorResult.rows[0].tvec).toBeTruthy(); // vector embedding exists
+    });
+
+    it("searches memories using vector similarity", async () => {
+      // Create several memories
+      await kernl.memories.create({
+        id: "m1",
+        scope: { namespace: "test" },
+        kind: "semantic",
+        collection: "facts",
+        content: { text: "The user loves TypeScript programming" },
+      });
+
+      await kernl.memories.create({
+        id: "m2",
+        scope: { namespace: "test" },
+        kind: "semantic",
+        collection: "facts",
+        content: { text: "The user enjoys cooking Italian food" },
+      });
+
+      await kernl.memories.create({
+        id: "m3",
+        scope: { namespace: "test" },
+        kind: "semantic",
+        collection: "facts",
+        content: { text: "TypeScript has excellent type safety" },
+      });
+
+      // Search for TypeScript-related memories
+      const results = await kernl.memories.search({
+        query: "programming languages",
+        limit: 10,
+      });
+
+      expect(results.length).toBeGreaterThan(0);
+
+      // Should find TypeScript-related memories with higher scores
+      const ids = results.map((r) => r.document?.id);
+      expect(ids).toContain("m1"); // Direct match
+      expect(ids).toContain("m3"); // Related to TypeScript
+    });
+
+    it("returns no results when filters exclude all matches", async () => {
+      await kernl.memories.create({
+        id: "m1",
+        scope: { namespace: "ns1", agentId: "test-agent" },
+        kind: "semantic",
+        collection: "facts",
+        content: { text: "User likes hiking" },
+      });
+
+      // Filter for a different namespace that has no memories
+      const results = await kernl.memories.search({
+        query: "hiking",
+        filter: { scope: { namespace: "ns2" } },
+        limit: 10,
+      });
+
+      expect(results.length).toBe(0);
+    });
+
+    it("filters search results by scope", async () => {
+      await kernl.memories.create({
+        id: "m1",
+        scope: { namespace: "user1", agentId: "test-agent" },
+        kind: "semantic",
+        collection: "facts",
+        content: { text: "User 1 likes cats" },
+      });
+
+      await kernl.memories.create({
+        id: "m2",
+        scope: { namespace: "user2", agentId: "test-agent" },
+        kind: "semantic",
+        collection: "facts",
+        content: { text: "User 2 likes cats" },
+      });
+
+      // Search only in user1 namespace
+      const results = await kernl.memories.search({
+        query: "cats",
+        filter: { scope: { namespace: "user1" } },
+        limit: 10,
+      });
+
+      expect(results.length).toBe(1);
+      expect(results[0].document?.id).toBe("m1");
+    });
+
+    it("respects topK limit", async () => {
+      await kernl.memories.create({
+        id: "m1",
+        scope: { namespace: "test" },
+        kind: "semantic",
+        collection: "facts",
+        content: { text: "The user likes TypeScript" },
+      });
+
+      await kernl.memories.create({
+        id: "m2",
+        scope: { namespace: "test" },
+        kind: "semantic",
+        collection: "facts",
+        content: { text: "The user likes JavaScript" },
+      });
+
+      await kernl.memories.create({
+        id: "m3",
+        scope: { namespace: "test" },
+        kind: "semantic",
+        collection: "facts",
+        content: { text: "The user likes Rust" },
+      });
+
+      const results = await kernl.memories.search({
+        query: "programming languages",
+        limit: 1,
+      });
+
+      expect(results.length).toBe(1);
+    });
+
+    it("handles index creation idempotently across Kernl instances", async () => {
+      // Create memory with first Kernl instance
+      await kernl.memories.create({
+        id: "m1",
+        scope: { namespace: "test" },
+        kind: "semantic",
+        collection: "facts",
+        content: { text: "First instance memory" },
+      });
+
+      // Close first instance (also closes the pool)
+      await kernl.storage.close();
+
+      // Create new pool and Kernl instance - reassign both so afterAll and beforeEach work
+      pool = new Pool({ connectionString: TEST_DB_URL });
+      kernl = new Kernl({
+        storage: {
+          db: postgres({ pool }),
+          vector: pgvector({ pool }),
+        },
+        memory: {
+          embeddingModel: "openai/text-embedding-3-small",
+          dimensions: 1536,
+        },
+      });
+
+      // Should be able to search without errors (index already exists)
+      const results = await kernl.memories.search({
+        query: "memory",
+        limit: 10,
+      });
+
+      expect(results.length).toBeGreaterThan(0);
+      expect(results[0].document?.id).toBe("m1");
+    });
+
+    it("updates memory content and re-indexes", async () => {
+      await kernl.memories.create({
+        id: "m1",
+        scope: { namespace: "test" },
+        kind: "semantic",
+        collection: "facts",
+        content: { text: "Original content about dogs" },
+      });
+
+      // Update content
+      await kernl.memories.update({
+        id: "m1",
+        content: { text: "Updated content about cats" },
+      });
+
+      // Search should find updated content
+      const results = await kernl.memories.search({
+        query: "cats",
+        limit: 10,
+      });
+
+      expect(results.length).toBeGreaterThan(0);
+      const match = results.find((r) => r.document?.id === "m1");
+      expect(match).toBeDefined();
+      expect(match?.document?.text).toBe("Updated content about cats");
+    });
+
+    it("patches memory metadata without re-indexing", async () => {
+      await kernl.memories.create({
+        id: "m1",
+        scope: { namespace: "test" },
+        kind: "semantic",
+        collection: "facts",
+        content: { text: "Cats are great pets" },
+        metadata: { version: 1 },
+      });
+
+      // Update only metadata (should patch, not full re-index)
+      await kernl.memories.update({
+        id: "m1",
+        metadata: { version: 2, updated: true },
+      });
+
+      // Verify metadata updated in vector index
+      const vectorResult = await pool.query(
+        'SELECT metadata FROM "kernl"."memories_sindex" WHERE id = $1',
+        ["m1"],
+      );
+
+      expect(vectorResult.rows[0].metadata).toEqual({
+        version: 2,
+        updated: true,
+      });
+    });
+
+    it("creates memories with multimodal content", async () => {
+      await kernl.memories.create({
+        id: "m1",
+        scope: { namespace: "test" },
+        kind: "semantic",
+        collection: "media",
+        content: {
+          text: "A beautiful sunset",
+          image: {
+            data: "base64encodedimage",
+            mime: "image/png",
+            alt: "Sunset over the ocean",
+          },
+        },
+      });
+
+      // Should be searchable by text
+      const results = await kernl.memories.search({
+        query: "sunset",
+        limit: 10,
+      });
+
+      expect(results.length).toBeGreaterThan(0);
+      const match = results.find((r) => r.document?.id === "m1");
+      expect(match).toBeDefined();
+    });
+  },
+);