@kernl-sdk/pg 0.1.9

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,285 @@
1
+ import { describe, it, expect, beforeAll, afterAll, beforeEach } from "vitest";
2
+ import { Pool } from "pg";
3
+
4
+ import {
5
+ Agent,
6
+ Kernl,
7
+ tool,
8
+ FunctionToolkit,
9
+ KernlStorage,
10
+ } from "kernl";
11
+ import { STOPPED, message, IN_PROGRESS } from "@kernl-sdk/protocol";
12
+
13
+ import { postgres } from "@/postgres";
14
+
15
+ // Use helper from test fixtures (handles streaming properly)
16
+ function createMockModel(generateFn: any): any {
17
+ return {
18
+ spec: "1.0" as const,
19
+ provider: "test",
20
+ modelId: "test-model",
21
+ generate: generateFn,
22
+ stream: async function* (req: any) {
23
+ const response = await generateFn(req);
24
+ for (const item of response.content) {
25
+ yield item;
26
+ }
27
+ yield {
28
+ kind: "finish" as const,
29
+ finishReason: response.finishReason,
30
+ usage: response.usage,
31
+ };
32
+ },
33
+ };
34
+ }
35
+
36
// NOTE: integration suite — requires a live PostgreSQL database reachable via
// the KERNL_PG_TEST_URL environment variable; otherwise every test is skipped.
const TEST_DB_URL = process.env.KERNL_PG_TEST_URL;

describe.sequential("PG Thread Lifecycle", () => {
  if (!TEST_DB_URL) {
    it.skip("requires KERNL_PG_TEST_URL environment variable", () => {});
    return;
  }

  let pool: Pool;
  let storage: KernlStorage;
  let kernl: Kernl;

  beforeAll(async () => {
    pool = new Pool({ connectionString: TEST_DB_URL });
    storage = postgres({ pool });

    // Start from a clean slate: drop the whole "kernl" schema, then let
    // storage.init() recreate it and run migrations.
    await pool.query('DROP SCHEMA IF EXISTS "kernl" CASCADE');
    await storage.init();

    kernl = new Kernl({ storage: { db: storage } });
  });

  afterAll(async () => {
    // storage.close() also ends the underlying pool.
    await storage.close();
  });

  beforeEach(async () => {
    // Clean threads between tests
    await pool.query(
      'TRUNCATE "kernl"."threads", "kernl"."thread_events" CASCADE',
    );
  });

  describe("Simple thread (no tools)", () => {
    it("should persist thread record and events correctly", async () => {
      // Model always answers with a fixed assistant message.
      const model = createMockModel(async () => ({
        content: [message({ role: "assistant", text: "Hello, world!" })],
        finishReason: "stop",
        usage: { inputTokens: 5, outputTokens: 3, totalTokens: 8 },
        warnings: [],
      }));

      const agent = new Agent({
        id: "simple-agent",
        name: "Simple",
        instructions: "Test",
        model,
      });

      kernl.register(agent);

      const result = await agent.run("Hello");

      expect(result.response).toBe("Hello, world!");

      // Verify thread record
      const threadResult = await pool.query(
        'SELECT * FROM "kernl"."threads" WHERE agent_id = $1',
        ["simple-agent"],
      );

      expect(threadResult.rows).toHaveLength(1);
      const thread = threadResult.rows[0];

      expect(thread.agent_id).toBe("simple-agent");
      expect(thread.state).toBe(STOPPED);
      expect(thread.tick).toBe(1);
      expect(thread.model).toMatch(/test/);

      // Verify events
      const eventsResult = await pool.query(
        'SELECT * FROM "kernl"."thread_events" WHERE tid = $1 ORDER BY seq ASC',
        [thread.id],
      );

      expect(eventsResult.rows.length).toBeGreaterThanOrEqual(2);

      // User message
      expect(eventsResult.rows[0].kind).toBe("message");
      expect(eventsResult.rows[0].seq).toBe(0);

      // Assistant message
      expect(eventsResult.rows[1].kind).toBe("message");
      expect(eventsResult.rows[1].seq).toBe(1);

      // Verify monotonic seq (strictly increasing, no duplicates)
      const seqs = eventsResult.rows.map((r) => r.seq);
      expect(seqs).toEqual([...seqs].sort((a, b) => a - b));
      expect(new Set(seqs).size).toBe(seqs.length);
    });
  });

  describe("Multi-turn with tools", () => {
    it("should persist all events across ticks with correct seq", async () => {
      // First model call requests the "add" tool; second call answers.
      let callCount = 0;
      const model = createMockModel(async () => {
        callCount++;
        if (callCount === 1) {
          return {
            content: [
              message({ role: "assistant", text: "" }),
              {
                kind: "tool-call",
                toolId: "add",
                callId: "call_1",
                state: IN_PROGRESS,
                arguments: '{"a":2,"b":3}',
              },
            ],
            finishReason: "stop",
            usage: { inputTokens: 10, outputTokens: 5, totalTokens: 15 },
            warnings: [],
          };
        }
        return {
          content: [message({ role: "assistant", text: "The sum is 5" })],
          finishReason: "stop",
          usage: { inputTokens: 15, outputTokens: 5, totalTokens: 20 },
          warnings: [],
        };
      });

      const addTool = tool({
        id: "add",
        description: "Add numbers",
        parameters: undefined, // No validation for test
        execute: async (ctx, params: any) => {
          const { a, b } = params;
          return a + b;
        },
      });

      const agent = new Agent({
        id: "tool-agent",
        name: "Tool Agent",
        instructions: "Test",
        model,
        toolkits: [new FunctionToolkit({ id: "math", tools: [addTool] })],
      });

      kernl.register(agent);

      await agent.run("Add 2 and 3");

      // verify thread
      const threadResult = await pool.query(
        'SELECT * FROM "kernl"."threads" WHERE agent_id = $1',
        ["tool-agent"],
      );

      expect(threadResult.rows).toHaveLength(1);
      expect(threadResult.rows[0].tick).toBe(2); // 2 ticks
      expect(threadResult.rows[0].state).toBe(STOPPED);

      // verify events
      const eventsResult = await pool.query(
        'SELECT * FROM "kernl"."thread_events" WHERE tid = $1 ORDER BY seq ASC',
        [threadResult.rows[0].id],
      );

      const events = eventsResult.rows;

      // Should have: user msg, assistant msg (tick1), tool-call, tool-result, assistant msg (tick2)
      expect(events.length).toBeGreaterThanOrEqual(5);

      // Verify event kinds and order
      expect(events[0].kind).toBe("message"); // user
      expect(events[1].kind).toBe("message"); // assistant (tick 1)
      expect(events[2].kind).toBe("tool-call");
      expect(events[3].kind).toBe("tool-result");
      expect(events[4].kind).toBe("message"); // assistant (tick 2)

      // Verify tool result exists and is in correct format
      const toolResult = events[3];
      expect(toolResult.data.state).toBe("completed");
      expect(toolResult.data.callId).toBe("call_1");
      expect(toolResult.data.toolId).toBe("add");

      // Verify seq is monotonic
      const seqs = events.map((e) => e.seq);
      expect(seqs).toEqual([0, 1, 2, 3, 4]);
    });
  });

  describe("Resume from storage", () => {
    it("should load thread from storage and append new events", async () => {
      const model = createMockModel(async () => ({
        content: [message({ role: "assistant", text: "Response" })],
        finishReason: "stop",
        usage: { inputTokens: 5, outputTokens: 3, totalTokens: 8 },
        warnings: [],
      }));

      const agent = new Agent({
        id: "resume-agent",
        name: "Resume",
        instructions: "Test",
        model,
      });

      kernl.register(agent);

      // Fixed thread id so the second run resumes the first run's thread.
      const threadId = "resume-test-123";

      // First run
      await agent.run("First message", { threadId });

      const firstEventsResult = await pool.query(
        'SELECT COUNT(*) as count FROM "kernl"."thread_events" WHERE tid = $1',
        [threadId],
      );

      const firstEventCount = parseInt(firstEventsResult.rows[0].count);
      expect(firstEventCount).toBeGreaterThanOrEqual(2);

      // Second run: resume
      await agent.run("Second message", { threadId });

      const secondEventsResult = await pool.query(
        'SELECT * FROM "kernl"."thread_events" WHERE tid = $1 ORDER BY seq ASC',
        [threadId],
      );

      const secondEventCount = secondEventsResult.rows.length;

      // Should have more events now
      expect(secondEventCount).toBeGreaterThan(firstEventCount);

      // Verify seq is still monotonic across both runs
      const seqs = secondEventsResult.rows.map((r) => r.seq);
      expect(seqs).toEqual([...seqs].sort((a, b) => a - b));
      expect(new Set(seqs).size).toBe(seqs.length);

      // Verify last events are from second run
      const lastEvents = secondEventsResult.rows.slice(-2);
      expect(lastEvents[0].kind).toBe("message"); // user: "Second message"
      expect(lastEvents[1].kind).toBe("message"); // assistant response
    });
  });

  describe("Error handling", () => {
    it.skip("should rollback on persist failure", async () => {
      // TODO: Implement once we have transaction-level control in storage
      // This would require forcing a DB error mid-transaction
      // Expected behavior:
      // - If append() fails, no partial events should be written
      // - Thread state should remain consistent
    });
  });
});
package/src/index.ts ADDED
@@ -0,0 +1,7 @@
1
/**
 * @kernl-sdk/pg - PostgreSQL storage adapter for Kernl
 */

// Storage adapter class and its configuration type.
export { PGStorage, type PGStorageConfig } from "./storage";
// Convenience factory accepting a pool, a connection string, or credentials.
export { postgres, type PostgresConfig } from "./postgres";
// Schema migrations and the minimum schema version this package requires.
export { migrations, REQUIRED_SCHEMA_VERSION } from "./migrations";
@@ -0,0 +1,38 @@
1
+ /**
2
+ * Database migrations.
3
+ */
4
+
5
+ import type { PoolClient } from "pg";
6
+ import type { Table, Column } from "@kernl-sdk/storage";
7
+ import { TABLE_THREADS, TABLE_THREAD_EVENTS, SCHEMA_NAME } from "@kernl-sdk/storage";
8
+
9
/**
 * Migration context with helpers.
 *
 * Passed to each migration's `up` function while the migration transaction
 * is open.
 */
export interface MigrationContext {
  // Client the migration runs on (same transaction as the bookkeeping insert).
  client: PoolClient;
  // Creates a table from its shared definition using the same client.
  createTable: (table: Table<string, Record<string, Column>>) => Promise<void>;
}

/**
 * A single schema migration, identified by a stable id and applied in order.
 */
export interface Migration {
  // Unique, stable identifier (e.g. "0001_initial"); recorded once applied.
  id: string;
  // Applies the migration using the helpers in the context.
  up: (ctx: MigrationContext) => Promise<void>;
}
21
+
22
/**
 * List of all migrations in order.
 */
export const migrations: Migration[] = [
  {
    id: "0001_initial",
    // Creates the core thread tables from their shared definitions.
    async up(ctx) {
      await ctx.createTable(TABLE_THREADS);
      await ctx.createTable(TABLE_THREAD_EVENTS);
    },
  },
];

/**
 * Minimum schema version required by this version of @kernl-sdk/pg.
 */
export const REQUIRED_SCHEMA_VERSION = "0001_initial";
@@ -0,0 +1,63 @@
1
+ import { Pool } from "pg";
2
+ import type { KernlStorage } from "kernl";
3
+
4
+ import { PGStorage } from "./storage";
5
+
6
/**
 * PostgreSQL connection configuration.
 *
 * Exactly one of three shapes: an existing `pg` Pool, a connection string,
 * or discrete connection credentials.
 */
export type PostgresConfig =
  | { pool: Pool } // reuse an existing pool
  | { connstr: string } // e.g. "postgresql://localhost/mydb"
  | {
      host: string;
      port: number;
      database: string;
      user: string;
      password: string;
    };
19
+
20
+ /**
21
+ * Create a PostgreSQL storage adapter for Kernl.
22
+ *
23
+ * @param config - Connection configuration (pool, connection string, or credentials)
24
+ * @returns KernlStorage instance backed by PostgreSQL
25
+ *
26
+ * @example
27
+ * ```ts
28
+ * // with connection string
29
+ * const storage = postgres({ connstr: "postgresql://localhost/mydb" });
30
+ *
31
+ * // with connection options
32
+ * const storage = postgres({
33
+ * host: "localhost",
34
+ * port: 5432,
35
+ * database: "mydb",
36
+ * user: "user",
37
+ * password: "password"
38
+ * });
39
+ *
40
+ * // existing pool
41
+ * const pool = new Pool({ ... });
42
+ * const storage = postgres({ pool });
43
+ * ```
44
+ */
45
+ export function postgres(config: PostgresConfig): KernlStorage {
46
+ let pool: Pool;
47
+
48
+ if ("pool" in config) {
49
+ pool = config.pool;
50
+ } else if ("connstr" in config) {
51
+ pool = new Pool({ connectionString: config.connstr });
52
+ } else {
53
+ pool = new Pool({
54
+ host: config.host,
55
+ port: config.port,
56
+ database: config.database,
57
+ user: config.user,
58
+ password: config.password,
59
+ });
60
+ }
61
+
62
+ return new PGStorage({ pool });
63
+ }
package/src/sql.ts ADDED
@@ -0,0 +1,8 @@
1
+ /**
2
+ * SQL utilities for safe query construction.
3
+ */
4
+
5
+ /**
6
+ * SQL identifier regex - alphanumeric + underscore, must start with letter/underscore.
7
+ */
8
+ export const SQL_IDENTIFIER_REGEX = /^[a-zA-Z_][a-zA-Z0-9_]*$/;
package/src/storage.ts ADDED
@@ -0,0 +1,270 @@
1
+ import assert from "assert";
2
+ import type { Pool, PoolClient } from "pg";
3
+
4
+ /* workspace */
5
+ import type { Table, Column, IndexConstraint } from "@kernl-sdk/storage";
6
+ import { SCHEMA_NAME, TABLE_MIGRATIONS } from "@kernl-sdk/storage";
7
+ import type {
8
+ AgentRegistry,
9
+ ModelRegistry,
10
+ KernlStorage,
11
+ Transaction,
12
+ } from "kernl";
13
+ import { UnimplementedError } from "@kernl-sdk/shared/lib";
14
+
15
+ /* pg */
16
+ import { PGThreadStore } from "./thread/store";
17
+ import { SQL_IDENTIFIER_REGEX } from "./sql";
18
+ import { migrations } from "./migrations";
19
+
20
/**
 * PostgreSQL storage configuration.
 */
export interface PGStorageConfig {
  /**
   * Pool instance for database connections.
   *
   * Note: PGStorage.close() calls pool.end(), so avoid sharing this pool
   * with components that must outlive the storage instance.
   */
  pool: Pool;
}
29
+
30
/**
 * PostgreSQL storage adapter.
 *
 * Owns schema creation, migrations, and DDL generation for the table
 * definitions shared via @kernl-sdk/storage. All identifiers interpolated
 * into SQL here come from those static definitions, not from user input.
 */
export class PGStorage implements KernlStorage {
  // Connection pool; ended by close().
  private pool: Pool;

  // Thread persistence (records + events), backed by the same pool.
  threads: PGThreadStore;

  constructor(config: PGStorageConfig) {
    this.pool = config.pool;
    this.threads = new PGThreadStore(this.pool);
  }

  /**
   * Bind runtime registries to storage.
   */
  bind(registries: { agents: AgentRegistry; models: ModelRegistry }): void {
    this.threads.bind(registries);
  }

  /**
   * Execute a function within a transaction.
   *
   * Not implemented yet — always throws UnimplementedError.
   */
  async transaction<T>(fn: (tx: Transaction) => Promise<T>): Promise<T> {
    throw new UnimplementedError();
  }

  /**
   * Initialize the storage backend.
   *
   * Creates the schema and the migrations bookkeeping table (both
   * idempotent), then applies any pending migrations.
   */
  async init(): Promise<void> {
    await this.pool.query(`CREATE SCHEMA IF NOT EXISTS "${SCHEMA_NAME}"`);
    await this.createTable(TABLE_MIGRATIONS);
    await this.migrate();
  }

  /**
   * Close the storage backend and cleanup resources.
   *
   * Ends the underlying pool — the pool is unusable afterwards, even if it
   * was injected by the caller.
   */
  async close(): Promise<void> {
    await this.pool.end();
  }

  /**
   * Run migrations to ensure all required tables exist.
   *
   * All pending migrations plus their bookkeeping inserts run in a single
   * transaction: either every pending migration applies or none do.
   */
  async migrate(): Promise<void> {
    const client = await this.pool.connect();
    try {
      await client.query("BEGIN");

      // read applied migration IDs
      // NOTE: the bookkeeping table name "migrations" is hard-coded here;
      // it must stay in sync with TABLE_MIGRATIONS created in init().
      const result = await client.query<{ id: string }>(
        `SELECT id FROM "${SCHEMA_NAME}".migrations ORDER BY applied_at ASC`,
      );
      const applied = new Set(result.rows.map((row) => row.id));

      // filter pending migrations (declaration order in `migrations` is the
      // application order)
      const pending = migrations.filter((m) => !applied.has(m.id));
      if (pending.length === 0) {
        await client.query("COMMIT");
        return;
      }

      // run pending migrations + insert into migrations table
      for (const migration of pending) {
        await migration.up({
          client,
          createTable: async (table: Table<string, Record<string, Column>>) => {
            await this._createTable(client, table);
          },
        });
        // applied_at is epoch milliseconds (Date.now()) — assumes the column
        // stores a numeric timestamp, not a SQL timestamp type.
        await client.query(
          `INSERT INTO "${SCHEMA_NAME}".migrations (id, applied_at) VALUES ($1, $2)`,
          [migration.id, Date.now()],
        );
      }

      await client.query("COMMIT");
    } catch (error) {
      await client.query("ROLLBACK");
      throw error;
    } finally {
      client.release();
    }
  }

  /**
   * Create a table from its definition.
   *
   * Validates the table name against the identifier whitelist before it is
   * interpolated into DDL.
   */
  private async createTable(
    table: Table<string, Record<string, Column>>,
  ): Promise<void> {
    assert(
      SQL_IDENTIFIER_REGEX.test(table.name),
      "system table should have a valid name",
    );
    await this._createTable(this.pool, table);
  }

  /**
   * Create a table from its definition using a specific client.
   *
   * Builds a `CREATE TABLE IF NOT EXISTS` statement from the column and
   * constraint metadata, then creates any declared indexes afterwards.
   */
  private async _createTable(
    client: Pool | PoolClient,
    table: Table<string, Record<string, Column>>,
  ): Promise<void> {
    const columns: string[] = [];
    const tableConstraints: string[] = [];

    // build column definitions
    for (const name in table.columns) {
      const col = table.columns[name];

      const constraints: string[] = [];
      if (col._pk) constraints.push("PRIMARY KEY");
      if (col._unique) constraints.push("UNIQUE");
      if (!col._nullable && !col._pk) constraints.push("NOT NULL");
      if (col._default !== undefined) {
        // NOTE(review): assumes col.encode() yields a valid SQL literal for
        // direct interpolation — confirm against the Column contract.
        constraints.push(`DEFAULT ${col.encode(col._default)}`);
      }

      // foreign key reference (column-level, single-column only)
      if (col._fk) {
        let ref = `REFERENCES "${SCHEMA_NAME}"."${col._fk.table}" ("${col._fk.column}")`;
        if (col._onDelete) {
          ref += ` ON DELETE ${col._onDelete}`;
        }
        constraints.push(ref);
      }

      columns.push(
        `"${name}" ${col.type.toUpperCase()} ${constraints.join(" ")}`.trim(),
      );
    }

    // table-level constraints
    const indexes: IndexConstraint[] = [];
    if (table.constraints) {
      for (const constraint of table.constraints) {
        switch (constraint.kind) {
          case "unique": {
            // default name: {table}_{col1}_{col2}..._unique
            const name =
              constraint.name ??
              `${table.name}_${constraint.columns.join("_")}_unique`;
            const cols = constraint.columns.map((c) => `"${c}"`).join(", ");
            tableConstraints.push(`CONSTRAINT "${name}" UNIQUE (${cols})`);
            break;
          }

          case "pkey": {
            const name = constraint.name ?? `${table.name}_pkey`;
            const cols = constraint.columns.map((c) => `"${c}"`).join(", ");
            tableConstraints.push(`CONSTRAINT "${name}" PRIMARY KEY (${cols})`);
            break;
          }

          case "fkey": {
            throw new UnimplementedError(
              "Composite foreign keys not yet supported. Use column-level .references() for single-column FKs.",
            );
          }

          case "check": {
            const name = constraint.name ?? `${table.name}_check`;
            tableConstraints.push(
              `CONSTRAINT "${name}" CHECK (${constraint.expression})`,
            );
            break;
          }

          case "index": {
            // collect indexes to create after table
            indexes.push(constraint);
            break;
          }
        }
      }
    }

    const constraints = [...columns, ...tableConstraints];

    const sql = `
      CREATE TABLE IF NOT EXISTS "${SCHEMA_NAME}"."${table.name}" (
        ${constraints.join(",\n  ")}
      )
    `.trim();

    await client.query(sql);

    // create indexes (outside the CREATE TABLE statement)
    for (const index of indexes) {
      await this.createIndex(client, table.name, index);
    }
  }

  /**
   * Alter a table definition (not implemented).
   */
  private async alterTable(
    table: Table<string, Record<string, Column>>,
  ): Promise<void> {
    throw new UnimplementedError();
  }

  /**
   * Clear all rows from a table (not implemented).
   */
  private async clearTable(tableName: string): Promise<void> {
    throw new UnimplementedError();
  }

  /**
   * Drop a table (not implemented).
   */
  private async dropTable(tableName: string): Promise<void> {
    throw new UnimplementedError();
  }

  /**
   * Create an index from its definition.
   *
   * Idempotent via IF NOT EXISTS; the index name is derived from the table
   * and column names, so a renamed constraint will create a new index.
   */
  private async createIndex(
    client: Pool | PoolClient,
    tableName: string,
    index: IndexConstraint,
  ): Promise<void> {
    const uniqueKeyword = index.unique ? "UNIQUE" : "";
    const columns = index.columns.map((c) => `"${c}"`).join(", ");

    // Auto-generate index name: idx_{table}_{col1}_{col2}...
    const indexName = `idx_${tableName}_${index.columns.join("_")}`;

    const sql = `
      CREATE ${uniqueKeyword} INDEX IF NOT EXISTS "${indexName}"
      ON "${SCHEMA_NAME}"."${tableName}" (${columns})
    `.trim();

    await client.query(sql);
  }
}