@kernl-sdk/libsql 0.1.38 → 0.1.39

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (137)
  1. package/.turbo/turbo-build.log +5 -4
  2. package/CHANGELOG.md +8 -0
  3. package/README.md +225 -0
  4. package/dist/__tests__/constraints.test.d.ts +2 -0
  5. package/dist/__tests__/constraints.test.d.ts.map +1 -0
  6. package/dist/__tests__/constraints.test.js +97 -0
  7. package/dist/__tests__/helpers.d.ts +36 -0
  8. package/dist/__tests__/helpers.d.ts.map +1 -0
  9. package/dist/__tests__/helpers.js +80 -0
  10. package/dist/__tests__/memory.create-get.test.d.ts +2 -0
  11. package/dist/__tests__/memory.create-get.test.d.ts.map +1 -0
  12. package/dist/__tests__/memory.create-get.test.js +8 -0
  13. package/dist/__tests__/memory.delete.test.d.ts +2 -0
  14. package/dist/__tests__/memory.delete.test.d.ts.map +1 -0
  15. package/dist/__tests__/memory.delete.test.js +6 -0
  16. package/dist/__tests__/memory.list.test.d.ts +2 -0
  17. package/dist/__tests__/memory.list.test.d.ts.map +1 -0
  18. package/dist/__tests__/memory.list.test.js +8 -0
  19. package/dist/__tests__/memory.update.test.d.ts +2 -0
  20. package/dist/__tests__/memory.update.test.d.ts.map +1 -0
  21. package/dist/__tests__/memory.update.test.js +8 -0
  22. package/dist/__tests__/migrations.test.d.ts +2 -0
  23. package/dist/__tests__/migrations.test.d.ts.map +1 -0
  24. package/dist/__tests__/migrations.test.js +68 -0
  25. package/dist/__tests__/row-codecs.test.d.ts +2 -0
  26. package/dist/__tests__/row-codecs.test.d.ts.map +1 -0
  27. package/dist/__tests__/row-codecs.test.js +175 -0
  28. package/dist/__tests__/sql-utils.test.d.ts +2 -0
  29. package/dist/__tests__/sql-utils.test.d.ts.map +1 -0
  30. package/dist/__tests__/sql-utils.test.js +45 -0
  31. package/dist/__tests__/storage.init.test.d.ts +2 -0
  32. package/dist/__tests__/storage.init.test.d.ts.map +1 -0
  33. package/dist/__tests__/storage.init.test.js +63 -0
  34. package/dist/__tests__/thread.lifecycle.test.d.ts +2 -0
  35. package/dist/__tests__/thread.lifecycle.test.d.ts.map +1 -0
  36. package/dist/__tests__/thread.lifecycle.test.js +172 -0
  37. package/dist/__tests__/transaction.test.d.ts +2 -0
  38. package/dist/__tests__/transaction.test.d.ts.map +1 -0
  39. package/dist/__tests__/transaction.test.js +16 -0
  40. package/dist/__tests__/utils.test.d.ts +2 -0
  41. package/dist/__tests__/utils.test.d.ts.map +1 -0
  42. package/dist/__tests__/utils.test.js +31 -0
  43. package/dist/client.d.ts +46 -0
  44. package/dist/client.d.ts.map +1 -0
  45. package/dist/client.js +46 -0
  46. package/dist/index.d.ts +5 -0
  47. package/dist/index.d.ts.map +1 -1
  48. package/dist/index.js +5 -1
  49. package/dist/memory/__tests__/create-get.test.d.ts +2 -0
  50. package/dist/memory/__tests__/create-get.test.d.ts.map +1 -0
  51. package/dist/memory/__tests__/create-get.test.js +126 -0
  52. package/dist/memory/__tests__/delete.test.d.ts +2 -0
  53. package/dist/memory/__tests__/delete.test.d.ts.map +1 -0
  54. package/dist/memory/__tests__/delete.test.js +96 -0
  55. package/dist/memory/__tests__/list.test.d.ts +2 -0
  56. package/dist/memory/__tests__/list.test.d.ts.map +1 -0
  57. package/dist/memory/__tests__/list.test.js +168 -0
  58. package/dist/memory/__tests__/sql.test.d.ts +2 -0
  59. package/dist/memory/__tests__/sql.test.d.ts.map +1 -0
  60. package/dist/memory/__tests__/sql.test.js +159 -0
  61. package/dist/memory/__tests__/update.test.d.ts +2 -0
  62. package/dist/memory/__tests__/update.test.d.ts.map +1 -0
  63. package/dist/memory/__tests__/update.test.js +113 -0
  64. package/dist/memory/row.d.ts +11 -0
  65. package/dist/memory/row.d.ts.map +1 -0
  66. package/dist/memory/row.js +29 -0
  67. package/dist/memory/sql.d.ts +34 -0
  68. package/dist/memory/sql.d.ts.map +1 -0
  69. package/dist/memory/sql.js +109 -0
  70. package/dist/memory/store.d.ts +41 -0
  71. package/dist/memory/store.d.ts.map +1 -0
  72. package/dist/memory/store.js +132 -0
  73. package/dist/migrations.d.ts +32 -0
  74. package/dist/migrations.d.ts.map +1 -0
  75. package/dist/migrations.js +157 -0
  76. package/dist/sql.d.ts +28 -0
  77. package/dist/sql.d.ts.map +1 -0
  78. package/dist/sql.js +22 -0
  79. package/dist/storage.d.ts +75 -0
  80. package/dist/storage.d.ts.map +1 -0
  81. package/dist/storage.js +123 -0
  82. package/dist/thread/__tests__/append.test.d.ts +2 -0
  83. package/dist/thread/__tests__/append.test.d.ts.map +1 -0
  84. package/dist/thread/__tests__/append.test.js +141 -0
  85. package/dist/thread/__tests__/history.test.d.ts +2 -0
  86. package/dist/thread/__tests__/history.test.d.ts.map +1 -0
  87. package/dist/thread/__tests__/history.test.js +146 -0
  88. package/dist/thread/__tests__/sql.test.d.ts +2 -0
  89. package/dist/thread/__tests__/sql.test.d.ts.map +1 -0
  90. package/dist/thread/__tests__/sql.test.js +129 -0
  91. package/dist/thread/__tests__/store.test.d.ts +2 -0
  92. package/dist/thread/__tests__/store.test.d.ts.map +1 -0
  93. package/dist/thread/__tests__/store.test.js +170 -0
  94. package/dist/thread/row.d.ts +19 -0
  95. package/dist/thread/row.d.ts.map +1 -0
  96. package/dist/thread/row.js +65 -0
  97. package/dist/thread/sql.d.ts +33 -0
  98. package/dist/thread/sql.d.ts.map +1 -0
  99. package/dist/thread/sql.js +112 -0
  100. package/dist/thread/store.d.ts +67 -0
  101. package/dist/thread/store.d.ts.map +1 -0
  102. package/dist/thread/store.js +282 -0
  103. package/dist/utils.d.ts +10 -0
  104. package/dist/utils.d.ts.map +1 -0
  105. package/dist/utils.js +21 -0
  106. package/package.json +15 -11
  107. package/src/__tests__/constraints.test.ts +123 -0
  108. package/src/__tests__/helpers.ts +98 -0
  109. package/src/__tests__/migrations.test.ts +114 -0
  110. package/src/__tests__/row-codecs.test.ts +201 -0
  111. package/src/__tests__/sql-utils.test.ts +52 -0
  112. package/src/__tests__/storage.init.test.ts +92 -0
  113. package/src/__tests__/thread.lifecycle.test.ts +234 -0
  114. package/src/__tests__/transaction.test.ts +25 -0
  115. package/src/__tests__/utils.test.ts +38 -0
  116. package/src/client.ts +71 -0
  117. package/src/index.ts +10 -0
  118. package/src/memory/__tests__/create-get.test.ts +161 -0
  119. package/src/memory/__tests__/delete.test.ts +124 -0
  120. package/src/memory/__tests__/list.test.ts +198 -0
  121. package/src/memory/__tests__/sql.test.ts +186 -0
  122. package/src/memory/__tests__/update.test.ts +148 -0
  123. package/src/memory/row.ts +36 -0
  124. package/src/memory/sql.ts +142 -0
  125. package/src/memory/store.ts +173 -0
  126. package/src/migrations.ts +206 -0
  127. package/src/sql.ts +35 -0
  128. package/src/storage.ts +170 -0
  129. package/src/thread/__tests__/append.test.ts +201 -0
  130. package/src/thread/__tests__/history.test.ts +198 -0
  131. package/src/thread/__tests__/sql.test.ts +154 -0
  132. package/src/thread/__tests__/store.test.ts +219 -0
  133. package/src/thread/row.ts +77 -0
  134. package/src/thread/sql.ts +153 -0
  135. package/src/thread/store.ts +381 -0
  136. package/src/utils.ts +20 -0
  137. package/LICENSE +0 -201
package/dist/thread/store.js ADDED
@@ -0,0 +1,282 @@
+ /**
+ * LibSQL Thread store implementation.
+ */
+ import assert from "assert";
+ import { Context, } from "kernl";
+ import { Thread } from "kernl/internal";
+ import { KERNL_SCHEMA_NAME, NewThreadCodec, ThreadEventRecordCodec, } from "@kernl-sdk/storage";
+ import { SQL_WHERE, SQL_ORDER, SQL_UPDATE } from "./sql.js";
+ import { RowToThreadRecord, RowToEventRecord, RowToEventRecordDirect, } from "./row.js";
+ import { expandarray } from "../sql.js";
+ // SQLite doesn't support schemas, so we use table name prefix
+ const THREADS_TABLE = `${KERNL_SCHEMA_NAME}_threads`;
+ const THREAD_EVENTS_TABLE = `${KERNL_SCHEMA_NAME}_thread_events`;
+ /**
+ * LibSQL Thread store implementation.
+ *
+ * All async methods call `ensureInit()` before database operations
+ * to ensure schema/tables exist.
+ */
+ export class LibSQLThreadStore {
+ db;
+ registries;
+ ensureInit;
+ constructor(db, ensureInit) {
+ this.db = db;
+ this.ensureInit = ensureInit;
+ this.registries = null;
+ }
+ /**
+ * Bind runtime registries for hydrating Thread instances.
+ */
+ bind(registries) {
+ this.registries = registries;
+ }
+ /**
+ * Get a thread by id.
+ */
+ async get(tid, include) {
+ await this.ensureInit();
+ // JOIN with thread_events if include.history
+ if (include?.history) {
+ const opts = typeof include.history === "object" ? include.history : undefined;
+ const params = [tid];
+ let eventFilter = "";
+ if (opts?.after !== undefined) {
+ eventFilter += ` AND e.seq > ?`;
+ params.push(opts.after);
+ }
+ if (opts?.kinds && opts.kinds.length > 0) {
+ const { placeholders, params: kindParams } = expandarray(opts.kinds);
+ eventFilter += ` AND e.kind IN (${placeholders})`;
+ params.push(...kindParams);
+ }
+ const order = opts?.order ?? "asc";
+ const limit = opts?.limit ? ` LIMIT ${opts.limit}` : "";
+ const query = `
+ SELECT
+ t.*,
+ e.id as event_id,
+ e.tid as event_tid,
+ e.seq,
+ e.kind as event_kind,
+ e.timestamp,
+ e.data,
+ e.metadata as event_metadata
+ FROM ${THREADS_TABLE} t
+ LEFT JOIN ${THREAD_EVENTS_TABLE} e ON t.id = e.tid${eventFilter}
+ WHERE t.id = ?
+ ORDER BY e.seq ${order.toUpperCase()}
+ ${limit}
+ `;
+ // Move tid to end of params (WHERE t.id = ?)
+ const finalParams = [...params.slice(1), tid];
+ const result = await this.db.execute({ sql: query, args: finalParams });
+ if (result.rows.length === 0) {
+ return null;
+ }
+ // first row has thread data (all rows have same thread data)
+ const record = RowToThreadRecord.encode(result.rows[0]);
+ // collect events from all rows (skip rows where event_id is null)
+ const events = result.rows
+ .filter((row) => row.event_id !== null)
+ .map((row) => ThreadEventRecordCodec.decode(RowToEventRecord.encode(row)));
+ try {
+ return this.hydrate({ record, events });
+ }
+ catch {
+ return null;
+ }
+ }
+ // simple query without events
+ const result = await this.db.execute({
+ sql: `SELECT * FROM ${THREADS_TABLE} WHERE id = ?`,
+ args: [tid],
+ });
+ if (result.rows.length === 0) {
+ return null;
+ }
+ try {
+ return this.hydrate({ record: RowToThreadRecord.encode(result.rows[0]) });
+ }
+ catch {
+ return null;
+ }
+ }
+ /**
+ * List threads matching the filter.
+ */
+ async list(options) {
+ await this.ensureInit();
+ const { sql: where, params } = SQL_WHERE.encode({
+ filter: options?.filter,
+ });
+ let query = `SELECT * FROM ${THREADS_TABLE}`;
+ if (where)
+ query += ` WHERE ${where}`;
+ query += ` ORDER BY ${SQL_ORDER.encode({ order: options?.order })}`;
+ const args = [...params];
+ if (options?.limit) {
+ query += ` LIMIT ?`;
+ args.push(options.limit);
+ }
+ if (options?.offset) {
+ query += ` OFFSET ?`;
+ args.push(options.offset);
+ }
+ const result = await this.db.execute({ sql: query, args });
+ return result.rows
+ .map((row) => {
+ try {
+ return this.hydrate({ record: RowToThreadRecord.encode(row) });
+ }
+ catch {
+ // Skip threads with non-existent agent/model (graceful degradation)
+ return null;
+ }
+ })
+ .filter((thread) => thread !== null);
+ }
+ /**
+ * Insert a new thread into the store.
+ */
+ async insert(thread) {
+ await this.ensureInit();
+ const record = NewThreadCodec.encode(thread);
+ const result = await this.db.execute({
+ sql: `INSERT INTO ${THREADS_TABLE}
+ (id, namespace, agent_id, model, context, tick, state, parent_task_id, metadata, created_at, updated_at)
+ VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
+ RETURNING *`,
+ args: [
+ record.id,
+ record.namespace,
+ record.agent_id,
+ record.model,
+ JSON.stringify(record.context),
+ record.tick,
+ record.state,
+ record.parent_task_id,
+ record.metadata ? JSON.stringify(record.metadata) : null,
+ record.created_at,
+ record.updated_at,
+ ],
+ });
+ return this.hydrate({ record: RowToThreadRecord.encode(result.rows[0]) });
+ }
+ /**
+ * Update thread runtime state.
+ */
+ async update(tid, patch) {
+ await this.ensureInit();
+ const { sql: updates, params } = SQL_UPDATE.encode({ patch });
+ const args = [...params, tid];
+ const result = await this.db.execute({
+ sql: `UPDATE ${THREADS_TABLE}
+ SET ${updates}
+ WHERE id = ?
+ RETURNING *`,
+ args,
+ });
+ return this.hydrate({ record: RowToThreadRecord.encode(result.rows[0]) });
+ }
+ /**
+ * Delete a thread and cascade to thread_events.
+ */
+ async delete(tid) {
+ await this.ensureInit();
+ await this.db.execute({
+ sql: `DELETE FROM ${THREADS_TABLE} WHERE id = ?`,
+ args: [tid],
+ });
+ }
+ /**
+ * Get the event history for a thread.
+ */
+ async history(tid, opts) {
+ await this.ensureInit();
+ let query = `SELECT * FROM ${THREAD_EVENTS_TABLE} WHERE tid = ?`;
+ const args = [tid];
+ // - filter:seq -
+ if (opts?.after !== undefined) {
+ query += ` AND seq > ?`;
+ args.push(opts.after);
+ }
+ // - filter:kind -
+ if (opts?.kinds && opts.kinds.length > 0) {
+ const { placeholders, params: kindParams } = expandarray(opts.kinds);
+ query += ` AND kind IN (${placeholders})`;
+ args.push(...kindParams);
+ }
+ // - order -
+ const order = opts?.order ?? "asc";
+ query += ` ORDER BY seq ${order.toUpperCase()}`;
+ // - limit -
+ if (opts?.limit !== undefined) {
+ query += ` LIMIT ?`;
+ args.push(opts.limit);
+ }
+ const result = await this.db.execute({ sql: query, args });
+ return result.rows.map((row) => ThreadEventRecordCodec.decode(RowToEventRecordDirect.encode(row)));
+ }
+ /**
+ * Append events to the thread history.
+ *
+ * Semantics:
+ * - Guaranteed per-thread ordering via monotonically increasing `seq`
+ * - Idempotent on `(tid, event.id)`: duplicate ids MUST NOT create duplicate rows
+ * - Events maintain insertion order
+ */
+ async append(events) {
+ if (events.length === 0)
+ return;
+ await this.ensureInit();
+ const records = events.map((e) => ThreadEventRecordCodec.encode(e));
+ const placeholders = [];
+ const values = [];
+ for (const record of records) {
+ placeholders.push(`(?, ?, ?, ?, ?, ?, ?)`);
+ values.push(record.id, record.tid, record.seq, record.kind, record.timestamp, record.data ? JSON.stringify(record.data) : null, record.metadata ? JSON.stringify(record.metadata) : null);
+ }
+ // insert with ON CONFLICT DO NOTHING for idempotency
+ await this.db.execute({
+ sql: `INSERT INTO ${THREAD_EVENTS_TABLE}
+ (id, tid, seq, kind, timestamp, data, metadata)
+ VALUES ${placeholders.join(", ")}
+ ON CONFLICT (tid, id) DO NOTHING`,
+ args: values,
+ });
+ }
+ /**
+ * Hydrate a Thread instance from a database record.
+ */
+ hydrate(thread) {
+ assert(this.registries, "registries should be bound to storage in Kernl constructor");
+ const { record, events = [] } = thread;
+ const agent = this.registries.agents.get(record.agent_id);
+ const model = this.registries.models.get(record.model);
+ if (!agent || !model) {
+ throw new Error(`Thread ${record.id} references non-existent agent/model (agent: ${record.agent_id}, model: ${record.model})`);
+ }
+ // safety: threads only exist for llm agents
+ if (agent.kind !== "llm") {
+ throw new Error(`Thread ${record.id} references non-llm agent ${record.agent_id} (kind: ${agent.kind})`);
+ }
+ return new Thread({
+ agent: agent,
+ history: events,
+ context: new Context(record.namespace, record.context),
+ model,
+ task: null, // TODO: load from TaskStore when it exists
+ tid: record.id,
+ namespace: record.namespace,
+ tick: record.tick,
+ state: record.state,
+ metadata: record.metadata,
+ createdAt: new Date(Number(record.created_at)),
+ updatedAt: new Date(Number(record.updated_at)),
+ storage: this, // pass storage reference so resumed thread can persist
+ persisted: true,
+ });
+ }
+ }
package/dist/utils.d.ts ADDED
@@ -0,0 +1,10 @@
+ /**
+ * Shared utilities for LibSQL storage.
+ */
+ /**
+ * Parse a JSON string from SQLite into an object.
+ *
+ * SQLite stores JSON as TEXT, so we need to parse it back.
+ */
+ export declare function parsejson<T>(value: unknown): T | null;
+ //# sourceMappingURL=utils.d.ts.map
package/dist/utils.d.ts.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"utils.d.ts","sourceRoot":"","sources":["../src/utils.ts"],"names":[],"mappings":"AAAA;;GAEG;AAEH;;;;GAIG;AACH,wBAAgB,SAAS,CAAC,CAAC,EAAE,KAAK,EAAE,OAAO,GAAG,CAAC,GAAG,IAAI,CAUrD"}
package/dist/utils.js ADDED
@@ -0,0 +1,21 @@
+ /**
+ * Shared utilities for LibSQL storage.
+ */
+ /**
+ * Parse a JSON string from SQLite into an object.
+ *
+ * SQLite stores JSON as TEXT, so we need to parse it back.
+ */
+ export function parsejson(value) {
+ if (value === null || value === undefined)
+ return null;
+ if (typeof value === "string") {
+ try {
+ return JSON.parse(value);
+ }
+ catch {
+ return null;
+ }
+ }
+ return value;
+ }
package/package.json CHANGED
@@ -1,6 +1,6 @@
  {
  "name": "@kernl-sdk/libsql",
- "version": "0.1.38",
+ "version": "0.1.39",
  "description": "LibSQL storage adapter for kernl",
  "keywords": [
  "kernl",
@@ -29,15 +29,6 @@
  "import": "./dist/index.js"
  }
  },
- "devDependencies": {
- "@types/node": "^24.10.0",
- "tsc-alias": "^1.8.10",
- "typescript": "5.9.2",
- "vitest": "^4.0.8"
- },
- "dependencies": {
- "@kernl-sdk/storage": "0.2.6"
- },
  "scripts": {
  "build": "tsc && tsc-alias --resolve-full-paths",
  "dev": "tsc --watch",
@@ -45,5 +36,18 @@
  "test": "vitest",
  "test:watch": "vitest --watch",
  "test:run": "vitest run"
+ },
+ "devDependencies": {
+ "@kernl-sdk/protocol": "workspace:*",
+ "@types/node": "^24.10.0",
+ "tsc-alias": "^1.8.10",
+ "typescript": "5.9.2",
+ "vitest": "^4.0.8"
+ },
+ "dependencies": {
+ "@kernl-sdk/shared": "workspace:*",
+ "@kernl-sdk/storage": "workspace:*",
+ "@libsql/client": "^0.14.0",
+ "kernl": "workspace:*"
  }
- }
+ }
package/src/__tests__/constraints.test.ts ADDED
@@ -0,0 +1,123 @@
+ import { describe, it, expect, beforeEach, afterEach } from "vitest";
+ import type { Client } from "@libsql/client";
+
+ import {
+ create_client,
+ create_storage,
+ enable_foreign_keys,
+ THREADS_TABLE,
+ THREAD_EVENTS_TABLE,
+ testid,
+ } from "./helpers";
+ import { LibSQLStorage } from "../storage";
+
+ describe("LibSQL constraints", () => {
+ let client: Client;
+ let storage: LibSQLStorage;
+
+ beforeEach(async () => {
+ client = create_client();
+ await enable_foreign_keys(client);
+ storage = create_storage(client);
+ // Initialize tables
+ await storage.memories.list();
+ });
+
+ afterEach(() => {
+ client.close();
+ });
+
+ it("enforces thread_events foreign key to threads", async () => {
+ // Try to insert event for non-existent thread
+ const result = client.execute({
+ sql: `INSERT INTO "${THREAD_EVENTS_TABLE}" (id, tid, seq, kind, timestamp) VALUES (?, ?, ?, ?, ?)`,
+ args: ["evt-1", "nonexistent-thread", 1, "message", Date.now()],
+ });
+
+ await expect(result).rejects.toThrow(/FOREIGN KEY/i);
+ });
+
+ it("cascades thread deletion to events", async () => {
+ const tid = testid("thread");
+
+ // Insert a thread directly
+ await client.execute({
+ sql: `INSERT INTO "${THREADS_TABLE}" (id, namespace, agent_id, model, context, tick, state, created_at, updated_at) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)`,
+ args: [
+ tid,
+ "default",
+ "agent-1",
+ "test/model",
+ "{}",
+ 0,
+ "idle",
+ Date.now(),
+ Date.now(),
+ ],
+ });
+
+ // Insert events for the thread
+ await client.execute({
+ sql: `INSERT INTO "${THREAD_EVENTS_TABLE}" (id, tid, seq, kind, timestamp) VALUES (?, ?, ?, ?, ?)`,
+ args: ["evt-1", tid, 1, "message", Date.now()],
+ });
+ await client.execute({
+ sql: `INSERT INTO "${THREAD_EVENTS_TABLE}" (id, tid, seq, kind, timestamp) VALUES (?, ?, ?, ?, ?)`,
+ args: ["evt-2", tid, 2, "message", Date.now()],
+ });
+
+ // Verify events exist
+ const beforeDelete = await client.execute({
+ sql: `SELECT COUNT(*) as count FROM "${THREAD_EVENTS_TABLE}" WHERE tid = ?`,
+ args: [tid],
+ });
+ expect(beforeDelete.rows[0].count).toBe(2);
+
+ // Delete thread
+ await client.execute({
+ sql: `DELETE FROM "${THREADS_TABLE}" WHERE id = ?`,
+ args: [tid],
+ });
+
+ // Events should be cascaded
+ const afterDelete = await client.execute({
+ sql: `SELECT COUNT(*) as count FROM "${THREAD_EVENTS_TABLE}" WHERE tid = ?`,
+ args: [tid],
+ });
+ expect(afterDelete.rows[0].count).toBe(0);
+ });
+
+ it("enforces unique (tid, id) on thread_events", async () => {
+ const tid = testid("thread");
+
+ // Insert a thread
+ await client.execute({
+ sql: `INSERT INTO "${THREADS_TABLE}" (id, namespace, agent_id, model, context, tick, state, created_at, updated_at) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)`,
+ args: [
+ tid,
+ "default",
+ "agent-1",
+ "test/model",
+ "{}",
+ 0,
+ "idle",
+ Date.now(),
+ Date.now(),
+ ],
+ });
+
+ // Insert an event
+ await client.execute({
+ sql: `INSERT INTO "${THREAD_EVENTS_TABLE}" (id, tid, seq, kind, timestamp) VALUES (?, ?, ?, ?, ?)`,
+ args: ["evt-1", tid, 1, "message", Date.now()],
+ });
+
+ // Try to insert duplicate (tid, id) - should fail
+ const duplicate = client.execute({
+ sql: `INSERT INTO "${THREAD_EVENTS_TABLE}" (id, tid, seq, kind, timestamp) VALUES (?, ?, ?, ?, ?)`,
+ args: ["evt-1", tid, 2, "message", Date.now()],
+ });
+
+ await expect(duplicate).rejects.toThrow(/UNIQUE/i);
+ });
+ });
package/src/__tests__/helpers.ts ADDED
@@ -0,0 +1,98 @@
+ import { createClient, type Client } from "@libsql/client";
+ import { KERNL_SCHEMA_NAME } from "@kernl-sdk/storage";
+ import type { IAgentRegistry, IModelRegistry } from "kernl";
+
+ import { LibSQLStorage } from "../storage";
+
+ // Counter to ensure unique database URLs within the same process
+ let dbCounter = 0;
+
+ /**
+ * Generate a unique database URL for each test.
+ * Uses temp file instead of :memory: due to libsql bug with in-memory transactions.
+ */
+ export function test_db_url(): string {
+ return `file:/tmp/kernl-test-${process.pid}-${Date.now()}-${++dbCounter}.db`;
+ }
+
+ export const THREADS_TABLE = `${KERNL_SCHEMA_NAME}_threads`;
+ export const THREAD_EVENTS_TABLE = `${KERNL_SCHEMA_NAME}_thread_events`;
+ export const MEMORIES_TABLE = `${KERNL_SCHEMA_NAME}_memories`;
+ export const MIGRATIONS_TABLE = `${KERNL_SCHEMA_NAME}_migrations`;
+
+ /**
+ * Create a test database setup with client and storage sharing the same URL.
+ */
+ export function create_test_db(): { client: Client; storage: LibSQLStorage; url: string } {
+ const url = test_db_url();
+ const client = createClient({ url });
+ const storage = new LibSQLStorage({ client, url });
+ return { client, storage, url };
+ }
+
+ export function create_client(url?: string): Client {
+ const dbUrl = url ?? test_db_url();
+ return createClient({ url: dbUrl });
+ }
+
+ export async function enable_foreign_keys(client: Client): Promise<void> {
+ await client.execute("PRAGMA foreign_keys = ON");
+ }
+
+ export function create_storage(client: Client, url?: string): LibSQLStorage {
+ const dbUrl = url ?? test_db_url();
+ return new LibSQLStorage({ client, url: dbUrl });
+ }
+
+ export async function reset_tables(client: Client): Promise<void> {
+ await client.execute(`DELETE FROM "${THREAD_EVENTS_TABLE}"`);
+ await client.execute(`DELETE FROM "${THREADS_TABLE}"`);
+ await client.execute(`DELETE FROM "${MEMORIES_TABLE}"`);
+ }
+
+ /**
+ * Create mock registries for thread hydration tests.
+ */
+ export function create_mock_registries(): {
+ agents: IAgentRegistry;
+ models: IModelRegistry;
+ } {
+ const agentMap = new Map<string, any>();
+ const modelMap = new Map<string, any>();
+
+ // Add a test agent
+ agentMap.set("test-agent", {
+ id: "test-agent",
+ kind: "llm",
+ name: "Test Agent",
+ });
+
+ // Add a test model
+ modelMap.set("test/model", {
+ id: "test/model",
+ provider: "test",
+ modelId: "model",
+ });
+
+ return {
+ agents: {
+ get: (id: string) => agentMap.get(id),
+ set: (id: string, agent: any) => agentMap.set(id, agent),
+ has: (id: string) => agentMap.has(id),
+ list: () => Array.from(agentMap.values()),
+ } as IAgentRegistry,
+ models: {
+ get: (id: string) => modelMap.get(id),
+ set: (id: string, model: any) => modelMap.set(id, model),
+ has: (id: string) => modelMap.has(id),
+ list: () => Array.from(modelMap.values()),
+ } as IModelRegistry,
+ };
+ }
+
+ /**
+ * Generate a unique ID for tests.
+ */
+ export function testid(prefix: string = "test"): string {
+ return `${prefix}-${Date.now()}-${Math.random().toString(36).slice(2, 8)}`;
+ }
package/src/__tests__/migrations.test.ts ADDED
@@ -0,0 +1,114 @@
+ import { describe, it, expect, beforeEach, afterEach } from "vitest";
+ import type { Client } from "@libsql/client";
+
+ import {
+ create_client,
+ create_storage,
+ THREADS_TABLE,
+ MEMORIES_TABLE,
+ } from "./helpers";
+
+ describe("LibSQL migrations", () => {
+ let client: Client;
+
+ beforeEach(() => {
+ client = create_client();
+ });
+
+ afterEach(() => {
+ client.close();
+ });
+
+ it("creates tables with mapped SQLite types", async () => {
+ const storage = create_storage(client);
+ await storage.memories.list();
+
+ // Check threads table schema
+ const threadInfo = await client.execute(
+ `PRAGMA table_info("${THREADS_TABLE}")`,
+ );
+ const threadCols = Object.fromEntries(
+ threadInfo.rows.map((r) => [r.name, r.type]),
+ );
+
+ // JSON fields should be TEXT (not JSONB)
+ expect(threadCols.context).toBe("TEXT");
+ expect(threadCols.metadata).toBe("TEXT");
+ expect(threadCols.state).toBe("TEXT");
+
+ // Check memories table schema
+ const memoryInfo = await client.execute(
+ `PRAGMA table_info("${MEMORIES_TABLE}")`,
+ );
+ const memoryCols = Object.fromEntries(
+ memoryInfo.rows.map((r) => [r.name, r.type]),
+ );
+
+ // Boolean fields should be INTEGER
+ expect(memoryCols.wmem).toBe("INTEGER");
+ // JSON fields should be TEXT
+ expect(memoryCols.content).toBe("TEXT");
+ expect(memoryCols.metadata).toBe("TEXT");
+ });
+
+ it("applies column defaults correctly", async () => {
+ const storage = create_storage(client);
+ await storage.memories.list();
+
+ // Check threads table defaults
+ const threadInfo = await client.execute(
+ `PRAGMA table_info("${THREADS_TABLE}")`,
+ );
+ const tickCol = threadInfo.rows.find((r) => r.name === "tick");
+ const namespaceCol = threadInfo.rows.find((r) => r.name === "namespace");
+
+ expect(tickCol?.dflt_value).toBe("0");
+ expect(namespaceCol?.dflt_value).toBe("'kernl'");
+
+ // Check memories table defaults
+ const memoryInfo = await client.execute(
+ `PRAGMA table_info("${MEMORIES_TABLE}")`,
+ );
+ const wmemCol = memoryInfo.rows.find((r) => r.name === "wmem");
+
+ expect(wmemCol?.dflt_value).toBe("0");
+ });
+
+ it("creates indexes from table definitions", async () => {
+ const storage = create_storage(client);
+ await storage.memories.list();
+
+ // Check for indexes on threads table
+ const threadIndexes = await client.execute(
+ `PRAGMA index_list("${THREADS_TABLE}")`,
+ );
+ const threadIndexNames = threadIndexes.rows.map((r) => r.name as string);
+
+ // Should have index on namespace
+ expect(threadIndexNames.some((n) => n.includes("namespace"))).toBe(true);
+
+ // Check for indexes on memories table
+ const memoryIndexes = await client.execute(
+ `PRAGMA index_list("${MEMORIES_TABLE}")`,
+ );
+ const memoryIndexNames = memoryIndexes.rows.map((r) => r.name as string);
+
+ // Should have indexes for common queries
+ expect(memoryIndexNames.some((n) => n.includes("namespace"))).toBe(true);
+ });
+
+ it("creates foreign key constraint on thread_events", async () => {
+ const storage = create_storage(client);
+ await storage.memories.list();
+
+ const fkInfo = await client.execute(
+ 'PRAGMA foreign_key_list("kernl_thread_events")',
+ );
+
+ expect(fkInfo.rows.length).toBeGreaterThan(0);
+ const threadFk = fkInfo.rows.find((r) => r.table === THREADS_TABLE);
+ expect(threadFk).toBeDefined();
+ expect(threadFk?.from).toBe("tid");
+ expect(threadFk?.to).toBe("id");
+ });
+ });