@kernl-sdk/libsql 0.1.38 → 0.1.39
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.turbo/turbo-build.log +5 -4
- package/CHANGELOG.md +8 -0
- package/README.md +225 -0
- package/dist/__tests__/constraints.test.d.ts +2 -0
- package/dist/__tests__/constraints.test.d.ts.map +1 -0
- package/dist/__tests__/constraints.test.js +97 -0
- package/dist/__tests__/helpers.d.ts +36 -0
- package/dist/__tests__/helpers.d.ts.map +1 -0
- package/dist/__tests__/helpers.js +80 -0
- package/dist/__tests__/memory.create-get.test.d.ts +2 -0
- package/dist/__tests__/memory.create-get.test.d.ts.map +1 -0
- package/dist/__tests__/memory.create-get.test.js +8 -0
- package/dist/__tests__/memory.delete.test.d.ts +2 -0
- package/dist/__tests__/memory.delete.test.d.ts.map +1 -0
- package/dist/__tests__/memory.delete.test.js +6 -0
- package/dist/__tests__/memory.list.test.d.ts +2 -0
- package/dist/__tests__/memory.list.test.d.ts.map +1 -0
- package/dist/__tests__/memory.list.test.js +8 -0
- package/dist/__tests__/memory.update.test.d.ts +2 -0
- package/dist/__tests__/memory.update.test.d.ts.map +1 -0
- package/dist/__tests__/memory.update.test.js +8 -0
- package/dist/__tests__/migrations.test.d.ts +2 -0
- package/dist/__tests__/migrations.test.d.ts.map +1 -0
- package/dist/__tests__/migrations.test.js +68 -0
- package/dist/__tests__/row-codecs.test.d.ts +2 -0
- package/dist/__tests__/row-codecs.test.d.ts.map +1 -0
- package/dist/__tests__/row-codecs.test.js +175 -0
- package/dist/__tests__/sql-utils.test.d.ts +2 -0
- package/dist/__tests__/sql-utils.test.d.ts.map +1 -0
- package/dist/__tests__/sql-utils.test.js +45 -0
- package/dist/__tests__/storage.init.test.d.ts +2 -0
- package/dist/__tests__/storage.init.test.d.ts.map +1 -0
- package/dist/__tests__/storage.init.test.js +63 -0
- package/dist/__tests__/thread.lifecycle.test.d.ts +2 -0
- package/dist/__tests__/thread.lifecycle.test.d.ts.map +1 -0
- package/dist/__tests__/thread.lifecycle.test.js +172 -0
- package/dist/__tests__/transaction.test.d.ts +2 -0
- package/dist/__tests__/transaction.test.d.ts.map +1 -0
- package/dist/__tests__/transaction.test.js +16 -0
- package/dist/__tests__/utils.test.d.ts +2 -0
- package/dist/__tests__/utils.test.d.ts.map +1 -0
- package/dist/__tests__/utils.test.js +31 -0
- package/dist/client.d.ts +46 -0
- package/dist/client.d.ts.map +1 -0
- package/dist/client.js +46 -0
- package/dist/index.d.ts +5 -0
- package/dist/index.d.ts.map +1 -1
- package/dist/index.js +5 -1
- package/dist/memory/__tests__/create-get.test.d.ts +2 -0
- package/dist/memory/__tests__/create-get.test.d.ts.map +1 -0
- package/dist/memory/__tests__/create-get.test.js +126 -0
- package/dist/memory/__tests__/delete.test.d.ts +2 -0
- package/dist/memory/__tests__/delete.test.d.ts.map +1 -0
- package/dist/memory/__tests__/delete.test.js +96 -0
- package/dist/memory/__tests__/list.test.d.ts +2 -0
- package/dist/memory/__tests__/list.test.d.ts.map +1 -0
- package/dist/memory/__tests__/list.test.js +168 -0
- package/dist/memory/__tests__/sql.test.d.ts +2 -0
- package/dist/memory/__tests__/sql.test.d.ts.map +1 -0
- package/dist/memory/__tests__/sql.test.js +159 -0
- package/dist/memory/__tests__/update.test.d.ts +2 -0
- package/dist/memory/__tests__/update.test.d.ts.map +1 -0
- package/dist/memory/__tests__/update.test.js +113 -0
- package/dist/memory/row.d.ts +11 -0
- package/dist/memory/row.d.ts.map +1 -0
- package/dist/memory/row.js +29 -0
- package/dist/memory/sql.d.ts +34 -0
- package/dist/memory/sql.d.ts.map +1 -0
- package/dist/memory/sql.js +109 -0
- package/dist/memory/store.d.ts +41 -0
- package/dist/memory/store.d.ts.map +1 -0
- package/dist/memory/store.js +132 -0
- package/dist/migrations.d.ts +32 -0
- package/dist/migrations.d.ts.map +1 -0
- package/dist/migrations.js +157 -0
- package/dist/sql.d.ts +28 -0
- package/dist/sql.d.ts.map +1 -0
- package/dist/sql.js +22 -0
- package/dist/storage.d.ts +75 -0
- package/dist/storage.d.ts.map +1 -0
- package/dist/storage.js +123 -0
- package/dist/thread/__tests__/append.test.d.ts +2 -0
- package/dist/thread/__tests__/append.test.d.ts.map +1 -0
- package/dist/thread/__tests__/append.test.js +141 -0
- package/dist/thread/__tests__/history.test.d.ts +2 -0
- package/dist/thread/__tests__/history.test.d.ts.map +1 -0
- package/dist/thread/__tests__/history.test.js +146 -0
- package/dist/thread/__tests__/sql.test.d.ts +2 -0
- package/dist/thread/__tests__/sql.test.d.ts.map +1 -0
- package/dist/thread/__tests__/sql.test.js +129 -0
- package/dist/thread/__tests__/store.test.d.ts +2 -0
- package/dist/thread/__tests__/store.test.d.ts.map +1 -0
- package/dist/thread/__tests__/store.test.js +170 -0
- package/dist/thread/row.d.ts +19 -0
- package/dist/thread/row.d.ts.map +1 -0
- package/dist/thread/row.js +65 -0
- package/dist/thread/sql.d.ts +33 -0
- package/dist/thread/sql.d.ts.map +1 -0
- package/dist/thread/sql.js +112 -0
- package/dist/thread/store.d.ts +67 -0
- package/dist/thread/store.d.ts.map +1 -0
- package/dist/thread/store.js +282 -0
- package/dist/utils.d.ts +10 -0
- package/dist/utils.d.ts.map +1 -0
- package/dist/utils.js +21 -0
- package/package.json +15 -11
- package/src/__tests__/constraints.test.ts +123 -0
- package/src/__tests__/helpers.ts +98 -0
- package/src/__tests__/migrations.test.ts +114 -0
- package/src/__tests__/row-codecs.test.ts +201 -0
- package/src/__tests__/sql-utils.test.ts +52 -0
- package/src/__tests__/storage.init.test.ts +92 -0
- package/src/__tests__/thread.lifecycle.test.ts +234 -0
- package/src/__tests__/transaction.test.ts +25 -0
- package/src/__tests__/utils.test.ts +38 -0
- package/src/client.ts +71 -0
- package/src/index.ts +10 -0
- package/src/memory/__tests__/create-get.test.ts +161 -0
- package/src/memory/__tests__/delete.test.ts +124 -0
- package/src/memory/__tests__/list.test.ts +198 -0
- package/src/memory/__tests__/sql.test.ts +186 -0
- package/src/memory/__tests__/update.test.ts +148 -0
- package/src/memory/row.ts +36 -0
- package/src/memory/sql.ts +142 -0
- package/src/memory/store.ts +173 -0
- package/src/migrations.ts +206 -0
- package/src/sql.ts +35 -0
- package/src/storage.ts +170 -0
- package/src/thread/__tests__/append.test.ts +201 -0
- package/src/thread/__tests__/history.test.ts +198 -0
- package/src/thread/__tests__/sql.test.ts +154 -0
- package/src/thread/__tests__/store.test.ts +219 -0
- package/src/thread/row.ts +77 -0
- package/src/thread/sql.ts +153 -0
- package/src/thread/store.ts +381 -0
- package/src/utils.ts +20 -0
- package/LICENSE +0 -201
package/src/thread/__tests__/history.test.ts
@@ -0,0 +1,198 @@
+import { describe, it, expect, beforeEach, afterEach } from "vitest";
+import type { Client } from "@libsql/client";
+import { message, IN_PROGRESS, COMPLETED } from "@kernl-sdk/protocol";
+import type { ThreadEvent } from "kernl/internal";
+
+import {
+  create_client,
+  create_storage,
+  create_mock_registries,
+  testid,
+} from "../../__tests__/helpers";
+import { LibSQLStorage } from "../../storage";
+
+/** Create a message ThreadEvent */
+function evt(
+  id: string,
+  tid: string,
+  seq: number,
+  timestamp: Date,
+  role: "user" | "assistant" = "user",
+): ThreadEvent {
+  return {
+    ...message({ role, text: `msg-${seq}` }),
+    id,
+    tid,
+    seq,
+    timestamp,
+    metadata: {},
+  } as ThreadEvent;
+}
+
+/** Create a tool.call ThreadEvent */
+function toolCallEvt(
+  id: string,
+  tid: string,
+  seq: number,
+  timestamp: Date,
+): ThreadEvent {
+  return {
+    kind: "tool.call",
+    id,
+    tid,
+    seq,
+    timestamp,
+    callId: `call-${seq}`,
+    toolId: "test-tool",
+    state: IN_PROGRESS,
+    arguments: "{}",
+    metadata: {},
+  } as ThreadEvent;
+}
+
+/** Create a tool.result ThreadEvent */
+function toolResultEvt(
+  id: string,
+  tid: string,
+  seq: number,
+  timestamp: Date,
+): ThreadEvent {
+  return {
+    kind: "tool.result",
+    id,
+    tid,
+    seq,
+    timestamp,
+    callId: `call-${seq - 1}`,
+    toolId: "test-tool",
+    state: COMPLETED,
+    result: null,
+    error: null,
+    metadata: {},
+  } as ThreadEvent;
+}
+
+describe("LibSQLThreadStore history", () => {
+  let client: Client;
+  let storage: LibSQLStorage;
+  let tid: string;
+
+  beforeEach(async () => {
+    client = create_client();
+    storage = create_storage(client);
+    storage.bind(create_mock_registries());
+    await storage.memories.list(); // init
+
+    tid = testid("thread");
+    await storage.threads.insert({
+      id: tid,
+      namespace: "default",
+      agentId: "test-agent",
+      model: "test/model",
+    });
+
+    // Add test events
+    const now = Date.now();
+    await storage.threads.append([
+      evt("e1", tid, 1, new Date(now)),
+      toolCallEvt("e2", tid, 2, new Date(now + 1)),
+      toolResultEvt("e3", tid, 3, new Date(now + 2)),
+      evt("e4", tid, 4, new Date(now + 3)),
+      evt("e5", tid, 5, new Date(now + 4)),
+    ]);
+  });
+
+  afterEach(() => {
+    client.close();
+  });
+
+  it("returns events in asc order by default", async () => {
+    const history = await storage.threads.history(tid);
+
+    expect(history.length).toBe(5);
+    expect(history[0].seq).toBe(1);
+    expect(history[4].seq).toBe(5);
+  });
+
+  it("filters by after seq", async () => {
+    const history = await storage.threads.history(tid, { after: 2 });
+
+    expect(history.length).toBe(3);
+    expect(history[0].seq).toBe(3);
+    expect(history[2].seq).toBe(5);
+  });
+
+  it("filters by kinds", async () => {
+    const history = await storage.threads.history(tid, {
+      kinds: ["message"],
+    });
+
+    expect(history.length).toBe(3);
+    expect(history.every((e) => e.kind === "message")).toBe(true);
+  });
+
+  it("filters by multiple kinds", async () => {
+    const history = await storage.threads.history(tid, {
+      kinds: ["tool.call", "tool.result"],
+    });
+
+    expect(history.length).toBe(2);
+    expect(history.map((e) => e.kind)).toEqual(["tool.call", "tool.result"]);
+  });
+
+  it("supports desc order and limit", async () => {
+    const history = await storage.threads.history(tid, {
+      order: "desc",
+      limit: 2,
+    });
+
+    expect(history.length).toBe(2);
+    expect(history[0].seq).toBe(5);
+    expect(history[1].seq).toBe(4);
+  });
+
+  it("combines after, kinds, and limit", async () => {
+    const history = await storage.threads.history(tid, {
+      after: 1,
+      kinds: ["message"],
+      limit: 1,
+      order: "asc",
+    });
+
+    expect(history.length).toBe(1);
+    expect(history[0].seq).toBe(4); // First message after seq 1
+  });
+
+  it("returns empty array for non-existent thread", async () => {
+    const history = await storage.threads.history("nonexistent");
+    expect(history).toEqual([]);
+  });
+
+  it("includes history when getting thread with include option", async () => {
+    const thread = await storage.threads.get(tid, { history: true });
+
+    // Thread is returned with history loaded (history is private,
+    // so we verify via get returning non-null and separate history() call)
+    expect(thread).not.toBeNull();
+    expect(thread?.tid).toBe(tid);
+
+    // Verify events exist via direct history query
+    const history = await storage.threads.history(tid);
+    expect(history.length).toBe(5);
+  });
+
+  it("respects history options when getting thread", async () => {
+    const thread = await storage.threads.get(tid, {
+      history: { after: 3, limit: 2 },
+    });
+
+    // Thread is returned with filtered history loaded
+    expect(thread).not.toBeNull();
+    expect(thread?.tid).toBe(tid);
+
+    // Verify filtered history via direct query with same options
+    const history = await storage.threads.history(tid, { after: 3, limit: 2 });
+    expect(history.length).toBe(2);
+    expect(history[0].seq).toBe(4);
+  });
+});
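The tests above rely on the shared helpers in package/src/__tests__/helpers.ts (+98, not included in this excerpt). As a hypothetical sketch of what `create_client()` plausibly wraps using only the public `@libsql/client` API, each test would get an isolated in-memory database, which fits the per-test `client.close()` teardown; `create_storage` and `create_mock_registries` are package-internal and their signatures are not shown here.

```ts
// Hypothetical sketch only: the real helpers.ts is not part of this excerpt.
// createClient({ url: ":memory:" }) opens an in-memory SQLite database.
import { createClient, type Client } from "@libsql/client";

export function create_client(): Client {
  return createClient({ url: ":memory:" });
}
```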
package/src/thread/__tests__/sql.test.ts
@@ -0,0 +1,154 @@
+import { describe, it, expect } from "vitest";
+
+import { SQL_WHERE, SQL_ORDER, SQL_UPDATE } from "../sql";
+
+describe("LibSQL thread SQL codecs", () => {
+  describe("SQL_WHERE", () => {
+    it("returns empty clause when no filters", () => {
+      const result = SQL_WHERE.encode({ filter: undefined });
+      expect(result.sql).toBe("");
+      expect(result.params).toEqual([]);
+    });
+
+    it("encodes namespace filter", () => {
+      const result = SQL_WHERE.encode({
+        filter: { namespace: "default" },
+      });
+      expect(result.sql).toBe("namespace = ?");
+      expect(result.params).toEqual(["default"]);
+    });
+
+    it("encodes agentId filter", () => {
+      const result = SQL_WHERE.encode({
+        filter: { agentId: "agent-1" },
+      });
+      expect(result.sql).toBe("agent_id = ?");
+      expect(result.params).toEqual(["agent-1"]);
+    });
+
+    it("encodes single state filter", () => {
+      const result = SQL_WHERE.encode({
+        filter: { state: "stopped" },
+      });
+      expect(result.sql).toBe("state = ?");
+      expect(result.params).toEqual(["stopped"]);
+    });
+
+    it("encodes state array filter with IN clause", () => {
+      const result = SQL_WHERE.encode({
+        filter: { state: ["stopped", "running", "interruptible"] },
+      });
+      expect(result.sql).toBe("state IN (?, ?, ?)");
+      expect(result.params).toEqual(["stopped", "running", "interruptible"]);
+    });
+
+    it("encodes parentTaskId filter", () => {
+      const result = SQL_WHERE.encode({
+        filter: { parentTaskId: "task-1" },
+      });
+      expect(result.sql).toBe("parent_task_id = ?");
+      expect(result.params).toEqual(["task-1"]);
+    });
+
+    it("encodes date range filters", () => {
+      const after = new Date("2024-01-01");
+      const before = new Date("2024-12-31");
+
+      const result = SQL_WHERE.encode({
+        filter: { createdAfter: after, createdBefore: before },
+      });
+
+      // Uses > and < not >= and <=
+      expect(result.sql).toBe("created_at > ? AND created_at < ?");
+      expect(result.params).toEqual([after.getTime(), before.getTime()]);
+    });
+
+    it("combines multiple filters with AND", () => {
+      const result = SQL_WHERE.encode({
+        filter: {
+          namespace: "default",
+          agentId: "agent-1",
+          state: "stopped",
+        },
+      });
+
+      // Order depends on filter processing order, check contains
+      expect(result.sql).toContain("namespace = ?");
+      expect(result.sql).toContain("agent_id = ?");
+      expect(result.sql).toContain("state = ?");
+      expect(result.sql).toContain(" AND ");
+      expect(result.params).toContain("default");
+      expect(result.params).toContain("agent-1");
+      expect(result.params).toContain("stopped");
+    });
+  });
+
+  describe("SQL_ORDER", () => {
+    it("returns default ordering", () => {
+      const result = SQL_ORDER.encode({ order: undefined });
+      expect(result).toBe("created_at DESC");
+    });
+
+    it("encodes createdAt asc", () => {
+      const result = SQL_ORDER.encode({ order: { createdAt: "asc" } });
+      expect(result).toBe("created_at ASC");
+    });
+
+    it("encodes createdAt desc", () => {
+      const result = SQL_ORDER.encode({ order: { createdAt: "desc" } });
+      expect(result).toBe("created_at DESC");
+    });
+
+    it("encodes updatedAt asc", () => {
+      const result = SQL_ORDER.encode({ order: { updatedAt: "asc" } });
+      expect(result).toBe("updated_at ASC");
+    });
+
+    it("encodes updatedAt desc", () => {
+      const result = SQL_ORDER.encode({ order: { updatedAt: "desc" } });
+      expect(result).toBe("updated_at DESC");
+    });
+  });
+
+  describe("SQL_UPDATE", () => {
+    it("encodes tick update", () => {
+      const result = SQL_UPDATE.encode({ patch: { tick: 5 } });
+      expect(result.sql).toContain("tick = ?");
+      expect(result.params).toContain(5);
+    });
+
+    it("encodes state update", () => {
+      const result = SQL_UPDATE.encode({ patch: { state: "running" } });
+      expect(result.sql).toContain("state = ?");
+      expect(result.params).toContain("running");
+    });
+
+    it("encodes metadata update with JSON stringify", () => {
+      const metadata = { title: "Test" };
+      const result = SQL_UPDATE.encode({ patch: { metadata } });
+      expect(result.sql).toContain("metadata = ?");
+      expect(result.params).toContain(JSON.stringify(metadata));
+    });
+
+    it("always includes updated_at", () => {
+      const before = Date.now();
+      const result = SQL_UPDATE.encode({ patch: { tick: 1 } });
+      const after = Date.now();
+
+      expect(result.sql).toContain("updated_at = ?");
+      const updatedAt = result.params.find(
+        (p) => typeof p === "number" && p >= before && p <= after,
+      );
+      expect(updatedAt).toBeDefined();
+    });
+
+    it("combines multiple updates", () => {
+      const result = SQL_UPDATE.encode({
+        patch: { tick: 3, state: "interruptible" },
+      });
+      expect(result.sql).toContain("tick = ?");
+      expect(result.sql).toContain("state = ?");
+      expect(result.sql).toContain("updated_at = ?");
+    });
+  });
+});
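For orientation, the fragments exercised above compose into a parameterized libsql query roughly as follows. This is a sketch only, using the public `@libsql/client` API and the `kernl_threads` table name seen elsewhere in this diff; the actual query assembly lives in package/src/thread/store.ts (not shown here), and `listThreadRows` is an invented name for illustration.

```ts
import type { Client, InArgs } from "@libsql/client";
import { SQL_WHERE, SQL_ORDER } from "../sql";

// Derive the encoder input types rather than guessing the package's type names.
type WhereInput = Parameters<typeof SQL_WHERE.encode>[0];
type OrderInput = Parameters<typeof SQL_ORDER.encode>[0];

// Sketch: build "SELECT ... [WHERE ...] ORDER BY ..." from the codec outputs.
async function listThreadRows(
  client: Client,
  where: WhereInput,
  order: OrderInput,
) {
  const w = SQL_WHERE.encode(where); // e.g. { sql: "namespace = ?", params: ["default"] }
  const o = SQL_ORDER.encode(order); // e.g. "created_at DESC"
  const sql = [
    "SELECT * FROM kernl_threads",
    w.sql ? `WHERE ${w.sql}` : "",
    `ORDER BY ${o}`,
  ]
    .filter(Boolean)
    .join(" ");
  const result = await client.execute({ sql, args: w.params as InArgs });
  return result.rows;
}
```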
package/src/thread/__tests__/store.test.ts
@@ -0,0 +1,219 @@
+import { describe, it, expect, beforeEach, afterEach } from "vitest";
+import type { Client } from "@libsql/client";
+import { message } from "@kernl-sdk/protocol";
+import type { ThreadEvent } from "kernl/internal";
+
+import {
+  create_client,
+  create_storage,
+  create_mock_registries,
+  testid,
+} from "../../__tests__/helpers";
+import { LibSQLStorage } from "../../storage";
+
+/** Create a message ThreadEvent */
+function evt(
+  id: string,
+  tid: string,
+  seq: number,
+  timestamp: Date,
+): ThreadEvent {
+  return {
+    ...message({ role: "user", text: `msg-${seq}` }),
+    id,
+    tid,
+    seq,
+    timestamp,
+    metadata: {},
+  } as ThreadEvent;
+}
+
+describe("LibSQLThreadStore", () => {
+  let client: Client;
+  let storage: LibSQLStorage;
+
+  beforeEach(async () => {
+    client = create_client();
+    storage = create_storage(client);
+    storage.bind(create_mock_registries());
+    await storage.memories.list(); // init
+  });
+
+  afterEach(() => {
+    client.close();
+  });
+
+  it("inserts and gets a thread", async () => {
+    const tid = testid("thread");
+
+    const inserted = await storage.threads.insert({
+      id: tid,
+      namespace: "default",
+      agentId: "test-agent",
+      model: "test/model",
+      context: { userId: "user-1" },
+      metadata: { title: "Test" },
+    });
+
+    expect(inserted.tid).toBe(tid);
+    expect(inserted.namespace).toBe("default");
+
+    const found = await storage.threads.get(tid);
+    expect(found).not.toBeNull();
+    expect(found?.tid).toBe(tid);
+    expect(found?.metadata).toEqual({ title: "Test" });
+  });
+
+  it("returns null for non-existent thread", async () => {
+    const found = await storage.threads.get("nonexistent");
+    expect(found).toBeNull();
+  });
+
+  it("updates thread fields and metadata", async () => {
+    const tid = testid("thread");
+
+    await storage.threads.insert({
+      id: tid,
+      namespace: "default",
+      agentId: "test-agent",
+      model: "test/model",
+    });
+
+    const updated = await storage.threads.update(tid, {
+      tick: 5,
+      state: "running",
+      metadata: { title: "Updated", score: 100 },
+    });
+
+    expect(updated._tick).toBe(5);
+    expect(updated.state).toBe("running");
+    expect(updated.metadata).toEqual({ title: "Updated", score: 100 });
+
+    // Verify persisted
+    const found = await storage.threads.get(tid);
+    expect(found?._tick).toBe(5);
+    expect(found?.state).toBe("running");
+  });
+
+  it("lists threads with filters and pagination", async () => {
+    // Create multiple threads
+    await storage.threads.insert({
+      id: testid("t1"),
+      namespace: "ns1",
+      agentId: "test-agent",
+      model: "test/model",
+      state: "stopped",
+    });
+    await storage.threads.insert({
+      id: testid("t2"),
+      namespace: "ns1",
+      agentId: "test-agent",
+      model: "test/model",
+      state: "running",
+    });
+    await storage.threads.insert({
+      id: testid("t3"),
+      namespace: "ns2",
+      agentId: "test-agent",
+      model: "test/model",
+      state: "stopped",
+    });
+
+    // Filter by namespace
+    const ns1Threads = await storage.threads.list({
+      filter: { namespace: "ns1" },
+    });
+    expect(ns1Threads.length).toBe(2);
+
+    // Filter by state
+    const stoppedThreads = await storage.threads.list({
+      filter: { state: "stopped" },
+    });
+    expect(stoppedThreads.length).toBe(2);
+
+    // Filter by state array
+    const activeThreads = await storage.threads.list({
+      filter: { state: ["stopped", "running"] },
+    });
+    expect(activeThreads.length).toBe(3);
+
+    // Pagination
+    const page1 = await storage.threads.list({ limit: 2 });
+    expect(page1.length).toBe(2);
+
+    const page2 = await storage.threads.list({ limit: 2, offset: 2 });
+    expect(page2.length).toBe(1);
+  });
+
+  it("orders threads by createdAt", async () => {
+    const t1 = testid("t1");
+    const t2 = testid("t2");
+
+    await storage.threads.insert({
+      id: t1,
+      namespace: "default",
+      agentId: "test-agent",
+      model: "test/model",
+    });
+
+    // Small delay to ensure different timestamps
+    await new Promise((r) => setTimeout(r, 10));
+
+    await storage.threads.insert({
+      id: t2,
+      namespace: "default",
+      agentId: "test-agent",
+      model: "test/model",
+    });
+
+    const asc = await storage.threads.list({
+      order: { createdAt: "asc" },
+    });
+    expect(asc[0].tid).toBe(t1);
+    expect(asc[1].tid).toBe(t2);
+
+    const desc = await storage.threads.list({
+      order: { createdAt: "desc" },
+    });
+    expect(desc[0].tid).toBe(t2);
+    expect(desc[1].tid).toBe(t1);
+  });
+
+  it("deletes thread", async () => {
+    const tid = testid("thread");
+
+    await storage.threads.insert({
+      id: tid,
+      namespace: "default",
+      agentId: "test-agent",
+      model: "test/model",
+    });
+
+    // Add some events
+    await storage.threads.append([evt("evt-1", tid, 1, new Date())]);
+
+    await storage.threads.delete(tid);
+
+    const found = await storage.threads.get(tid);
+    expect(found).toBeNull();
+
+    // Events should also be deleted (cascade)
+    const history = await storage.threads.history(tid);
+    expect(history.length).toBe(0);
+  });
+
+  it("returns null when agent/model not in registry", async () => {
+    const tid = testid("thread");
+
+    // Insert with non-existent agent
+    await client.execute({
+      sql: `INSERT INTO kernl_threads (id, namespace, agent_id, model, context, tick, state, created_at, updated_at)
+            VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)`,
+      args: [tid, "default", "unknown-agent", "unknown/model", "{}", 0, "idle", Date.now(), Date.now()],
+    });
+
+    // Get should return null (graceful degradation)
+    const found = await storage.threads.get(tid);
+    expect(found).toBeNull();
+  });
+});
package/src/thread/row.ts
@@ -0,0 +1,77 @@
+/**
+ * LibSQL row codecs for thread data.
+ */
+
+import type { Row } from "@libsql/client";
+import type { Codec } from "@kernl-sdk/shared/lib";
+import type { ThreadState } from "kernl";
+import type { ThreadRecord, ThreadEventRecord } from "@kernl-sdk/storage";
+
+import { parsejson } from "../utils";
+
+/**
+ * Codec for converting LibSQL rows to ThreadRecord.
+ */
+export const RowToThreadRecord: Codec<Row, ThreadRecord> = {
+  encode(row: Row): ThreadRecord {
+    return {
+      id: row.id as string,
+      namespace: row.namespace as string,
+      agent_id: row.agent_id as string,
+      model: row.model as string,
+      context: parsejson<Record<string, unknown>>(row.context) ?? {},
+      tick: row.tick as number,
+      state: row.state as ThreadState,
+      parent_task_id: row.parent_task_id as string | null,
+      metadata: parsejson<Record<string, unknown>>(row.metadata),
+      created_at: row.created_at as number,
+      updated_at: row.updated_at as number,
+    };
+  },
+
+  decode(): Row {
+    throw new Error("RowToThreadRecord.decode not implemented");
+  },
+};
+
+/**
+ * Codec for converting LibSQL rows (from JOIN query) to ThreadEventRecord.
+ */
+export const RowToEventRecord: Codec<Row, ThreadEventRecord> = {
+  encode(row: Row): ThreadEventRecord {
+    return {
+      id: row.event_id as string,
+      tid: row.event_tid as string,
+      seq: row.seq as number,
+      kind: row.event_kind as string,
+      timestamp: Number(row.timestamp),
+      data: parsejson<Record<string, unknown>>(row.data),
+      metadata: parsejson<Record<string, unknown>>(row.event_metadata),
+    } as ThreadEventRecord;
+  },
+
+  decode(): Row {
+    throw new Error("RowToEventRecord.decode not implemented");
+  },
+};
+
+/**
+ * Codec for converting LibSQL rows to ThreadEventRecord (direct query).
+ */
+export const RowToEventRecordDirect: Codec<Row, ThreadEventRecord> = {
+  encode(row: Row): ThreadEventRecord {
+    return {
+      id: row.id as string,
+      tid: row.tid as string,
+      seq: row.seq as number,
+      kind: row.kind as string,
+      timestamp: Number(row.timestamp),
+      data: parsejson<Record<string, unknown>>(row.data),
+      metadata: parsejson<Record<string, unknown>>(row.metadata),
+    } as ThreadEventRecord;
+  },
+
+  decode(): Row {
+    throw new Error("RowToEventRecordDirect.decode not implemented");
+  },
+};
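The codecs above depend on `parsejson` from package/src/utils.ts (+20 lines, not included in this excerpt). A minimal implementation consistent with how it is used here, where `?? {}` covers a null result, might look like the sketch below; treat it as an assumption rather than the shipped helper.

```ts
// Hypothetical sketch of parsejson; the shipped helper lives in
// package/src/utils.ts and may differ.
export function parsejson<T>(value: unknown): T | null {
  if (typeof value !== "string" || value.length === 0) return null;
  try {
    return JSON.parse(value) as T;
  } catch {
    return null; // tolerate malformed JSON instead of throwing
  }
}
```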