@voltagent/libsql 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +238 -0
- package/dist/index.d.mts +519 -0
- package/dist/index.d.ts +519 -0
- package/dist/index.js +3021 -0
- package/dist/index.js.map +1 -0
- package/dist/index.mjs +2998 -0
- package/dist/index.mjs.map +1 -0
- package/package.json +53 -0
package/dist/index.mjs
ADDED
|
@@ -0,0 +1,2998 @@
|
|
|
1
|
+
var __defProp = Object.defineProperty;
|
|
2
|
+
var __name = (target, value) => __defProp(target, "name", { value, configurable: true });
|
|
3
|
+
|
|
4
|
+
// src/index.ts
|
|
5
|
+
import { existsSync, mkdirSync } from "fs";
|
|
6
|
+
import { dirname } from "path";
|
|
7
|
+
import { createClient } from "@libsql/client";
|
|
8
|
+
import { safeJsonParse } from "@voltagent/core";
|
|
9
|
+
import { safeStringify as safeStringify2 } from "@voltagent/internal/utils";
|
|
10
|
+
import { createPinoLogger as createPinoLogger2 } from "@voltagent/logger";
|
|
11
|
+
|
|
12
|
+
// src/migrations/add-suspended-status.ts
|
|
13
|
+
async function addSuspendedStatusMigration(db, tablePrefix = "voltagent_memory") {
|
|
14
|
+
const migrationName = "add_suspended_status_to_workflow_history";
|
|
15
|
+
await db.execute(`
|
|
16
|
+
CREATE TABLE IF NOT EXISTS ${tablePrefix}_migrations (
|
|
17
|
+
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
|
18
|
+
name TEXT NOT NULL UNIQUE,
|
|
19
|
+
applied_at TEXT DEFAULT CURRENT_TIMESTAMP
|
|
20
|
+
)
|
|
21
|
+
`);
|
|
22
|
+
const result = await db.execute({
|
|
23
|
+
sql: `SELECT * FROM ${tablePrefix}_migrations WHERE name = ?`,
|
|
24
|
+
args: [migrationName]
|
|
25
|
+
});
|
|
26
|
+
if (result.rows.length > 0) {
|
|
27
|
+
return;
|
|
28
|
+
}
|
|
29
|
+
try {
|
|
30
|
+
const needsMigration = await checkIfSuspendedStatusNeeded(db, tablePrefix);
|
|
31
|
+
if (!needsMigration) {
|
|
32
|
+
} else {
|
|
33
|
+
await performSuspendedStatusMigration(db, tablePrefix);
|
|
34
|
+
}
|
|
35
|
+
await db.execute({
|
|
36
|
+
sql: `INSERT INTO ${tablePrefix}_migrations (name) VALUES (?)`,
|
|
37
|
+
args: [migrationName]
|
|
38
|
+
});
|
|
39
|
+
} catch (error) {
|
|
40
|
+
console.error(`[Migration] Failed to apply '${migrationName}':`, error);
|
|
41
|
+
throw error;
|
|
42
|
+
}
|
|
43
|
+
}
|
|
44
|
+
__name(addSuspendedStatusMigration, "addSuspendedStatusMigration");
|
|
45
|
+
async function checkIfSuspendedStatusNeeded(db, tablePrefix) {
|
|
46
|
+
try {
|
|
47
|
+
const testId = `test-suspended-check-${Date.now()}`;
|
|
48
|
+
await db.execute({
|
|
49
|
+
sql: `
|
|
50
|
+
INSERT INTO ${tablePrefix}_workflow_history
|
|
51
|
+
(id, name, workflow_id, status, start_time)
|
|
52
|
+
VALUES (?, 'test', 'test', 'suspended', datetime('now'))
|
|
53
|
+
`,
|
|
54
|
+
args: [testId]
|
|
55
|
+
});
|
|
56
|
+
await db.execute({
|
|
57
|
+
sql: `DELETE FROM ${tablePrefix}_workflow_history WHERE id = ?`,
|
|
58
|
+
args: [testId]
|
|
59
|
+
});
|
|
60
|
+
return false;
|
|
61
|
+
} catch (error) {
|
|
62
|
+
if (error.message?.includes("CHECK constraint failed")) {
|
|
63
|
+
return true;
|
|
64
|
+
}
|
|
65
|
+
throw error;
|
|
66
|
+
}
|
|
67
|
+
}
|
|
68
|
+
__name(checkIfSuspendedStatusNeeded, "checkIfSuspendedStatusNeeded");
|
|
69
|
+
async function performSuspendedStatusMigration(db, tablePrefix) {
|
|
70
|
+
await db.execute("BEGIN TRANSACTION");
|
|
71
|
+
try {
|
|
72
|
+
await db.execute(`
|
|
73
|
+
CREATE TABLE ${tablePrefix}_workflow_history_temp (
|
|
74
|
+
id TEXT PRIMARY KEY,
|
|
75
|
+
name TEXT NOT NULL,
|
|
76
|
+
workflow_id TEXT NOT NULL,
|
|
77
|
+
status TEXT NOT NULL CHECK (status IN ('running', 'completed', 'error', 'cancelled', 'suspended')),
|
|
78
|
+
start_time TEXT NOT NULL,
|
|
79
|
+
end_time TEXT,
|
|
80
|
+
input TEXT,
|
|
81
|
+
output TEXT,
|
|
82
|
+
user_id TEXT,
|
|
83
|
+
conversation_id TEXT,
|
|
84
|
+
metadata TEXT,
|
|
85
|
+
created_at TEXT DEFAULT CURRENT_TIMESTAMP,
|
|
86
|
+
updated_at TEXT DEFAULT CURRENT_TIMESTAMP
|
|
87
|
+
)
|
|
88
|
+
`);
|
|
89
|
+
await db.execute(`
|
|
90
|
+
INSERT INTO ${tablePrefix}_workflow_history_temp
|
|
91
|
+
SELECT * FROM ${tablePrefix}_workflow_history
|
|
92
|
+
`);
|
|
93
|
+
await db.execute(`DROP TABLE ${tablePrefix}_workflow_history`);
|
|
94
|
+
await db.execute(`
|
|
95
|
+
ALTER TABLE ${tablePrefix}_workflow_history_temp
|
|
96
|
+
RENAME TO ${tablePrefix}_workflow_history
|
|
97
|
+
`);
|
|
98
|
+
await db.execute(
|
|
99
|
+
`CREATE INDEX idx_${tablePrefix}_workflow_history_workflow_id ON ${tablePrefix}_workflow_history(workflow_id)`
|
|
100
|
+
);
|
|
101
|
+
await db.execute(
|
|
102
|
+
`CREATE INDEX idx_${tablePrefix}_workflow_history_status ON ${tablePrefix}_workflow_history(status)`
|
|
103
|
+
);
|
|
104
|
+
await db.execute(
|
|
105
|
+
`CREATE INDEX idx_${tablePrefix}_workflow_history_start_time ON ${tablePrefix}_workflow_history(start_time)`
|
|
106
|
+
);
|
|
107
|
+
await db.execute(
|
|
108
|
+
`CREATE INDEX idx_${tablePrefix}_workflow_history_user_id ON ${tablePrefix}_workflow_history(user_id)`
|
|
109
|
+
);
|
|
110
|
+
await db.execute(
|
|
111
|
+
`CREATE INDEX idx_${tablePrefix}_workflow_history_conversation_id ON ${tablePrefix}_workflow_history(conversation_id)`
|
|
112
|
+
);
|
|
113
|
+
await db.execute("COMMIT");
|
|
114
|
+
} catch (error) {
|
|
115
|
+
await db.execute("ROLLBACK");
|
|
116
|
+
throw error;
|
|
117
|
+
}
|
|
118
|
+
}
|
|
119
|
+
__name(performSuspendedStatusMigration, "performSuspendedStatusMigration");
|
|
120
|
+
|
|
121
|
+
// src/migrations/workflow-tables.ts
|
|
122
|
+
async function createWorkflowTables(db, tablePrefix = "voltagent_memory") {
|
|
123
|
+
await db.execute(`
|
|
124
|
+
CREATE TABLE IF NOT EXISTS ${tablePrefix}_workflow_history (
|
|
125
|
+
id TEXT PRIMARY KEY,
|
|
126
|
+
name TEXT NOT NULL,
|
|
127
|
+
workflow_id TEXT NOT NULL,
|
|
128
|
+
status TEXT NOT NULL CHECK (status IN ('running', 'completed', 'error', 'cancelled', 'suspended')),
|
|
129
|
+
start_time TEXT NOT NULL,
|
|
130
|
+
end_time TEXT,
|
|
131
|
+
input TEXT,
|
|
132
|
+
output TEXT,
|
|
133
|
+
user_id TEXT,
|
|
134
|
+
conversation_id TEXT,
|
|
135
|
+
metadata TEXT,
|
|
136
|
+
created_at TEXT DEFAULT CURRENT_TIMESTAMP,
|
|
137
|
+
updated_at TEXT DEFAULT CURRENT_TIMESTAMP
|
|
138
|
+
)
|
|
139
|
+
`);
|
|
140
|
+
await db.execute(
|
|
141
|
+
`CREATE INDEX IF NOT EXISTS idx_${tablePrefix}_workflow_history_workflow_id ON ${tablePrefix}_workflow_history(workflow_id)`
|
|
142
|
+
);
|
|
143
|
+
await db.execute(
|
|
144
|
+
`CREATE INDEX IF NOT EXISTS idx_${tablePrefix}_workflow_history_status ON ${tablePrefix}_workflow_history(status)`
|
|
145
|
+
);
|
|
146
|
+
await db.execute(
|
|
147
|
+
`CREATE INDEX IF NOT EXISTS idx_${tablePrefix}_workflow_history_start_time ON ${tablePrefix}_workflow_history(start_time)`
|
|
148
|
+
);
|
|
149
|
+
await db.execute(
|
|
150
|
+
`CREATE INDEX IF NOT EXISTS idx_${tablePrefix}_workflow_history_user_id ON ${tablePrefix}_workflow_history(user_id)`
|
|
151
|
+
);
|
|
152
|
+
await db.execute(
|
|
153
|
+
`CREATE INDEX IF NOT EXISTS idx_${tablePrefix}_workflow_history_conversation_id ON ${tablePrefix}_workflow_history(conversation_id)`
|
|
154
|
+
);
|
|
155
|
+
await db.execute(`
|
|
156
|
+
CREATE TABLE IF NOT EXISTS ${tablePrefix}_workflow_steps (
|
|
157
|
+
id TEXT PRIMARY KEY,
|
|
158
|
+
workflow_history_id TEXT NOT NULL,
|
|
159
|
+
step_index INTEGER NOT NULL,
|
|
160
|
+
step_type TEXT NOT NULL,
|
|
161
|
+
step_name TEXT NOT NULL,
|
|
162
|
+
step_id TEXT,
|
|
163
|
+
status TEXT NOT NULL CHECK (status IN ('running', 'completed', 'error', 'skipped')),
|
|
164
|
+
start_time TEXT NOT NULL,
|
|
165
|
+
end_time TEXT,
|
|
166
|
+
input TEXT,
|
|
167
|
+
output TEXT,
|
|
168
|
+
error_message TEXT,
|
|
169
|
+
agent_execution_id TEXT,
|
|
170
|
+
parallel_index INTEGER,
|
|
171
|
+
parent_step_id TEXT,
|
|
172
|
+
metadata TEXT,
|
|
173
|
+
created_at TEXT DEFAULT CURRENT_TIMESTAMP,
|
|
174
|
+
updated_at TEXT DEFAULT CURRENT_TIMESTAMP
|
|
175
|
+
)
|
|
176
|
+
`);
|
|
177
|
+
await db.execute(
|
|
178
|
+
`CREATE INDEX IF NOT EXISTS idx_${tablePrefix}_workflow_steps_workflow_history ON ${tablePrefix}_workflow_steps(workflow_history_id)`
|
|
179
|
+
);
|
|
180
|
+
await db.execute(
|
|
181
|
+
`CREATE INDEX IF NOT EXISTS idx_${tablePrefix}_workflow_steps_agent_execution ON ${tablePrefix}_workflow_steps(agent_execution_id)`
|
|
182
|
+
);
|
|
183
|
+
await db.execute(
|
|
184
|
+
`CREATE INDEX IF NOT EXISTS idx_${tablePrefix}_workflow_steps_step_index ON ${tablePrefix}_workflow_steps(workflow_history_id, step_index)`
|
|
185
|
+
);
|
|
186
|
+
await db.execute(
|
|
187
|
+
`CREATE INDEX IF NOT EXISTS idx_${tablePrefix}_workflow_steps_parallel ON ${tablePrefix}_workflow_steps(parent_step_id, parallel_index)`
|
|
188
|
+
);
|
|
189
|
+
await db.execute(`
|
|
190
|
+
CREATE TABLE IF NOT EXISTS ${tablePrefix}_workflow_timeline_events (
|
|
191
|
+
id TEXT PRIMARY KEY,
|
|
192
|
+
workflow_history_id TEXT NOT NULL,
|
|
193
|
+
event_id TEXT NOT NULL,
|
|
194
|
+
name TEXT NOT NULL,
|
|
195
|
+
type TEXT NOT NULL CHECK (type IN ('workflow', 'workflow-step')),
|
|
196
|
+
start_time TEXT NOT NULL,
|
|
197
|
+
end_time TEXT,
|
|
198
|
+
status TEXT NOT NULL,
|
|
199
|
+
level TEXT DEFAULT 'INFO',
|
|
200
|
+
input TEXT,
|
|
201
|
+
output TEXT,
|
|
202
|
+
status_message TEXT,
|
|
203
|
+
metadata TEXT,
|
|
204
|
+
trace_id TEXT,
|
|
205
|
+
parent_event_id TEXT,
|
|
206
|
+
event_sequence INTEGER,
|
|
207
|
+
created_at TEXT DEFAULT CURRENT_TIMESTAMP
|
|
208
|
+
)
|
|
209
|
+
`);
|
|
210
|
+
await db.execute(
|
|
211
|
+
`CREATE INDEX IF NOT EXISTS idx_${tablePrefix}_workflow_timeline_events_workflow_history ON ${tablePrefix}_workflow_timeline_events(workflow_history_id)`
|
|
212
|
+
);
|
|
213
|
+
await db.execute(
|
|
214
|
+
`CREATE INDEX IF NOT EXISTS idx_${tablePrefix}_workflow_timeline_events_trace ON ${tablePrefix}_workflow_timeline_events(trace_id)`
|
|
215
|
+
);
|
|
216
|
+
await db.execute(
|
|
217
|
+
`CREATE INDEX IF NOT EXISTS idx_${tablePrefix}_workflow_timeline_events_parent ON ${tablePrefix}_workflow_timeline_events(parent_event_id)`
|
|
218
|
+
);
|
|
219
|
+
await db.execute(
|
|
220
|
+
`CREATE INDEX IF NOT EXISTS idx_${tablePrefix}_workflow_timeline_events_type ON ${tablePrefix}_workflow_timeline_events(type)`
|
|
221
|
+
);
|
|
222
|
+
await db.execute(
|
|
223
|
+
`CREATE INDEX IF NOT EXISTS idx_${tablePrefix}_workflow_timeline_events_sequence ON ${tablePrefix}_workflow_timeline_events(event_sequence)`
|
|
224
|
+
);
|
|
225
|
+
const checkWorkflowIdColumn = await db.execute(`
|
|
226
|
+
SELECT COUNT(*) as count
|
|
227
|
+
FROM pragma_table_info('agent_history')
|
|
228
|
+
WHERE name = 'workflow_id'
|
|
229
|
+
`);
|
|
230
|
+
if (checkWorkflowIdColumn.rows[0].count === 0) {
|
|
231
|
+
await db.execute("ALTER TABLE agent_history ADD COLUMN workflow_id TEXT");
|
|
232
|
+
}
|
|
233
|
+
const checkWorkflowStepIdColumn = await db.execute(`
|
|
234
|
+
SELECT COUNT(*) as count
|
|
235
|
+
FROM pragma_table_info('agent_history')
|
|
236
|
+
WHERE name = 'workflow_step_id'
|
|
237
|
+
`);
|
|
238
|
+
if (checkWorkflowStepIdColumn.rows[0].count === 0) {
|
|
239
|
+
await db.execute("ALTER TABLE agent_history ADD COLUMN workflow_step_id TEXT");
|
|
240
|
+
}
|
|
241
|
+
await db.execute(
|
|
242
|
+
"CREATE INDEX IF NOT EXISTS idx_agent_history_workflow_id ON agent_history(workflow_id)"
|
|
243
|
+
);
|
|
244
|
+
await db.execute(
|
|
245
|
+
"CREATE INDEX IF NOT EXISTS idx_agent_history_workflow_step ON agent_history(workflow_step_id)"
|
|
246
|
+
);
|
|
247
|
+
}
|
|
248
|
+
__name(createWorkflowTables, "createWorkflowTables");
|
|
249
|
+
|
|
250
|
+
// src/workflow-extension.ts
|
|
251
|
+
import { safeStringify } from "@voltagent/internal/utils";
|
|
252
|
+
import { createPinoLogger } from "@voltagent/logger";
|
|
253
|
+
var LibSQLWorkflowExtension = class {
|
|
254
|
+
constructor(client, _tablePrefix = "voltagent_memory", logger) {
|
|
255
|
+
this.client = client;
|
|
256
|
+
this._tablePrefix = _tablePrefix;
|
|
257
|
+
this.logger = logger || createPinoLogger({ name: "libsql-workflow" });
|
|
258
|
+
}
|
|
259
|
+
static {
|
|
260
|
+
__name(this, "LibSQLWorkflowExtension");
|
|
261
|
+
}
|
|
262
|
+
logger;
|
|
263
|
+
/**
|
|
264
|
+
* Store a workflow history entry
|
|
265
|
+
*/
|
|
266
|
+
async storeWorkflowHistory(entry) {
|
|
267
|
+
await this.client.execute({
|
|
268
|
+
sql: `
|
|
269
|
+
INSERT INTO ${this._tablePrefix}_workflow_history (
|
|
270
|
+
id, name, workflow_id, status, start_time, end_time,
|
|
271
|
+
input, output, user_id, conversation_id, metadata, created_at, updated_at
|
|
272
|
+
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
|
|
273
|
+
`,
|
|
274
|
+
args: [
|
|
275
|
+
entry.id,
|
|
276
|
+
entry.workflowName,
|
|
277
|
+
entry.workflowId,
|
|
278
|
+
entry.status,
|
|
279
|
+
entry.startTime.toISOString(),
|
|
280
|
+
entry.endTime?.toISOString() || null,
|
|
281
|
+
safeStringify(entry.input),
|
|
282
|
+
entry.output ? safeStringify(entry.output) : null,
|
|
283
|
+
entry.userId || null,
|
|
284
|
+
entry.conversationId || null,
|
|
285
|
+
entry.metadata ? safeStringify(entry.metadata) : null,
|
|
286
|
+
entry.createdAt?.toISOString() || (/* @__PURE__ */ new Date()).toISOString(),
|
|
287
|
+
entry.updatedAt?.toISOString() || (/* @__PURE__ */ new Date()).toISOString()
|
|
288
|
+
]
|
|
289
|
+
});
|
|
290
|
+
}
|
|
291
|
+
/**
|
|
292
|
+
* Get a workflow history entry by ID
|
|
293
|
+
*/
|
|
294
|
+
async getWorkflowHistory(id) {
|
|
295
|
+
const result = await this.client.execute({
|
|
296
|
+
sql: `SELECT * FROM ${this._tablePrefix}_workflow_history WHERE id = ?`,
|
|
297
|
+
args: [id]
|
|
298
|
+
});
|
|
299
|
+
if (result.rows.length === 0) return null;
|
|
300
|
+
return this.parseWorkflowHistoryRow(result.rows[0]);
|
|
301
|
+
}
|
|
302
|
+
/**
|
|
303
|
+
* Get all workflow history entries for a specific workflow
|
|
304
|
+
*/
|
|
305
|
+
async getWorkflowHistoryByWorkflowId(workflowId) {
|
|
306
|
+
const result = await this.client.execute({
|
|
307
|
+
sql: `SELECT * FROM ${this._tablePrefix}_workflow_history WHERE workflow_id = ? ORDER BY start_time DESC`,
|
|
308
|
+
args: [workflowId]
|
|
309
|
+
});
|
|
310
|
+
return result.rows.map((row) => this.parseWorkflowHistoryRow(row));
|
|
311
|
+
}
|
|
312
|
+
/**
|
|
313
|
+
* Update a workflow history entry
|
|
314
|
+
*/
|
|
315
|
+
async updateWorkflowHistory(id, updates) {
|
|
316
|
+
this.logger.trace(`Updating workflow history ${id}`, {
|
|
317
|
+
status: updates.status,
|
|
318
|
+
hasMetadata: !!updates.metadata,
|
|
319
|
+
hasSuspension: !!updates.metadata?.suspension
|
|
320
|
+
});
|
|
321
|
+
const setClauses = [];
|
|
322
|
+
const args = [];
|
|
323
|
+
if (updates.status !== void 0) {
|
|
324
|
+
setClauses.push("status = ?");
|
|
325
|
+
args.push(updates.status);
|
|
326
|
+
}
|
|
327
|
+
if (updates.endTime !== void 0) {
|
|
328
|
+
setClauses.push("end_time = ?");
|
|
329
|
+
args.push(updates.endTime.toISOString());
|
|
330
|
+
}
|
|
331
|
+
if (updates.output !== void 0) {
|
|
332
|
+
setClauses.push("output = ?");
|
|
333
|
+
args.push(safeStringify(updates.output));
|
|
334
|
+
}
|
|
335
|
+
if (updates.userId !== void 0) {
|
|
336
|
+
setClauses.push("user_id = ?");
|
|
337
|
+
args.push(updates.userId);
|
|
338
|
+
}
|
|
339
|
+
if (updates.conversationId !== void 0) {
|
|
340
|
+
setClauses.push("conversation_id = ?");
|
|
341
|
+
args.push(updates.conversationId);
|
|
342
|
+
}
|
|
343
|
+
if (updates.metadata !== void 0) {
|
|
344
|
+
setClauses.push("metadata = ?");
|
|
345
|
+
const metadataJson = safeStringify(updates.metadata);
|
|
346
|
+
args.push(metadataJson);
|
|
347
|
+
this.logger.trace(`Setting metadata for ${id}:`, { metadata: metadataJson });
|
|
348
|
+
}
|
|
349
|
+
setClauses.push("updated_at = ?");
|
|
350
|
+
args.push((/* @__PURE__ */ new Date()).toISOString());
|
|
351
|
+
args.push(id);
|
|
352
|
+
const sql = `UPDATE ${this._tablePrefix}_workflow_history SET ${setClauses.join(", ")} WHERE id = ?`;
|
|
353
|
+
this.logger.trace("Executing SQL:", { sql, args });
|
|
354
|
+
try {
|
|
355
|
+
const result = await this.client.execute({ sql, args });
|
|
356
|
+
this.logger.trace(
|
|
357
|
+
`Successfully updated workflow history ${id}, rows affected: ${result.rowsAffected}`
|
|
358
|
+
);
|
|
359
|
+
} catch (error) {
|
|
360
|
+
this.logger.error(`Failed to update workflow history ${id}:`, { error });
|
|
361
|
+
throw error;
|
|
362
|
+
}
|
|
363
|
+
}
|
|
364
|
+
/**
|
|
365
|
+
* Delete a workflow history entry
|
|
366
|
+
*/
|
|
367
|
+
async deleteWorkflowHistory(id) {
|
|
368
|
+
await this.client.execute({
|
|
369
|
+
sql: `DELETE FROM ${this._tablePrefix}_workflow_history WHERE id = ?`,
|
|
370
|
+
args: [id]
|
|
371
|
+
});
|
|
372
|
+
}
|
|
373
|
+
/**
|
|
374
|
+
* Store a workflow step entry
|
|
375
|
+
*/
|
|
376
|
+
async storeWorkflowStep(step) {
|
|
377
|
+
await this.client.execute({
|
|
378
|
+
sql: `
|
|
379
|
+
INSERT INTO ${this._tablePrefix}_workflow_steps (
|
|
380
|
+
id, workflow_history_id, step_index, step_type, step_name, step_id,
|
|
381
|
+
status, start_time, end_time, input, output, error_message,
|
|
382
|
+
agent_execution_id, parallel_index, parent_step_id, metadata,
|
|
383
|
+
created_at, updated_at
|
|
384
|
+
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
|
|
385
|
+
`,
|
|
386
|
+
args: [
|
|
387
|
+
step.id,
|
|
388
|
+
step.workflowHistoryId,
|
|
389
|
+
step.stepIndex,
|
|
390
|
+
step.stepType,
|
|
391
|
+
step.stepName,
|
|
392
|
+
step.stepId || null,
|
|
393
|
+
step.status,
|
|
394
|
+
step.startTime.toISOString(),
|
|
395
|
+
step.endTime?.toISOString() || null,
|
|
396
|
+
step.input ? safeStringify(step.input) : null,
|
|
397
|
+
step.output ? safeStringify(step.output) : null,
|
|
398
|
+
step.error ? safeStringify(step.error) : null,
|
|
399
|
+
step.agentExecutionId || null,
|
|
400
|
+
step.parallelIndex || null,
|
|
401
|
+
step.parallelParentStepId || null,
|
|
402
|
+
step.metadata ? safeStringify(step.metadata) : null,
|
|
403
|
+
step.createdAt?.toISOString() || (/* @__PURE__ */ new Date()).toISOString(),
|
|
404
|
+
step.updatedAt?.toISOString() || (/* @__PURE__ */ new Date()).toISOString()
|
|
405
|
+
]
|
|
406
|
+
});
|
|
407
|
+
}
|
|
408
|
+
/**
|
|
409
|
+
* Get a workflow step by ID
|
|
410
|
+
*/
|
|
411
|
+
async getWorkflowStep(id) {
|
|
412
|
+
const result = await this.client.execute({
|
|
413
|
+
sql: `SELECT * FROM ${this._tablePrefix}_workflow_steps WHERE id = ?`,
|
|
414
|
+
args: [id]
|
|
415
|
+
});
|
|
416
|
+
if (result.rows.length === 0) return null;
|
|
417
|
+
return this.parseWorkflowStepRow(result.rows[0]);
|
|
418
|
+
}
|
|
419
|
+
/**
|
|
420
|
+
* Get all workflow steps for a specific workflow history
|
|
421
|
+
*/
|
|
422
|
+
async getWorkflowSteps(workflowHistoryId) {
|
|
423
|
+
const result = await this.client.execute({
|
|
424
|
+
sql: `SELECT * FROM ${this._tablePrefix}_workflow_steps WHERE workflow_history_id = ? ORDER BY step_index ASC`,
|
|
425
|
+
args: [workflowHistoryId]
|
|
426
|
+
});
|
|
427
|
+
return result.rows.map((row) => this.parseWorkflowStepRow(row));
|
|
428
|
+
}
|
|
429
|
+
/**
|
|
430
|
+
* Update a workflow step
|
|
431
|
+
*/
|
|
432
|
+
async updateWorkflowStep(id, updates) {
|
|
433
|
+
const setClauses = [];
|
|
434
|
+
const args = [];
|
|
435
|
+
if (updates.status !== void 0) {
|
|
436
|
+
setClauses.push("status = ?");
|
|
437
|
+
args.push(updates.status);
|
|
438
|
+
}
|
|
439
|
+
if (updates.endTime !== void 0) {
|
|
440
|
+
setClauses.push("end_time = ?");
|
|
441
|
+
args.push(updates.endTime.toISOString());
|
|
442
|
+
}
|
|
443
|
+
if (updates.output !== void 0) {
|
|
444
|
+
setClauses.push("output = ?");
|
|
445
|
+
args.push(safeStringify(updates.output));
|
|
446
|
+
}
|
|
447
|
+
if (updates.error !== void 0) {
|
|
448
|
+
setClauses.push("error_message = ?");
|
|
449
|
+
args.push(safeStringify(updates.error));
|
|
450
|
+
}
|
|
451
|
+
if (updates.agentExecutionId !== void 0) {
|
|
452
|
+
setClauses.push("agent_execution_id = ?");
|
|
453
|
+
args.push(updates.agentExecutionId);
|
|
454
|
+
}
|
|
455
|
+
if (updates.metadata !== void 0) {
|
|
456
|
+
setClauses.push("metadata = ?");
|
|
457
|
+
args.push(safeStringify(updates.metadata));
|
|
458
|
+
}
|
|
459
|
+
setClauses.push("updated_at = ?");
|
|
460
|
+
args.push((/* @__PURE__ */ new Date()).toISOString());
|
|
461
|
+
args.push(id);
|
|
462
|
+
await this.client.execute({
|
|
463
|
+
sql: `UPDATE ${this._tablePrefix}_workflow_steps SET ${setClauses.join(", ")} WHERE id = ?`,
|
|
464
|
+
args
|
|
465
|
+
});
|
|
466
|
+
}
|
|
467
|
+
/**
|
|
468
|
+
* Delete a workflow step
|
|
469
|
+
*/
|
|
470
|
+
async deleteWorkflowStep(id) {
|
|
471
|
+
await this.client.execute({
|
|
472
|
+
sql: `DELETE FROM ${this._tablePrefix}_workflow_steps WHERE id = ?`,
|
|
473
|
+
args: [id]
|
|
474
|
+
});
|
|
475
|
+
}
|
|
476
|
+
/**
|
|
477
|
+
* Store a workflow timeline event
|
|
478
|
+
*/
|
|
479
|
+
async storeWorkflowTimelineEvent(event) {
|
|
480
|
+
await this.client.execute({
|
|
481
|
+
sql: `
|
|
482
|
+
INSERT INTO ${this._tablePrefix}_workflow_timeline_events (
|
|
483
|
+
id, workflow_history_id, event_id, name, type,
|
|
484
|
+
start_time, end_time, status, level, input, output,
|
|
485
|
+
status_message, metadata, trace_id, parent_event_id, event_sequence, created_at
|
|
486
|
+
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
|
|
487
|
+
`,
|
|
488
|
+
args: [
|
|
489
|
+
event.id,
|
|
490
|
+
event.workflowHistoryId,
|
|
491
|
+
event.eventId,
|
|
492
|
+
event.name,
|
|
493
|
+
event.type,
|
|
494
|
+
event.startTime,
|
|
495
|
+
event.endTime || null,
|
|
496
|
+
event.status,
|
|
497
|
+
event.level || "INFO",
|
|
498
|
+
event.input ? safeStringify(event.input) : null,
|
|
499
|
+
event.output ? safeStringify(event.output) : null,
|
|
500
|
+
event.statusMessage ? safeStringify(event.statusMessage) : null,
|
|
501
|
+
event.metadata ? safeStringify(event.metadata) : null,
|
|
502
|
+
event.traceId || null,
|
|
503
|
+
event.parentEventId || null,
|
|
504
|
+
event.eventSequence || null,
|
|
505
|
+
// Event sequence for ordering
|
|
506
|
+
event.createdAt.toISOString()
|
|
507
|
+
]
|
|
508
|
+
});
|
|
509
|
+
}
|
|
510
|
+
/**
|
|
511
|
+
* Get a workflow timeline event by ID
|
|
512
|
+
*/
|
|
513
|
+
async getWorkflowTimelineEvent(id) {
|
|
514
|
+
const result = await this.client.execute({
|
|
515
|
+
sql: `SELECT * FROM ${this._tablePrefix}_workflow_timeline_events WHERE id = ?`,
|
|
516
|
+
args: [id]
|
|
517
|
+
});
|
|
518
|
+
if (result.rows.length === 0) return null;
|
|
519
|
+
return this.parseWorkflowTimelineEventRow(result.rows[0]);
|
|
520
|
+
}
|
|
521
|
+
/**
|
|
522
|
+
* Get all workflow timeline events for a specific workflow history
|
|
523
|
+
*/
|
|
524
|
+
async getWorkflowTimelineEvents(workflowHistoryId) {
|
|
525
|
+
const result = await this.client.execute({
|
|
526
|
+
sql: `SELECT * FROM ${this._tablePrefix}_workflow_timeline_events WHERE workflow_history_id = ? ORDER BY event_sequence ASC, start_time ASC`,
|
|
527
|
+
args: [workflowHistoryId]
|
|
528
|
+
});
|
|
529
|
+
return result.rows.map((row) => this.parseWorkflowTimelineEventRow(row));
|
|
530
|
+
}
|
|
531
|
+
/**
|
|
532
|
+
* Delete a workflow timeline event
|
|
533
|
+
*/
|
|
534
|
+
async deleteWorkflowTimelineEvent(id) {
|
|
535
|
+
await this.client.execute({
|
|
536
|
+
sql: `DELETE FROM ${this._tablePrefix}_workflow_timeline_events WHERE id = ?`,
|
|
537
|
+
args: [id]
|
|
538
|
+
});
|
|
539
|
+
}
|
|
540
|
+
/**
|
|
541
|
+
* Get all workflow IDs
|
|
542
|
+
*/
|
|
543
|
+
async getAllWorkflowIds() {
|
|
544
|
+
const result = await this.client.execute({
|
|
545
|
+
sql: `SELECT DISTINCT workflow_id FROM ${this._tablePrefix}_workflow_history`,
|
|
546
|
+
args: []
|
|
547
|
+
});
|
|
548
|
+
return result.rows.map((row) => row.workflow_id);
|
|
549
|
+
}
|
|
550
|
+
/**
|
|
551
|
+
* Get workflow statistics
|
|
552
|
+
*/
|
|
553
|
+
async getWorkflowStats(workflowId) {
|
|
554
|
+
const result = await this.client.execute({
|
|
555
|
+
sql: `
|
|
556
|
+
SELECT
|
|
557
|
+
COUNT(*) as total_executions,
|
|
558
|
+
SUM(CASE WHEN status = 'completed' THEN 1 ELSE 0 END) as successful_executions,
|
|
559
|
+
SUM(CASE WHEN status = 'error' THEN 1 ELSE 0 END) as failed_executions,
|
|
560
|
+
AVG(CASE WHEN end_time IS NOT NULL THEN
|
|
561
|
+
(julianday(end_time) - julianday(start_time)) * 24 * 60 * 60 * 1000
|
|
562
|
+
ELSE NULL END) as avg_duration_ms,
|
|
563
|
+
MAX(start_time) as last_execution_time
|
|
564
|
+
FROM ${this._tablePrefix}_workflow_history
|
|
565
|
+
WHERE workflow_id = ?
|
|
566
|
+
`,
|
|
567
|
+
args: [workflowId]
|
|
568
|
+
});
|
|
569
|
+
if (result.rows.length === 0) {
|
|
570
|
+
return {
|
|
571
|
+
totalExecutions: 0,
|
|
572
|
+
successfulExecutions: 0,
|
|
573
|
+
failedExecutions: 0,
|
|
574
|
+
averageExecutionTime: 0,
|
|
575
|
+
lastExecutionTime: void 0
|
|
576
|
+
};
|
|
577
|
+
}
|
|
578
|
+
const row = result.rows[0];
|
|
579
|
+
return {
|
|
580
|
+
totalExecutions: Number(row.total_executions) || 0,
|
|
581
|
+
successfulExecutions: Number(row.successful_executions) || 0,
|
|
582
|
+
failedExecutions: Number(row.failed_executions) || 0,
|
|
583
|
+
averageExecutionTime: Number(row.avg_duration_ms) || 0,
|
|
584
|
+
lastExecutionTime: row.last_execution_time ? new Date(row.last_execution_time) : void 0
|
|
585
|
+
};
|
|
586
|
+
}
|
|
587
|
+
/**
|
|
588
|
+
* Get workflow history with all related data (steps and events)
|
|
589
|
+
*/
|
|
590
|
+
async getWorkflowHistoryWithStepsAndEvents(id) {
|
|
591
|
+
const history = await this.getWorkflowHistory(id);
|
|
592
|
+
if (!history) return null;
|
|
593
|
+
const [steps, events] = await Promise.all([
|
|
594
|
+
this.getWorkflowSteps(id),
|
|
595
|
+
this.getWorkflowTimelineEvents(id)
|
|
596
|
+
]);
|
|
597
|
+
history.steps = steps;
|
|
598
|
+
history.events = events;
|
|
599
|
+
return history;
|
|
600
|
+
}
|
|
601
|
+
/**
|
|
602
|
+
* Delete workflow history and all related data
|
|
603
|
+
*/
|
|
604
|
+
async deleteWorkflowHistoryWithRelated(id) {
|
|
605
|
+
await this.deleteWorkflowHistory(id);
|
|
606
|
+
}
|
|
607
|
+
/**
|
|
608
|
+
* Clean up old workflow histories
|
|
609
|
+
*/
|
|
610
|
+
async cleanupOldWorkflowHistories(workflowId, maxEntries) {
|
|
611
|
+
const countResult = await this.client.execute({
|
|
612
|
+
sql: `SELECT COUNT(*) as count FROM ${this._tablePrefix}_workflow_history WHERE workflow_id = ?`,
|
|
613
|
+
args: [workflowId]
|
|
614
|
+
});
|
|
615
|
+
const currentCount = Number(countResult.rows[0].count);
|
|
616
|
+
if (currentCount <= maxEntries) return 0;
|
|
617
|
+
const deleteCount = currentCount - maxEntries;
|
|
618
|
+
const deleteResult = await this.client.execute({
|
|
619
|
+
sql: `
|
|
620
|
+
DELETE FROM ${this._tablePrefix}_workflow_history
|
|
621
|
+
WHERE workflow_id = ?
|
|
622
|
+
AND id IN (
|
|
623
|
+
SELECT id FROM ${this._tablePrefix}_workflow_history
|
|
624
|
+
WHERE workflow_id = ?
|
|
625
|
+
ORDER BY start_time ASC
|
|
626
|
+
LIMIT ?
|
|
627
|
+
)
|
|
628
|
+
`,
|
|
629
|
+
args: [workflowId, workflowId, deleteCount]
|
|
630
|
+
});
|
|
631
|
+
return deleteResult.rowsAffected;
|
|
632
|
+
}
|
|
633
|
+
/**
|
|
634
|
+
* Parse workflow history row from database
|
|
635
|
+
*/
|
|
636
|
+
parseWorkflowHistoryRow(row) {
|
|
637
|
+
return {
|
|
638
|
+
id: row.id,
|
|
639
|
+
workflowName: row.name,
|
|
640
|
+
workflowId: row.workflow_id,
|
|
641
|
+
status: row.status,
|
|
642
|
+
startTime: new Date(row.start_time),
|
|
643
|
+
endTime: row.end_time ? new Date(row.end_time) : void 0,
|
|
644
|
+
input: row.input ? JSON.parse(row.input) : null,
|
|
645
|
+
output: row.output ? JSON.parse(row.output) : void 0,
|
|
646
|
+
userId: row.user_id,
|
|
647
|
+
conversationId: row.conversation_id,
|
|
648
|
+
metadata: row.metadata ? JSON.parse(row.metadata) : void 0,
|
|
649
|
+
steps: [],
|
|
650
|
+
// Will be loaded separately if needed
|
|
651
|
+
events: [],
|
|
652
|
+
// Will be loaded separately if needed
|
|
653
|
+
createdAt: new Date(row.created_at),
|
|
654
|
+
updatedAt: new Date(row.updated_at)
|
|
655
|
+
};
|
|
656
|
+
}
|
|
657
|
+
/**
|
|
658
|
+
* Parse workflow step row from database
|
|
659
|
+
*/
|
|
660
|
+
parseWorkflowStepRow(row) {
|
|
661
|
+
return {
|
|
662
|
+
id: row.id,
|
|
663
|
+
workflowHistoryId: row.workflow_history_id,
|
|
664
|
+
stepIndex: Number(row.step_index),
|
|
665
|
+
stepType: row.step_type,
|
|
666
|
+
stepName: row.step_name,
|
|
667
|
+
stepId: row.step_id || void 0,
|
|
668
|
+
status: row.status,
|
|
669
|
+
startTime: new Date(row.start_time),
|
|
670
|
+
endTime: row.end_time ? new Date(row.end_time) : void 0,
|
|
671
|
+
input: row.input ? JSON.parse(row.input) : void 0,
|
|
672
|
+
output: row.output ? JSON.parse(row.output) : void 0,
|
|
673
|
+
error: row.error_message ? JSON.parse(row.error_message) : void 0,
|
|
674
|
+
agentExecutionId: row.agent_execution_id || void 0,
|
|
675
|
+
parallelIndex: row.parallel_index ? Number(row.parallel_index) : void 0,
|
|
676
|
+
parallelParentStepId: row.parent_step_id || void 0,
|
|
677
|
+
metadata: row.metadata ? JSON.parse(row.metadata) : void 0,
|
|
678
|
+
createdAt: new Date(row.created_at),
|
|
679
|
+
updatedAt: new Date(row.updated_at)
|
|
680
|
+
};
|
|
681
|
+
}
|
|
682
|
+
/**
|
|
683
|
+
* Parse workflow timeline event row from database
|
|
684
|
+
*/
|
|
685
|
+
parseWorkflowTimelineEventRow(row) {
|
|
686
|
+
return {
|
|
687
|
+
id: row.id,
|
|
688
|
+
workflowHistoryId: row.workflow_history_id,
|
|
689
|
+
eventId: row.event_id,
|
|
690
|
+
name: row.name,
|
|
691
|
+
type: row.type,
|
|
692
|
+
startTime: row.start_time,
|
|
693
|
+
endTime: row.end_time ? row.end_time : void 0,
|
|
694
|
+
status: row.status,
|
|
695
|
+
level: row.level || void 0,
|
|
696
|
+
input: row.input ? JSON.parse(row.input) : void 0,
|
|
697
|
+
output: row.output ? JSON.parse(row.output) : void 0,
|
|
698
|
+
statusMessage: row.status_message ? JSON.parse(row.status_message) : void 0,
|
|
699
|
+
metadata: row.metadata ? JSON.parse(row.metadata) : void 0,
|
|
700
|
+
traceId: row.trace_id || void 0,
|
|
701
|
+
parentEventId: row.parent_event_id || void 0,
|
|
702
|
+
eventSequence: Number(row.event_sequence),
|
|
703
|
+
createdAt: new Date(row.created_at)
|
|
704
|
+
};
|
|
705
|
+
}
|
|
706
|
+
};
|
|
707
|
+
|
|
708
|
+
// src/index.ts
|
|
709
|
+
async function debugDelay() {
|
|
710
|
+
const min = 0;
|
|
711
|
+
const max = 0;
|
|
712
|
+
const delay = Math.floor(Math.random() * (max - min + 1)) + min;
|
|
713
|
+
return new Promise((resolve) => setTimeout(resolve, delay));
|
|
714
|
+
}
|
|
715
|
+
__name(debugDelay, "debugDelay");
|
|
716
|
+
var LibSQLStorage = class {
|
|
717
|
+
static {
|
|
718
|
+
__name(this, "LibSQLStorage");
|
|
719
|
+
}
|
|
720
|
+
client;
|
|
721
|
+
options;
|
|
722
|
+
initialized;
|
|
723
|
+
workflowExtension;
|
|
724
|
+
logger;
|
|
725
|
+
retryAttempts;
|
|
726
|
+
baseDelayMs;
|
|
727
|
+
/**
|
|
728
|
+
* Create a new LibSQL storage
|
|
729
|
+
* @param options Configuration options
|
|
730
|
+
*/
|
|
731
|
+
constructor(options) {
|
|
732
|
+
this.logger = options.logger || createPinoLogger2({ name: "libsql-storage" });
|
|
733
|
+
this.retryAttempts = options.retryAttempts ?? 3;
|
|
734
|
+
this.baseDelayMs = options.baseDelayMs ?? 50;
|
|
735
|
+
this.options = {
|
|
736
|
+
storageLimit: options.storageLimit || 100,
|
|
737
|
+
tablePrefix: options.tablePrefix || "voltagent_memory",
|
|
738
|
+
debug: options.debug || false,
|
|
739
|
+
url: options.url || "file:./.voltagent/memory.db",
|
|
740
|
+
authToken: options.authToken,
|
|
741
|
+
retryAttempts: this.retryAttempts,
|
|
742
|
+
baseDelayMs: this.baseDelayMs
|
|
743
|
+
};
|
|
744
|
+
if (this.options.url.startsWith("file:") && !this.options.url.includes(":memory:")) {
|
|
745
|
+
const filePath = this.options.url.substring(5);
|
|
746
|
+
const dir = dirname(filePath);
|
|
747
|
+
if (dir && dir !== "." && !existsSync(dir)) {
|
|
748
|
+
try {
|
|
749
|
+
mkdirSync(dir, { recursive: true });
|
|
750
|
+
this.debug("Created directory for database", { dir });
|
|
751
|
+
} catch (error) {
|
|
752
|
+
this.logger.warn("Failed to create directory for database", { dir, error });
|
|
753
|
+
}
|
|
754
|
+
}
|
|
755
|
+
}
|
|
756
|
+
this.client = createClient({
|
|
757
|
+
url: this.options.url,
|
|
758
|
+
authToken: this.options.authToken
|
|
759
|
+
});
|
|
760
|
+
this.debug("LibSQL storage provider initialized with options", this.options);
|
|
761
|
+
this.workflowExtension = new LibSQLWorkflowExtension(
|
|
762
|
+
this.client,
|
|
763
|
+
this.options.tablePrefix,
|
|
764
|
+
this.logger
|
|
765
|
+
);
|
|
766
|
+
this.initialized = this.initializeDatabase();
|
|
767
|
+
}
|
|
768
|
+
/**
|
|
769
|
+
* Log a debug message if debug is enabled
|
|
770
|
+
* @param message Message to log
|
|
771
|
+
* @param data Additional data to log
|
|
772
|
+
*/
|
|
773
|
+
debug(message, data) {
|
|
774
|
+
if (this.options?.debug) {
|
|
775
|
+
this.logger.debug(`${message}`, data || "");
|
|
776
|
+
}
|
|
777
|
+
}
|
|
778
|
+
/**
|
|
779
|
+
* Calculate delay with jitter for better load distribution
|
|
780
|
+
* @param attempt Current retry attempt number
|
|
781
|
+
* @returns Delay in milliseconds
|
|
782
|
+
*/
|
|
783
|
+
calculateRetryDelay(attempt) {
|
|
784
|
+
const exponentialDelay = this.baseDelayMs * 2 ** (attempt - 1);
|
|
785
|
+
const jitterFactor = 0.2 + Math.random() * 0.2;
|
|
786
|
+
const delayWithJitter = exponentialDelay * (1 + jitterFactor);
|
|
787
|
+
return Math.min(delayWithJitter, 2e3);
|
|
788
|
+
}
|
|
789
|
+
/**
|
|
790
|
+
* Execute a database operation with retry strategy
|
|
791
|
+
* Implements jittered exponential backoff
|
|
792
|
+
* @param operationFn The operation function to execute
|
|
793
|
+
* @param operationName Operation name for logging
|
|
794
|
+
* @returns The result of the operation
|
|
795
|
+
*/
|
|
796
|
+
async executeWithRetryStrategy(operationFn, operationName) {
|
|
797
|
+
let attempt = 0;
|
|
798
|
+
while (attempt < this.retryAttempts) {
|
|
799
|
+
attempt++;
|
|
800
|
+
try {
|
|
801
|
+
return await operationFn();
|
|
802
|
+
} catch (error) {
|
|
803
|
+
const isBusyError = error.message && (error.message.includes("SQLITE_BUSY") || error.message.includes("database is locked") || error.code === "SQLITE_BUSY");
|
|
804
|
+
if (!isBusyError || attempt >= this.retryAttempts) {
|
|
805
|
+
this.debug(`Operation failed: ${operationName}`, {
|
|
806
|
+
attempt,
|
|
807
|
+
error: error.message
|
|
808
|
+
});
|
|
809
|
+
throw error;
|
|
810
|
+
}
|
|
811
|
+
const delay = this.calculateRetryDelay(attempt);
|
|
812
|
+
this.debug(`Retrying ${operationName}`, {
|
|
813
|
+
attempt,
|
|
814
|
+
remainingAttempts: this.retryAttempts - attempt,
|
|
815
|
+
delay
|
|
816
|
+
});
|
|
817
|
+
await new Promise((resolve) => setTimeout(resolve, delay));
|
|
818
|
+
}
|
|
819
|
+
}
|
|
820
|
+
throw new Error(`Max retry attempts (${this.retryAttempts}) exceeded for ${operationName}`);
|
|
821
|
+
}
|
|
822
|
+
/**
|
|
823
|
+
* Initialize workflow tables
|
|
824
|
+
*/
|
|
825
|
+
async initializeWorkflowTables() {
|
|
826
|
+
try {
|
|
827
|
+
await createWorkflowTables(this.client, this.options.tablePrefix);
|
|
828
|
+
this.debug("Workflow tables initialized successfully");
|
|
829
|
+
await addSuspendedStatusMigration(this.client, this.options.tablePrefix);
|
|
830
|
+
this.debug("Workflow migrations applied successfully");
|
|
831
|
+
} catch (error) {
|
|
832
|
+
this.debug("Error initializing workflow tables:", error);
|
|
833
|
+
}
|
|
834
|
+
}
|
|
835
|
+
/**
|
|
836
|
+
* Initialize the database tables
|
|
837
|
+
* @returns Promise that resolves when initialization is complete
|
|
838
|
+
*/
|
|
839
|
+
async initializeDatabase() {
|
|
840
|
+
if (this.options.url.startsWith("file:") || this.options.url.includes(":memory:")) {
|
|
841
|
+
try {
|
|
842
|
+
await this.client.execute("PRAGMA journal_mode=WAL;");
|
|
843
|
+
this.debug("PRAGMA journal_mode=WAL set.");
|
|
844
|
+
} catch (err) {
|
|
845
|
+
this.debug("Failed to set PRAGMA journal_mode=WAL.", err);
|
|
846
|
+
}
|
|
847
|
+
try {
|
|
848
|
+
await this.client.execute("PRAGMA busy_timeout = 5000;");
|
|
849
|
+
this.debug("PRAGMA busy_timeout=5000 set.");
|
|
850
|
+
} catch (err) {
|
|
851
|
+
this.debug("Failed to set PRAGMA busy_timeout.", err);
|
|
852
|
+
}
|
|
853
|
+
}
|
|
854
|
+
const conversationsTableName = `${this.options.tablePrefix}_conversations`;
|
|
855
|
+
await this.client.execute(`
|
|
856
|
+
CREATE TABLE IF NOT EXISTS ${conversationsTableName} (
|
|
857
|
+
id TEXT PRIMARY KEY,
|
|
858
|
+
resource_id TEXT NOT NULL,
|
|
859
|
+
user_id TEXT NOT NULL,
|
|
860
|
+
title TEXT NOT NULL,
|
|
861
|
+
metadata TEXT NOT NULL,
|
|
862
|
+
created_at TEXT NOT NULL,
|
|
863
|
+
updated_at TEXT NOT NULL
|
|
864
|
+
)
|
|
865
|
+
`);
|
|
866
|
+
const messagesTableName = `${this.options.tablePrefix}_messages`;
|
|
867
|
+
await this.client.execute(`
|
|
868
|
+
CREATE TABLE IF NOT EXISTS ${messagesTableName} (
|
|
869
|
+
conversation_id TEXT NOT NULL,
|
|
870
|
+
message_id TEXT NOT NULL,
|
|
871
|
+
role TEXT NOT NULL,
|
|
872
|
+
content TEXT NOT NULL,
|
|
873
|
+
type TEXT NOT NULL,
|
|
874
|
+
created_at TEXT NOT NULL,
|
|
875
|
+
PRIMARY KEY (conversation_id, message_id)
|
|
876
|
+
)
|
|
877
|
+
`);
|
|
878
|
+
const historyTableName = `${this.options.tablePrefix}_agent_history`;
|
|
879
|
+
await this.client.execute(`
|
|
880
|
+
CREATE TABLE IF NOT EXISTS ${historyTableName} (
|
|
881
|
+
id TEXT PRIMARY KEY,
|
|
882
|
+
agent_id TEXT NOT NULL,
|
|
883
|
+
timestamp TEXT NOT NULL,
|
|
884
|
+
status TEXT,
|
|
885
|
+
input TEXT,
|
|
886
|
+
output TEXT,
|
|
887
|
+
usage TEXT,
|
|
888
|
+
metadata TEXT,
|
|
889
|
+
userId TEXT,
|
|
890
|
+
conversationId TEXT
|
|
891
|
+
)
|
|
892
|
+
`);
|
|
893
|
+
const historyStepsTableName = `${this.options.tablePrefix}_agent_history_steps`;
|
|
894
|
+
await this.client.execute(`
|
|
895
|
+
CREATE TABLE IF NOT EXISTS ${historyStepsTableName} (
|
|
896
|
+
key TEXT PRIMARY KEY,
|
|
897
|
+
value TEXT NOT NULL,
|
|
898
|
+
history_id TEXT NOT NULL,
|
|
899
|
+
agent_id TEXT
|
|
900
|
+
)
|
|
901
|
+
`);
|
|
902
|
+
const timelineEventsTableName = `${this.options.tablePrefix}_agent_history_timeline_events`;
|
|
903
|
+
await this.client.execute(`
|
|
904
|
+
CREATE TABLE IF NOT EXISTS ${timelineEventsTableName} (
|
|
905
|
+
id TEXT PRIMARY KEY,
|
|
906
|
+
history_id TEXT NOT NULL,
|
|
907
|
+
agent_id TEXT,
|
|
908
|
+
event_type TEXT NOT NULL,
|
|
909
|
+
event_name TEXT NOT NULL,
|
|
910
|
+
start_time TEXT NOT NULL,
|
|
911
|
+
end_time TEXT,
|
|
912
|
+
status TEXT,
|
|
913
|
+
status_message TEXT,
|
|
914
|
+
level TEXT,
|
|
915
|
+
version TEXT,
|
|
916
|
+
parent_event_id TEXT,
|
|
917
|
+
tags TEXT,
|
|
918
|
+
input TEXT,
|
|
919
|
+
output TEXT,
|
|
920
|
+
error TEXT,
|
|
921
|
+
metadata TEXT
|
|
922
|
+
)
|
|
923
|
+
`);
|
|
924
|
+
await this.client.execute(`
|
|
925
|
+
CREATE INDEX IF NOT EXISTS idx_${messagesTableName}_lookup
|
|
926
|
+
ON ${messagesTableName}(conversation_id, created_at)
|
|
927
|
+
`);
|
|
928
|
+
await this.client.execute(`
|
|
929
|
+
CREATE INDEX IF NOT EXISTS idx_${conversationsTableName}_resource
|
|
930
|
+
ON ${conversationsTableName}(resource_id)
|
|
931
|
+
`);
|
|
932
|
+
try {
|
|
933
|
+
const tableInfo = await this.client.execute(`PRAGMA table_info(${conversationsTableName})`);
|
|
934
|
+
const hasUserIdColumn = tableInfo.rows.some((row) => row.name === "user_id");
|
|
935
|
+
if (hasUserIdColumn) {
|
|
936
|
+
await this.client.execute(`
|
|
937
|
+
CREATE INDEX IF NOT EXISTS idx_${conversationsTableName}_user
|
|
938
|
+
ON ${conversationsTableName}(user_id)
|
|
939
|
+
`);
|
|
940
|
+
}
|
|
941
|
+
} catch (error) {
|
|
942
|
+
this.debug("Error creating user_id index, will be created after migration:", error);
|
|
943
|
+
}
|
|
944
|
+
await this.client.execute(`
|
|
945
|
+
CREATE INDEX IF NOT EXISTS idx_${historyStepsTableName}_history_id
|
|
946
|
+
ON ${historyStepsTableName}(history_id)
|
|
947
|
+
`);
|
|
948
|
+
await this.initializeWorkflowTables();
|
|
949
|
+
await this.client.execute(`
|
|
950
|
+
CREATE INDEX IF NOT EXISTS idx_${historyTableName}_agent_id
|
|
951
|
+
ON ${historyTableName}(agent_id)
|
|
952
|
+
`);
|
|
953
|
+
await this.client.execute(`
|
|
954
|
+
CREATE INDEX IF NOT EXISTS idx_${historyStepsTableName}_agent_id
|
|
955
|
+
ON ${historyStepsTableName}(agent_id)
|
|
956
|
+
`);
|
|
957
|
+
await this.client.execute(`
|
|
958
|
+
CREATE INDEX IF NOT EXISTS idx_${timelineEventsTableName}_history_id
|
|
959
|
+
ON ${timelineEventsTableName}(history_id)
|
|
960
|
+
`);
|
|
961
|
+
await this.client.execute(`
|
|
962
|
+
CREATE INDEX IF NOT EXISTS idx_${timelineEventsTableName}_agent_id
|
|
963
|
+
ON ${timelineEventsTableName}(agent_id)
|
|
964
|
+
`);
|
|
965
|
+
await this.client.execute(`
|
|
966
|
+
CREATE INDEX IF NOT EXISTS idx_${timelineEventsTableName}_event_type
|
|
967
|
+
ON ${timelineEventsTableName}(event_type)
|
|
968
|
+
`);
|
|
969
|
+
await this.client.execute(`
|
|
970
|
+
CREATE INDEX IF NOT EXISTS idx_${timelineEventsTableName}_event_name
|
|
971
|
+
ON ${timelineEventsTableName}(event_name)
|
|
972
|
+
`);
|
|
973
|
+
await this.client.execute(`
|
|
974
|
+
CREATE INDEX IF NOT EXISTS idx_${timelineEventsTableName}_parent_event_id
|
|
975
|
+
ON ${timelineEventsTableName}(parent_event_id)
|
|
976
|
+
`);
|
|
977
|
+
await this.client.execute(`
|
|
978
|
+
CREATE INDEX IF NOT EXISTS idx_${timelineEventsTableName}_status
|
|
979
|
+
ON ${timelineEventsTableName}(status)
|
|
980
|
+
`);
|
|
981
|
+
this.debug("Database initialized successfully");
|
|
982
|
+
try {
|
|
983
|
+
const migrationResult = await this.migrateConversationSchema({
|
|
984
|
+
createBackup: true,
|
|
985
|
+
deleteBackupAfterSuccess: true
|
|
986
|
+
});
|
|
987
|
+
if (migrationResult.success) {
|
|
988
|
+
if ((migrationResult.migratedCount || 0) > 0) {
|
|
989
|
+
this.logger.info(
|
|
990
|
+
`${migrationResult.migratedCount} conversation records successfully migrated`
|
|
991
|
+
);
|
|
992
|
+
}
|
|
993
|
+
} else {
|
|
994
|
+
this.logger.error("Conversation migration error:", migrationResult.error);
|
|
995
|
+
}
|
|
996
|
+
} catch (error) {
|
|
997
|
+
this.debug("Error migrating conversation schema:", error);
|
|
998
|
+
}
|
|
999
|
+
try {
|
|
1000
|
+
const migrationResult = await this.migrateAgentHistorySchema();
|
|
1001
|
+
if (!migrationResult.success) {
|
|
1002
|
+
this.logger.error("Agent history schema migration error:", migrationResult.error);
|
|
1003
|
+
}
|
|
1004
|
+
} catch (error) {
|
|
1005
|
+
this.debug("Error migrating agent history schema:", error);
|
|
1006
|
+
}
|
|
1007
|
+
try {
|
|
1008
|
+
const result = await this.migrateAgentHistoryData({
|
|
1009
|
+
restoreFromBackup: false
|
|
1010
|
+
});
|
|
1011
|
+
if (result.success) {
|
|
1012
|
+
if ((result.migratedCount || 0) > 0) {
|
|
1013
|
+
this.logger.info(`${result.migratedCount} records successfully migrated`);
|
|
1014
|
+
}
|
|
1015
|
+
} else {
|
|
1016
|
+
this.logger.error("Migration error:", result.error);
|
|
1017
|
+
const restoreResult = await this.migrateAgentHistoryData({});
|
|
1018
|
+
if (restoreResult.success) {
|
|
1019
|
+
this.logger.info("Successfully restored from backup");
|
|
1020
|
+
}
|
|
1021
|
+
}
|
|
1022
|
+
} catch (error) {
|
|
1023
|
+
this.debug("Error initializing database:", error);
|
|
1024
|
+
}
|
|
1025
|
+
}
|
|
1026
|
+
/**
|
|
1027
|
+
* Generate a unique ID for a message
|
|
1028
|
+
* @returns Unique ID
|
|
1029
|
+
*/
|
|
1030
|
+
generateId() {
|
|
1031
|
+
return Math.random().toString(36).substring(2, 15) + Math.random().toString(36).substring(2, 15);
|
|
1032
|
+
}
|
|
1033
|
+
/**
|
|
1034
|
+
* Get messages with filtering options
|
|
1035
|
+
* @param options Filtering options
|
|
1036
|
+
* @returns Filtered messages
|
|
1037
|
+
*/
|
|
1038
|
+
async getMessages(options = {}) {
|
|
1039
|
+
await this.initialized;
|
|
1040
|
+
await debugDelay();
|
|
1041
|
+
const {
|
|
1042
|
+
userId = "default",
|
|
1043
|
+
conversationId = "default",
|
|
1044
|
+
limit,
|
|
1045
|
+
before,
|
|
1046
|
+
after,
|
|
1047
|
+
role,
|
|
1048
|
+
types
|
|
1049
|
+
} = options;
|
|
1050
|
+
const messagesTableName = `${this.options.tablePrefix}_messages`;
|
|
1051
|
+
const conversationsTableName = `${this.options.tablePrefix}_conversations`;
|
|
1052
|
+
try {
|
|
1053
|
+
let sql = `
|
|
1054
|
+
SELECT m.message_id, m.role, m.content, m.type, m.created_at, m.conversation_id
|
|
1055
|
+
FROM ${messagesTableName} m
|
|
1056
|
+
`;
|
|
1057
|
+
const args = [];
|
|
1058
|
+
const conditions = [];
|
|
1059
|
+
if (userId !== "default") {
|
|
1060
|
+
sql += ` INNER JOIN ${conversationsTableName} c ON m.conversation_id = c.id`;
|
|
1061
|
+
conditions.push("c.user_id = ?");
|
|
1062
|
+
args.push(userId);
|
|
1063
|
+
}
|
|
1064
|
+
if (conversationId !== "default") {
|
|
1065
|
+
conditions.push("m.conversation_id = ?");
|
|
1066
|
+
args.push(conversationId);
|
|
1067
|
+
}
|
|
1068
|
+
if (before) {
|
|
1069
|
+
conditions.push("m.created_at < ?");
|
|
1070
|
+
args.push(new Date(before).toISOString());
|
|
1071
|
+
}
|
|
1072
|
+
if (after) {
|
|
1073
|
+
conditions.push("m.created_at > ?");
|
|
1074
|
+
args.push(new Date(after).toISOString());
|
|
1075
|
+
}
|
|
1076
|
+
if (role) {
|
|
1077
|
+
conditions.push("m.role = ?");
|
|
1078
|
+
args.push(role);
|
|
1079
|
+
}
|
|
1080
|
+
if (types) {
|
|
1081
|
+
const placeholders = types.map(() => "?").join(", ");
|
|
1082
|
+
conditions.push(`m.type IN (${placeholders})`);
|
|
1083
|
+
args.push(...types);
|
|
1084
|
+
}
|
|
1085
|
+
if (conditions.length > 0) {
|
|
1086
|
+
sql += ` WHERE ${conditions.join(" AND ")}`;
|
|
1087
|
+
}
|
|
1088
|
+
if (limit && limit > 0) {
|
|
1089
|
+
sql += " ORDER BY m.created_at DESC LIMIT ?";
|
|
1090
|
+
args.push(limit);
|
|
1091
|
+
} else {
|
|
1092
|
+
sql += " ORDER BY m.created_at ASC";
|
|
1093
|
+
}
|
|
1094
|
+
const result = await this.client.execute({
|
|
1095
|
+
sql,
|
|
1096
|
+
args
|
|
1097
|
+
});
|
|
1098
|
+
const messages = result.rows.map((row) => {
|
|
1099
|
+
let content = row.content;
|
|
1100
|
+
const parsedContent = safeJsonParse(content);
|
|
1101
|
+
if (parsedContent !== null) {
|
|
1102
|
+
content = parsedContent;
|
|
1103
|
+
}
|
|
1104
|
+
return {
|
|
1105
|
+
id: row.message_id,
|
|
1106
|
+
role: row.role,
|
|
1107
|
+
content,
|
|
1108
|
+
type: row.type,
|
|
1109
|
+
createdAt: row.created_at
|
|
1110
|
+
};
|
|
1111
|
+
});
|
|
1112
|
+
if (limit && limit > 0) {
|
|
1113
|
+
return messages.reverse();
|
|
1114
|
+
}
|
|
1115
|
+
return messages;
|
|
1116
|
+
} catch (error) {
|
|
1117
|
+
this.debug("Error getting messages:", error);
|
|
1118
|
+
throw new Error("Failed to get messages from LibSQL database");
|
|
1119
|
+
}
|
|
1120
|
+
}
|
|
1121
|
+
/**
|
|
1122
|
+
* Add a message to the conversation history
|
|
1123
|
+
* @param message Message to add
|
|
1124
|
+
* @param userId User identifier (optional, defaults to "default")
|
|
1125
|
+
* @param conversationId Conversation identifier (optional, defaults to "default")
|
|
1126
|
+
*/
|
|
1127
|
+
async addMessage(message, conversationId = "default") {
|
|
1128
|
+
await this.initialized;
|
|
1129
|
+
await debugDelay();
|
|
1130
|
+
const tableName = `${this.options.tablePrefix}_messages`;
|
|
1131
|
+
const contentString = safeStringify2(message.content);
|
|
1132
|
+
await this.executeWithRetryStrategy(async () => {
|
|
1133
|
+
await this.client.execute({
|
|
1134
|
+
sql: `INSERT INTO ${tableName} (conversation_id, message_id, role, content, type, created_at)
|
|
1135
|
+
VALUES (?, ?, ?, ?, ?, ?)`,
|
|
1136
|
+
args: [
|
|
1137
|
+
conversationId,
|
|
1138
|
+
message.id,
|
|
1139
|
+
message.role,
|
|
1140
|
+
contentString,
|
|
1141
|
+
message.type,
|
|
1142
|
+
message.createdAt
|
|
1143
|
+
]
|
|
1144
|
+
});
|
|
1145
|
+
this.debug("Message added successfully", { conversationId, messageId: message.id });
|
|
1146
|
+
try {
|
|
1147
|
+
await this.pruneOldMessages(conversationId);
|
|
1148
|
+
} catch (pruneError) {
|
|
1149
|
+
this.debug("Error pruning old messages:", pruneError);
|
|
1150
|
+
}
|
|
1151
|
+
}, `addMessage[${message.id}]`);
|
|
1152
|
+
}
|
|
1153
|
+
/**
|
|
1154
|
+
* Prune old messages to respect storage limit
|
|
1155
|
+
* @param conversationId Conversation ID to prune messages for
|
|
1156
|
+
*/
|
|
1157
|
+
async pruneOldMessages(conversationId) {
|
|
1158
|
+
const limit = this.options.storageLimit || 100;
|
|
1159
|
+
const tableName = `${this.options.tablePrefix}_messages`;
|
|
1160
|
+
try {
|
|
1161
|
+
const countResult = await this.client.execute({
|
|
1162
|
+
sql: `SELECT COUNT(*) as count FROM ${tableName} WHERE conversation_id = ?`,
|
|
1163
|
+
args: [conversationId]
|
|
1164
|
+
});
|
|
1165
|
+
const messageCount = countResult.rows[0]?.count;
|
|
1166
|
+
if (messageCount > limit) {
|
|
1167
|
+
const deleteCount = messageCount - limit;
|
|
1168
|
+
await this.client.execute({
|
|
1169
|
+
sql: `DELETE FROM ${tableName}
|
|
1170
|
+
WHERE conversation_id = ?
|
|
1171
|
+
AND message_id IN (
|
|
1172
|
+
SELECT message_id FROM ${tableName}
|
|
1173
|
+
WHERE conversation_id = ?
|
|
1174
|
+
ORDER BY created_at ASC
|
|
1175
|
+
LIMIT ?
|
|
1176
|
+
)`,
|
|
1177
|
+
args: [conversationId, conversationId, deleteCount]
|
|
1178
|
+
});
|
|
1179
|
+
this.debug(`Pruned ${deleteCount} old messages for conversation ${conversationId}`);
|
|
1180
|
+
}
|
|
1181
|
+
} catch (error) {
|
|
1182
|
+
this.debug("Error pruning old messages:", error);
|
|
1183
|
+
throw error;
|
|
1184
|
+
}
|
|
1185
|
+
}
|
|
1186
|
+
/**
|
|
1187
|
+
* Clear messages from memory
|
|
1188
|
+
*/
|
|
1189
|
+
async clearMessages(options) {
|
|
1190
|
+
await this.initialized;
|
|
1191
|
+
await debugDelay();
|
|
1192
|
+
const { userId, conversationId } = options;
|
|
1193
|
+
const messagesTableName = `${this.options.tablePrefix}_messages`;
|
|
1194
|
+
const conversationsTableName = `${this.options.tablePrefix}_conversations`;
|
|
1195
|
+
try {
|
|
1196
|
+
if (conversationId) {
|
|
1197
|
+
await this.client.execute({
|
|
1198
|
+
sql: `DELETE FROM ${messagesTableName}
|
|
1199
|
+
WHERE conversation_id = ?
|
|
1200
|
+
AND conversation_id IN (
|
|
1201
|
+
SELECT id FROM ${conversationsTableName} WHERE user_id = ?
|
|
1202
|
+
)`,
|
|
1203
|
+
args: [conversationId, userId]
|
|
1204
|
+
});
|
|
1205
|
+
this.debug(`Cleared messages for conversation ${conversationId} for user ${userId}`);
|
|
1206
|
+
} else {
|
|
1207
|
+
await this.client.execute({
|
|
1208
|
+
sql: `DELETE FROM ${messagesTableName}
|
|
1209
|
+
WHERE conversation_id IN (
|
|
1210
|
+
SELECT id FROM ${conversationsTableName} WHERE user_id = ?
|
|
1211
|
+
)`,
|
|
1212
|
+
args: [userId]
|
|
1213
|
+
});
|
|
1214
|
+
this.debug(`Cleared all messages for user ${userId}`);
|
|
1215
|
+
}
|
|
1216
|
+
} catch (error) {
|
|
1217
|
+
this.debug("Error clearing messages:", error);
|
|
1218
|
+
throw new Error("Failed to clear messages from LibSQL database");
|
|
1219
|
+
}
|
|
1220
|
+
}
|
|
1221
|
+
/**
|
|
1222
|
+
* Close the database connection
|
|
1223
|
+
*/
|
|
1224
|
+
async close() {
|
|
1225
|
+
try {
|
|
1226
|
+
await this.initialized;
|
|
1227
|
+
} catch {
|
|
1228
|
+
}
|
|
1229
|
+
this.client.close();
|
|
1230
|
+
}
|
|
1231
|
+
/**
|
|
1232
|
+
* Add or update a history entry
|
|
1233
|
+
* @param key Entry ID
|
|
1234
|
+
* @param value Entry data
|
|
1235
|
+
* @param agentId Agent ID for filtering
|
|
1236
|
+
*/
|
|
1237
|
+
async addHistoryEntry(key, value, agentId) {
|
|
1238
|
+
await this.initialized;
|
|
1239
|
+
try {
|
|
1240
|
+
const tableName = `${this.options.tablePrefix}_agent_history`;
|
|
1241
|
+
const inputJSON = value.input ? safeStringify2(value.input) : null;
|
|
1242
|
+
const outputJSON = value.output ? safeStringify2(value.output) : null;
|
|
1243
|
+
const usageJSON = value.usage ? safeStringify2(value.usage) : null;
|
|
1244
|
+
const metadataJSON = value.metadata ? safeStringify2(value.metadata) : null;
|
|
1245
|
+
await this.client.execute({
|
|
1246
|
+
sql: `INSERT OR REPLACE INTO ${tableName}
|
|
1247
|
+
(id, agent_id, timestamp, status, input, output, usage, metadata, userId, conversationId)
|
|
1248
|
+
VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
|
|
1249
|
+
args: [
|
|
1250
|
+
key,
|
|
1251
|
+
// id
|
|
1252
|
+
agentId,
|
|
1253
|
+
// agent_id
|
|
1254
|
+
value.timestamp ? value.timestamp.toISOString() : (/* @__PURE__ */ new Date()).toISOString(),
|
|
1255
|
+
// timestamp
|
|
1256
|
+
value.status || null,
|
|
1257
|
+
// status
|
|
1258
|
+
inputJSON,
|
|
1259
|
+
// input
|
|
1260
|
+
outputJSON,
|
|
          // output
          usageJSON,
          // usage
          metadataJSON,
          // metadata
          value.userId || null,
          // userId
          value.conversationId || null
          // conversationId
        ]
      });
      this.debug(`Set agent_history entry with ID ${key} for agent ${agentId}`);
    } catch (error) {
      this.debug("Error setting agent_history entry:", error);
      throw new Error("Failed to set value in agent_history");
    }
  }
  /**
   * Update an existing history entry
   * @param key Entry ID
   * @param value Updated entry data
   * @param agentId Agent ID for filtering
   */
  async updateHistoryEntry(key, value, agentId) {
    return this.addHistoryEntry(key, value, agentId);
  }
  /**
   * Add a history step
   * @param key Step ID
   * @param value Step data
   * @param historyId Related history entry ID
   * @param agentId Agent ID for filtering
   */
  async addHistoryStep(key, value, historyId, agentId) {
    await this.initialized;
    try {
      const tableName = `${this.options.tablePrefix}_agent_history_steps`;
      const serializedValue = safeStringify2(value);
      await this.client.execute({
        sql: `INSERT OR REPLACE INTO ${tableName} (key, value, history_id, agent_id) VALUES (?, ?, ?, ?)`,
        args: [key, serializedValue, historyId, agentId]
      });
      this.debug(`Set agent_history_steps:${key} for history ${historyId} and agent ${agentId}`);
    } catch (error) {
      this.debug(`Error setting agent_history_steps:${key}`, error);
      throw new Error("Failed to set value in agent_history_steps");
    }
  }
  /**
   * Update a history step
   * @param key Step ID
   * @param value Updated step data
   * @param historyId Related history entry ID
   * @param agentId Agent ID for filtering
   */
  async updateHistoryStep(key, value, historyId, agentId) {
    return this.addHistoryStep(key, value, historyId, agentId);
  }
  /**
   * Add a timeline event
   * @param key Event ID (UUID)
   * @param value Timeline event data
   * @param historyId Related history entry ID
   * @param agentId Agent ID for filtering
   */
  async addTimelineEvent(key, value, historyId, agentId) {
    await this.initialized;
    try {
      const tableName = `${this.options.tablePrefix}_agent_history_timeline_events`;
      const inputJSON = value.input ? safeStringify2(value.input) : null;
      const outputJSON = value.output ? safeStringify2(value.output) : null;
      const statusMessageJSON = value.statusMessage ? safeStringify2(value.statusMessage) : null;
      const metadataJSON = value.metadata ? safeStringify2(value.metadata) : null;
      const tagsJSON = value.tags ? safeStringify2(value.tags) : null;
      await this.client.execute({
        sql: `INSERT OR REPLACE INTO ${tableName}
          (id, history_id, agent_id, event_type, event_name,
           start_time, end_time, status, status_message, level,
           version, parent_event_id, tags,
           input, output, error, metadata)
          VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
        args: [
          key,
          historyId,
          agentId,
          value.type,
          value.name,
          value.startTime,
          value.endTime || null,
          value.status || null,
          statusMessageJSON || null,
          value.level || "INFO",
          value.version || null,
          value.parentEventId || null,
          tagsJSON,
          inputJSON,
          outputJSON,
          statusMessageJSON,
          metadataJSON
        ]
      });
      this.debug(`Added timeline event ${key} for history ${historyId}`);
    } catch (error) {
      this.debug("Error adding timeline event:", error);
      throw new Error("Failed to add timeline event");
    }
  }
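  // Usage sketch (illustrative comment, not part of the original bundle): the fields read
  // from `value` above (type, name, startTime, status, level, ...) suggest a call shaped
  // roughly like the following; the instance name `storage` and the literal values are
  // assumptions for illustration only.
  //   await storage.addTimelineEvent(eventId, {
  //     type: "agent",
  //     name: "agent:start",
  //     startTime: new Date().toISOString(),
  //     status: "running",
  //     level: "INFO",
  //   }, historyEntryId, agentId);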
  /**
   * Get a history entry by ID
   * @param key Entry ID
   * @returns The history entry or undefined if not found
   */
  async getHistoryEntry(key) {
    await this.initialized;
    try {
      const tableName = `${this.options.tablePrefix}_agent_history`;
      const result = await this.client.execute({
        sql: `SELECT id, agent_id, timestamp, status, input, output, usage, metadata, userId, conversationId
          FROM ${tableName} WHERE id = ?`,
        args: [key]
      });
      if (result.rows.length === 0) {
        this.debug(`History entry with ID ${key} not found`);
        return void 0;
      }
      const row = result.rows[0];
      const entry = {
        id: row.id,
        _agentId: row.agent_id,
        // Keep _agentId for compatibility
        timestamp: new Date(row.timestamp),
        status: row.status,
        input: row.input ? safeJsonParse(row.input) : null,
        output: row.output ? safeJsonParse(row.output) : null,
        usage: row.usage ? safeJsonParse(row.usage) : null,
        metadata: row.metadata ? safeJsonParse(row.metadata) : null,
        userId: row.userId,
        conversationId: row.conversationId
      };
      this.debug(`Got history entry with ID ${key}`);
      const stepsTableName = `${this.options.tablePrefix}_agent_history_steps`;
      const stepsResult = await this.client.execute({
        sql: `SELECT value FROM ${stepsTableName} WHERE history_id = ? AND agent_id = ?`,
        args: [key, entry._agentId]
      });
      const steps = stepsResult.rows.map((row2) => {
        const step = safeJsonParse(row2.value);
        return {
          type: step.type,
          name: step.name,
          content: step.content,
          arguments: step.arguments
        };
      });
      const timelineEventsTableName = `${this.options.tablePrefix}_agent_history_timeline_events`;
      const timelineEventsResult = await this.client.execute({
        sql: `SELECT id, event_type, event_name, start_time, end_time,
          status, status_message, level, version,
          parent_event_id, tags, input, output, error, metadata
          FROM ${timelineEventsTableName}
          WHERE history_id = ? AND agent_id = ?`,
        args: [key, entry._agentId]
      });
      const events = timelineEventsResult.rows.map((row2) => {
        const input = row2.input ? safeJsonParse(row2.input) : void 0;
        const output = row2.output ? safeJsonParse(row2.output) : void 0;
        const error = row2.error ? safeJsonParse(row2.error) : void 0;
        const statusMessage = row2.status_message ? safeJsonParse(row2.status_message) : void 0;
        const metadata = row2.metadata ? safeJsonParse(row2.metadata) : void 0;
        const tags = row2.tags ? safeJsonParse(row2.tags) : void 0;
        return {
          id: row2.id,
          type: row2.event_type,
          name: row2.event_name,
          startTime: row2.start_time,
          endTime: row2.end_time,
          status: row2.status,
          statusMessage,
          level: row2.level,
          version: row2.version,
          parentEventId: row2.parent_event_id,
          tags,
          input,
          output,
          error: statusMessage ? statusMessage : error,
          metadata
        };
      });
      entry.steps = steps;
      entry.events = events;
      return entry;
    } catch (error) {
      this.debug(`Error getting history entry with ID ${key}`, error);
      return void 0;
    }
  }
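  // Usage sketch (illustrative comment, not part of the original bundle): getHistoryEntry
  // resolves the base row and then attaches its steps and timeline events, so a caller
  // could do something like this (the `storage` name is an assumption):
  //   const entry = await storage.getHistoryEntry(historyEntryId);
  //   if (entry) console.log(entry.status, entry.steps.length, entry.events.length);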
  /**
   * Get a history step by ID
   * @param key Step ID
   * @returns The history step or undefined if not found
   */
  async getHistoryStep(key) {
    await this.initialized;
    try {
      const tableName = `${this.options.tablePrefix}_agent_history_steps`;
      const result = await this.client.execute({
        sql: `SELECT value FROM ${tableName} WHERE key = ?`,
        args: [key]
      });
      if (result.rows.length === 0) {
        this.debug(`History step with ID ${key} not found`);
        return void 0;
      }
      const value = safeJsonParse(result.rows[0].value);
      this.debug(`Got history step with ID ${key}`);
      return value;
    } catch (error) {
      this.debug(`Error getting history step with ID ${key}`, error);
      return void 0;
    }
  }
  async createConversation(conversation) {
    await this.initialized;
    await debugDelay();
    const now = (/* @__PURE__ */ new Date()).toISOString();
    const metadataString = safeStringify2(conversation.metadata);
    const tableName = `${this.options.tablePrefix}_conversations`;
    return await this.executeWithRetryStrategy(async () => {
      await this.client.execute({
        sql: `INSERT INTO ${tableName} (id, resource_id, user_id, title, metadata, created_at, updated_at)
          VALUES (?, ?, ?, ?, ?, ?, ?)`,
        args: [
          conversation.id,
          conversation.resourceId,
          conversation.userId,
          conversation.title,
          metadataString,
          now,
          now
        ]
      });
      return {
        id: conversation.id,
        resourceId: conversation.resourceId,
        userId: conversation.userId,
        title: conversation.title,
        metadata: conversation.metadata,
        createdAt: now,
        updatedAt: now
      };
    }, `createConversation[${conversation.id}]`);
  }
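  // Usage sketch (illustrative comment, not part of the original bundle): the column list
  // above implies a conversation record like the following; the instance name `storage`
  // and the field values are assumptions for illustration only.
  //   const conversation = await storage.createConversation({
  //     id: crypto.randomUUID(),
  //     resourceId: "my-agent",
  //     userId: "user-123",
  //     title: "Support chat",
  //     metadata: { channel: "web" },
  //   });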
  async getConversation(id) {
    await this.initialized;
    await debugDelay();
    const tableName = `${this.options.tablePrefix}_conversations`;
    try {
      const result = await this.client.execute({
        sql: `SELECT * FROM ${tableName} WHERE id = ?`,
        args: [id]
      });
      if (result.rows.length === 0) {
        return null;
      }
      const row = result.rows[0];
      return {
        id: row.id,
        resourceId: row.resource_id,
        userId: row.user_id,
        title: row.title,
        metadata: row.metadata ? safeJsonParse(row.metadata) : {},
        createdAt: row.created_at,
        updatedAt: row.updated_at
      };
    } catch (error) {
      this.debug("Error getting conversation:", error);
      throw new Error("Failed to get conversation from LibSQL database");
    }
  }
  async getConversations(resourceId) {
    await this.initialized;
    await debugDelay();
    const tableName = `${this.options.tablePrefix}_conversations`;
    try {
      const result = await this.client.execute({
        sql: `SELECT * FROM ${tableName} WHERE resource_id = ? ORDER BY updated_at DESC`,
        args: [resourceId]
      });
      return result.rows.map((row) => ({
        id: row.id,
        resourceId: row.resource_id,
        userId: row.user_id,
        title: row.title,
        metadata: safeJsonParse(row.metadata),
        createdAt: row.created_at,
        updatedAt: row.updated_at
      }));
    } catch (error) {
      this.debug("Error getting conversations:", error);
      throw new Error("Failed to get conversations from LibSQL database");
    }
  }
  async getConversationsByUserId(userId, options = {}) {
    await this.initialized;
    await debugDelay();
    const {
      resourceId,
      limit = 50,
      offset = 0,
      orderBy = "updated_at",
      orderDirection = "DESC"
    } = options;
    const tableName = `${this.options.tablePrefix}_conversations`;
    try {
      let sql = `SELECT * FROM ${tableName} WHERE user_id = ?`;
      const args = [userId];
      if (resourceId) {
        sql += " AND resource_id = ?";
        args.push(resourceId);
      }
      sql += ` ORDER BY ${orderBy} ${orderDirection}`;
      if (limit > 0) {
        sql += " LIMIT ? OFFSET ?";
        args.push(limit, offset);
      }
      const result = await this.client.execute({
        sql,
        args
      });
      return result.rows.map((row) => ({
        id: row.id,
        resourceId: row.resource_id,
        userId: row.user_id,
        title: row.title,
        metadata: safeJsonParse(row.metadata),
        createdAt: row.created_at,
        updatedAt: row.updated_at
      }));
    } catch (error) {
      this.debug("Error getting conversations by user ID:", error);
      throw new Error("Failed to get conversations by user ID from LibSQL database");
    }
  }
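  // Usage sketch (illustrative comment, not part of the original bundle): the options
  // mirror the SQL built above (optional resourceId filter, limit/offset, orderBy and
  // orderDirection); `storage` and the literal values are assumptions.
  //   const recent = await storage.getConversationsByUserId("user-123", {
  //     resourceId: "my-agent",
  //     limit: 20,
  //     offset: 0,
  //     orderBy: "updated_at",
  //     orderDirection: "DESC",
  //   });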
  /**
   * Query conversations with filtering and pagination options
   *
   * @param options Query options for filtering and pagination
   * @returns Promise that resolves to an array of conversations matching the criteria
   * @see {@link https://voltagent.dev/docs/agents/memory/libsql#querying-conversations | Querying Conversations}
   */
  async queryConversations(options) {
    await this.initialized;
    await debugDelay();
    const {
      userId,
      resourceId,
      limit = 50,
      offset = 0,
      orderBy = "updated_at",
      orderDirection = "DESC"
    } = options;
    const tableName = `${this.options.tablePrefix}_conversations`;
    try {
      let sql = `SELECT * FROM ${tableName}`;
      const args = [];
      const conditions = [];
      if (userId) {
        conditions.push("user_id = ?");
        args.push(userId);
      }
      if (resourceId) {
        conditions.push("resource_id = ?");
        args.push(resourceId);
      }
      if (conditions.length > 0) {
        sql += ` WHERE ${conditions.join(" AND ")}`;
      }
      sql += ` ORDER BY ${orderBy} ${orderDirection}`;
      if (limit > 0) {
        sql += " LIMIT ? OFFSET ?";
        args.push(limit, offset);
      }
      const result = await this.client.execute({
        sql,
        args
      });
      return result.rows.map((row) => ({
        id: row.id,
        resourceId: row.resource_id,
        userId: row.user_id,
        title: row.title,
        metadata: safeJsonParse(row.metadata),
        createdAt: row.created_at,
        updatedAt: row.updated_at
      }));
    } catch (error) {
      this.debug("Error querying conversations:", error);
      throw new Error("Failed to query conversations from LibSQL database");
    }
  }
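  // Usage sketch (illustrative comment, not part of the original bundle): unlike
  // getConversationsByUserId, both filters here are optional, so either may be omitted;
  // `storage` and the values are assumptions.
  //   const convos = await storage.queryConversations({ userId: "user-123", limit: 10 });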
  /**
   * Get messages for a specific conversation with pagination support
   *
   * @param conversationId The unique identifier of the conversation to retrieve messages from
   * @param options Optional pagination and filtering options
   * @returns Promise that resolves to an array of messages in chronological order (oldest first)
   * @see {@link https://voltagent.dev/docs/agents/memory/libsql#conversation-messages | Getting Conversation Messages}
   */
  async getConversationMessages(conversationId, options = {}) {
    await this.initialized;
    await debugDelay();
    const { limit = 100, offset = 0 } = options;
    const tableName = `${this.options.tablePrefix}_messages`;
    try {
      let sql = `SELECT * FROM ${tableName} WHERE conversation_id = ? ORDER BY created_at ASC`;
      const args = [conversationId];
      if (limit > 0) {
        sql += " LIMIT ? OFFSET ?";
        args.push(limit, offset);
      }
      const result = await this.client.execute({
        sql,
        args
      });
      return result.rows.map((row) => {
        let content = row.content;
        const parsedContent = safeJsonParse(content);
        if (parsedContent !== null) {
          content = parsedContent;
        }
        return {
          id: row.message_id,
          role: row.role,
          content,
          type: row.type,
          createdAt: row.created_at
        };
      });
    } catch (error) {
      this.debug("Error getting conversation messages:", error);
      throw new Error("Failed to get conversation messages from LibSQL database");
    }
  }
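  // Usage sketch (illustrative comment, not part of the original bundle): messages come
  // back oldest first, so pagination walks forward through the conversation; `storage`
  // and the page size are assumptions.
  //   const firstPage = await storage.getConversationMessages(conversationId, { limit: 50 });
  //   const secondPage = await storage.getConversationMessages(conversationId, { limit: 50, offset: 50 });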
  async updateConversation(id, updates) {
    await this.initialized;
    await debugDelay();
    const tableName = `${this.options.tablePrefix}_conversations`;
    const now = (/* @__PURE__ */ new Date()).toISOString();
    try {
      const updatesList = [];
      const args = [];
      if (updates.resourceId !== void 0) {
        updatesList.push("resource_id = ?");
        args.push(updates.resourceId);
      }
      if (updates.userId !== void 0) {
        updatesList.push("user_id = ?");
        args.push(updates.userId);
      }
      if (updates.title !== void 0) {
        updatesList.push("title = ?");
        args.push(updates.title);
      }
      if (updates.metadata !== void 0) {
        updatesList.push("metadata = ?");
        args.push(safeStringify2(updates.metadata));
      }
      updatesList.push("updated_at = ?");
      args.push(now);
      args.push(id);
      await this.client.execute({
        sql: `UPDATE ${tableName} SET ${updatesList.join(", ")} WHERE id = ?`,
        args
      });
      const updated = await this.getConversation(id);
      if (!updated) {
        throw new Error("Conversation not found after update");
      }
      return updated;
    } catch (error) {
      this.debug("Error updating conversation:", error);
      throw new Error("Failed to update conversation in LibSQL database");
    }
  }
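  // Usage sketch (illustrative comment, not part of the original bundle): only the fields
  // present on `updates` are written, updated_at is always refreshed, and the updated row
  // is re-read before being returned; `storage` and the new title are assumptions.
  //   const updated = await storage.updateConversation(conversationId, { title: "Renamed chat" });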
  async deleteConversation(id) {
    await this.initialized;
    await debugDelay();
    const conversationsTableName = `${this.options.tablePrefix}_conversations`;
    const messagesTableName = `${this.options.tablePrefix}_messages`;
    try {
      await this.client.execute({
        sql: `DELETE FROM ${messagesTableName} WHERE conversation_id = ?`,
        args: [id]
      });
      await this.client.execute({
        sql: `DELETE FROM ${conversationsTableName} WHERE id = ?`,
        args: [id]
      });
    } catch (error) {
      this.debug("Error deleting conversation:", error);
      throw new Error("Failed to delete conversation from LibSQL database");
    }
  }
  /**
   * Get all history entries for an agent with pagination
   * @param agentId Agent ID
   * @param page Page number (0-based)
   * @param limit Number of entries per page
   * @returns Object with entries array and total count
   */
  async getAllHistoryEntriesByAgent(agentId, page, limit) {
    await this.initialized;
    try {
      const tableName = `${this.options.tablePrefix}_agent_history`;
      const offset = page * limit;
      const countResult = await this.client.execute({
        sql: `SELECT COUNT(*) as total FROM ${tableName} WHERE agent_id = ?`,
        args: [agentId]
      });
      const total = Number(countResult.rows[0].total);
      const result = await this.client.execute({
        sql: `SELECT id, agent_id, timestamp, status, input, output, usage, metadata, userId, conversationId
          FROM ${tableName} WHERE agent_id = ?
          ORDER BY timestamp DESC
          LIMIT ? OFFSET ?`,
        args: [agentId, limit, offset]
      });
      const entries = result.rows.map((row) => ({
        id: row.id,
        _agentId: row.agent_id,
        // Keep _agentId for compatibility
        timestamp: new Date(row.timestamp),
        status: row.status,
        input: row.input ? safeJsonParse(row.input) : null,
        output: row.output ? safeJsonParse(row.output) : null,
        usage: row.usage ? safeJsonParse(row.usage) : null,
        metadata: row.metadata ? safeJsonParse(row.metadata) : null,
        userId: row.userId,
        conversationId: row.conversationId
      }));
      this.debug(`Got all history entries for agent ${agentId} (${entries.length} items)`);
      const completeEntries = await Promise.all(
        entries.map(async (entry) => {
          const stepsTableName = `${this.options.tablePrefix}_agent_history_steps`;
          const stepsResult = await this.client.execute({
            sql: `SELECT value FROM ${stepsTableName} WHERE history_id = ? AND agent_id = ?`,
            args: [entry.id, agentId]
          });
          const steps = stepsResult.rows.map((row) => {
            const step = safeJsonParse(row.value);
            return {
              type: step.type,
              name: step.name,
              content: step.content,
              arguments: step.arguments
            };
          });
          const timelineEventsTableName = `${this.options.tablePrefix}_agent_history_timeline_events`;
          const timelineEventsResult = await this.client.execute({
            sql: `SELECT id, event_type, event_name, start_time, end_time,
              status, status_message, level, version,
              parent_event_id, tags, input, output, error, metadata
              FROM ${timelineEventsTableName}
              WHERE history_id = ? AND agent_id = ?`,
            args: [entry.id, agentId]
          });
          const events = timelineEventsResult.rows.map((row) => {
            const input = row.input ? safeJsonParse(row.input) : void 0;
            const output = row.output ? safeJsonParse(row.output) : void 0;
            const error = row.error ? safeJsonParse(row.error) : void 0;
            const statusMessage = row.status_message ? safeJsonParse(row.status_message) : void 0;
            const metadata = row.metadata ? safeJsonParse(row.metadata) : void 0;
            const tags = row.tags ? safeJsonParse(row.tags) : void 0;
            return {
              id: row.id,
              type: row.event_type,
              name: row.event_name,
              startTime: row.start_time,
              endTime: row.end_time,
              status: row.status,
              statusMessage,
              level: row.level,
              version: row.version,
              parentEventId: row.parent_event_id,
              tags,
              input,
              output,
              error: statusMessage ? statusMessage : error,
              metadata
            };
          });
          entry.steps = steps;
          entry.events = events;
          return entry;
        })
      );
      return {
        entries: completeEntries,
        total
      };
    } catch (error) {
      this.debug(`Error getting history entries for agent ${agentId}`, error);
      return {
        entries: [],
        total: 0
      };
    }
  }
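  // Usage sketch (illustrative comment, not part of the original bundle): `page` is
  // 0-based and the result carries the total row count, which a caller could use for
  // pagination; `storage` and the page size are assumptions.
  //   const { entries, total } = await storage.getAllHistoryEntriesByAgent(agentId, 0, 25);
  //   const pageCount = Math.ceil(total / 25);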
  /**
   * Migrates agent history data from old structure to new structure.
   * If migration fails, it can be rolled back using the backup mechanism.
   *
   * Old database structure:
   * CREATE TABLE voltagent_memory_agent_history (
   *   key TEXT PRIMARY KEY,
   *   value TEXT NOT NULL,
   *   agent_id TEXT
   * );
   */
  async migrateAgentHistoryData(options = {}) {
    const {
      createBackup = true,
      restoreFromBackup = false,
      deleteBackupAfterSuccess = false
    } = options;
    const oldTableName = `${this.options.tablePrefix}_agent_history`;
    const oldTableBackup = `${oldTableName}_backup`;
    const timelineEventsTableName = `${this.options.tablePrefix}_agent_history_timeline_events`;
    try {
      this.debug("Starting agent history migration...");
      const flagCheck = await this.checkMigrationFlag("agent_history_data_migration");
      if (flagCheck.alreadyCompleted) {
        return { success: true, migratedCount: 0 };
      }
      if (restoreFromBackup) {
        this.debug("Starting restoration from backup...");
        const backupCheck = await this.client.execute({
          sql: "SELECT name FROM sqlite_master WHERE type='table' AND name=?",
          args: [oldTableBackup]
        });
        if (backupCheck.rows.length === 0) {
          throw new Error("No backup found to restore");
        }
        await this.client.execute("BEGIN TRANSACTION;");
        await this.client.execute(`DROP TABLE IF EXISTS ${oldTableName};`);
        await this.client.execute(`ALTER TABLE ${oldTableBackup} RENAME TO ${oldTableName};`);
        await this.client.execute("COMMIT;");
        this.debug("Restoration from backup completed successfully");
        return {
          success: true,
          backupCreated: false
        };
      }
      const tableInfoQuery = await this.client.execute(`PRAGMA table_info(${oldTableName})`);
      if (tableInfoQuery.rows.length === 0) {
        this.debug(`${oldTableName} table not found, migration not needed`);
        return {
          success: true,
          migratedCount: 0
        };
      }
      const hasValueColumn = tableInfoQuery.rows.some((row) => row.name === "value");
      if (!hasValueColumn) {
        this.debug("Table is already in new format, migration not needed");
        return {
          success: true,
          migratedCount: 0
        };
      }
      if (createBackup) {
        this.debug("Creating backup...");
        const backupCheck = await this.client.execute({
          sql: "SELECT name FROM sqlite_master WHERE type='table' AND name=?",
          args: [oldTableBackup]
        });
        if (backupCheck.rows.length > 0) {
          await this.client.execute(`DROP TABLE IF EXISTS ${oldTableBackup};`);
        }
        await this.client.execute(
          `CREATE TABLE ${oldTableBackup} AS SELECT * FROM ${oldTableName};`
        );
        this.debug("Backup created successfully");
      }
      const oldFormatData = await this.client.execute({
        sql: `SELECT key, value, agent_id FROM ${oldTableName}`
      });
      if (oldFormatData.rows.length === 0) {
        this.debug("No data found to migrate");
        return {
          success: true,
          migratedCount: 0,
          backupCreated: createBackup
        };
      }
      const tempTableName = `${oldTableName}_temp`;
      await this.client.execute(`
        CREATE TABLE ${tempTableName} (
          id TEXT PRIMARY KEY,
          agent_id TEXT NOT NULL,
          timestamp TEXT NOT NULL,
          status TEXT,
          input TEXT,
          output TEXT,
          usage TEXT,
          metadata TEXT
        )
      `);
      await this.client.execute("BEGIN TRANSACTION;");
      let migratedCount = 0;
      const migratedIds = /* @__PURE__ */ new Set();
      for (const row of oldFormatData.rows) {
        const key = row.key;
        const agentId = row.agent_id;
        const valueStr = row.value;
        try {
          const valueObj = safeJsonParse(valueStr);
          const id = valueObj.id || key;
          if (migratedIds.has(id)) {
            continue;
          }
          migratedIds.add(id);
          migratedCount++;
          const inputJSON = valueObj.input ? safeStringify2(valueObj.input) : null;
          const outputJSON = valueObj.output ? safeStringify2(valueObj.output) : null;
          const usageJSON = valueObj.usage ? safeStringify2(valueObj.usage) : null;
          await this.client.execute({
            sql: `INSERT INTO ${tempTableName}
              (id, agent_id, timestamp, status, input, output, usage, metadata)
              VALUES (?, ?, ?, ?, ?, ?, ?, ?)`,
            args: [
              id,
              valueObj._agentId || agentId,
              valueObj.timestamp || (/* @__PURE__ */ new Date()).toISOString(),
              valueObj.status || null,
              inputJSON,
              outputJSON,
              usageJSON,
              null
            ]
          });
          let input = "";
          if (Array.isArray(valueObj.events)) {
            for (const event of valueObj.events) {
              try {
                if (event.affectedNodeId?.startsWith("message_")) {
                  input = event.data.input;
                  continue;
                }
                const eventId = event.id || this.generateId();
                const eventType = event.type || "unknown";
                let eventName = event.name || "unknown";
                const startTime = event.timestamp || event.startTime || (/* @__PURE__ */ new Date()).toISOString();
                const endTime = event.updatedAt || event.endTime || startTime;
                let status = event.status || event.data?.status || null;
                let inputData = null;
                if (event.input) {
                  inputData = safeStringify2({ input: event.input });
                } else if (event.data?.input) {
                  inputData = safeStringify2({ input: event.data.input });
                } else if (input) {
                  inputData = safeStringify2({ input });
                }
                input = "";
                let metadata = null;
                if (event.metadata) {
                  metadata = safeStringify2(event.metadata);
                } else if (event.data) {
                  metadata = safeStringify2({
                    id: event.affectedNodeId?.split("_").pop(),
                    agentId: event.data?.metadata?.sourceAgentId,
                    ...event.data
                  });
                }
                if (eventType === "agent") {
                  if (eventName === "start") {
                    eventName = "agent:start";
                    status = "running";
                  } else if (eventName === "finished") {
                    if (event.data.status === "error") {
                      eventName = "agent:error";
                    } else {
                      eventName = "agent:success";
                    }
                  }
                  await this.client.execute({
                    sql: `INSERT OR REPLACE INTO ${timelineEventsTableName}
                      (id, history_id, agent_id, event_type, event_name, start_time, end_time,
                       status, status_message, level, version, parent_event_id,
                       tags, input, output, error, metadata)
                      VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
                    args: [
                      eventId,
                      id,
                      valueObj._agentId || agentId,
                      eventType,
                      eventName,
                      startTime,
                      endTime,
                      // @ts-ignore
                      status,
                      eventName === "agent:error" ? event.data.error.message : null,
                      event.level || "INFO",
                      event.version || null,
                      event.parentEventId || null,
                      null,
                      // tags
                      inputData,
                      event.data.output ? safeStringify2(event.data.output) : null,
                      eventName === "agent:error" ? safeStringify2(event.data.error) : null,
                      metadata
                    ]
                  });
                } else if (eventType === "memory") {
                  if (eventName === "memory:saveMessage") {
                    await this.client.execute({
                      sql: `INSERT OR REPLACE INTO ${timelineEventsTableName}
                        (id, history_id, agent_id, event_type, event_name, start_time, end_time,
                         status, status_message, level, version, parent_event_id,
                         tags, input, output, error, metadata)
                        VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
                      args: [
                        eventId,
                        id,
                        valueObj._agentId || agentId,
                        eventType,
                        "memory:write_start",
                        startTime,
                        null,
                        // no endTime
                        "running",
                        event.statusMessage || null,
                        event.level || "INFO",
                        event.version || null,
                        event.parentEventId || null,
                        null,
                        // tags
                        inputData,
                        null,
                        // no output
                        null,
                        // no error
                        safeStringify2({
                          id: "memory",
                          agentId: event.affectedNodeId?.split("_").pop()
                        })
                      ]
                    });
                    await this.client.execute({
                      sql: `INSERT OR REPLACE INTO ${timelineEventsTableName}
                        (id, history_id, agent_id, event_type, event_name, start_time, end_time,
                         status, status_message, level, version, parent_event_id,
                         tags, input, output, error, metadata)
                        VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
                      args: [
                        this.generateId(),
                        // New ID
                        id,
                        valueObj._agentId || agentId,
                        eventType,
                        "memory:write_success",
                        endTime,
                        // End time
                        endTime,
                        "completed",
                        event.statusMessage || null,
                        event.level || "INFO",
                        event.version || null,
                        eventId,
                        // Parent event ID
                        null,
                        // tags
                        inputData,
                        event.data.output ? safeStringify2(event.data.output) : null,
                        event.error ? safeStringify2(event.error) : null,
                        safeStringify2({
                          id: "memory",
                          agentId: event.affectedNodeId?.split("_").pop()
                        })
                      ]
                    });
                  } else if (eventName === "memory:getMessages") {
                    await this.client.execute({
                      sql: `INSERT OR REPLACE INTO ${timelineEventsTableName}
                        (id, history_id, agent_id, event_type, event_name, start_time, end_time,
                         status, status_message, level, version, parent_event_id,
                         tags, input, output, error, metadata)
                        VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
                      args: [
                        eventId,
                        id,
                        valueObj._agentId || agentId,
                        eventType,
                        "memory:read_start",
                        startTime,
                        null,
                        // no endTime
                        "running",
                        event.statusMessage || null,
                        event.level || "INFO",
                        event.version || null,
                        event.parentEventId || null,
                        null,
                        // tags
                        inputData,
                        null,
                        // no output
                        null,
                        // no error
                        safeStringify2({
                          id: "memory",
                          agentId: event.affectedNodeId?.split("_").pop()
                        })
                      ]
                    });
                    await this.client.execute({
                      sql: `INSERT OR REPLACE INTO ${timelineEventsTableName}
                        (id, history_id, agent_id, event_type, event_name, start_time, end_time,
                         status, status_message, level, version, parent_event_id,
                         tags, input, output, error, metadata)
                        VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
                      args: [
                        this.generateId(),
                        // New ID
                        id,
                        valueObj._agentId || agentId,
                        eventType,
                        "memory:read_success",
                        endTime,
                        // End time
                        endTime,
                        status,
                        event.statusMessage || null,
                        event.level || "INFO",
                        event.version || null,
                        eventId,
                        // Parent event ID
                        null,
                        // tags
                        inputData,
                        event.data.output ? safeStringify2(event.data.output) : null,
                        event.error ? safeStringify2(event.error) : null,
                        safeStringify2({
                          id: "memory",
                          agentId: event.affectedNodeId?.split("_").pop()
                        })
                      ]
                    });
                  } else {
                    await this.client.execute({
                      sql: `INSERT OR REPLACE INTO ${timelineEventsTableName}
                        (id, history_id, agent_id, event_type, event_name, start_time, end_time,
                         status, status_message, level, version, parent_event_id,
                         tags, input, output, error, metadata)
                        VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
                      args: [
                        eventId,
                        id,
                        valueObj._agentId || agentId,
                        eventType,
                        eventName,
                        startTime,
                        endTime,
                        status,
                        event.statusMessage || null,
                        event.level || "INFO",
                        event.version || null,
                        event.parentEventId || null,
                        null,
                        // tags
                        inputData,
                        event.output ? safeStringify2(event.output) : null,
                        event.error ? safeStringify2(event.error) : null,
                        metadata
                      ]
                    });
                  }
                } else if (eventType === "tool") {
                  if (eventName === "tool_working") {
                    await this.client.execute({
                      sql: `INSERT OR REPLACE INTO ${timelineEventsTableName}
                        (id, history_id, agent_id, event_type, event_name, start_time, end_time,
                         status, status_message, level, version, parent_event_id,
                         tags, input, output, error, metadata)
                        VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
                      args: [
                        eventId,
                        id,
                        valueObj._agentId || agentId,
                        eventType,
                        "tool:start",
                        startTime,
                        null,
                        // no endTime
                        "running",
                        event.statusMessage || null,
                        event.level || "INFO",
                        event.version || null,
                        event.parentEventId || null,
                        null,
                        // tags
                        inputData,
                        null,
                        // no output
                        null,
                        // no error
                        safeStringify2({
                          id: event.affectedNodeId?.split("_").pop(),
                          agentId: event.data?.metadata?.sourceAgentId,
                          displayName: event.data.metadata.toolName
                        })
                      ]
                    });
                    await this.client.execute({
                      sql: `INSERT OR REPLACE INTO ${timelineEventsTableName}
                        (id, history_id, agent_id, event_type, event_name, start_time, end_time,
                         status, status_message, level, version, parent_event_id,
                         tags, input, output, error, metadata)
                        VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
                      args: [
                        this.generateId(),
                        // New ID
                        id,
                        valueObj._agentId || agentId,
                        eventType,
                        "tool:success",
                        endTime,
                        // End time
                        endTime,
                        "completed",
                        event.statusMessage || null,
                        event.level || "INFO",
                        event.version || null,
                        eventId,
                        // Parent event ID
                        null,
                        // tags
                        inputData,
                        event.data.output ? safeStringify2(event.data.output) : null,
                        event.error ? safeStringify2(event.error) : null,
                        safeStringify2({
                          id: event.affectedNodeId?.split("_").pop(),
                          agentId: event.data?.metadata?.sourceAgentId,
                          displayName: event.data.metadata.toolName
                        })
                      ]
                    });
                  }
                } else {
                  await this.client.execute({
                    sql: `INSERT OR REPLACE INTO ${timelineEventsTableName}
                      (id, history_id, agent_id, event_type, event_name, start_time, end_time,
                       status, status_message, level, version, parent_event_id,
                       tags, input, output, error, metadata)
                      VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
                    args: [
                      eventId,
                      id,
                      valueObj._agentId || agentId,
                      eventType,
                      eventName,
                      startTime,
                      endTime,
                      status,
                      event.statusMessage || null,
                      event.level || "INFO",
                      event.version || null,
                      event.parentEventId || null,
                      null,
                      // tags
                      inputData,
                      event.output ? safeStringify2(event.output) : null,
                      event.error ? safeStringify2(event.error) : null,
                      safeStringify2({
                        id: eventType === "retriever" ? "retriever" : event.type,
                        agentId: event.affectedNodeId?.split("_").pop()
                      })
                    ]
                  });
                }
              } catch (error) {
                this.debug("Error processing event:", error);
              }
            }
          }
        } catch (error) {
          this.debug(`Error processing record with ID ${key}:`, error);
        }
      }
      await this.client.execute(`DROP TABLE ${oldTableName};`);
      await this.client.execute(`ALTER TABLE ${tempTableName} RENAME TO ${oldTableName};`);
      await this.client.execute(`
        CREATE INDEX IF NOT EXISTS idx_${oldTableName}_agent_id
        ON ${oldTableName}(agent_id)
      `);
      await this.client.execute("COMMIT;");
      this.debug(`Total ${migratedCount} records successfully migrated`);
      if (createBackup && deleteBackupAfterSuccess) {
        await this.client.execute(`DROP TABLE IF EXISTS ${oldTableBackup};`);
        this.debug("Unnecessary backup deleted");
      }
      await this.setMigrationFlag("agent_history_data_migration", migratedCount);
      return {
        success: true,
        migratedCount,
        backupCreated: createBackup && !deleteBackupAfterSuccess
      };
    } catch (error) {
      await this.client.execute("ROLLBACK;");
      this.debug("Error occurred while migrating agent history data:", error);
      return {
        success: false,
        error: error instanceof Error ? error : new Error(String(error)),
        backupCreated: options.createBackup
      };
    }
  }
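  // Usage sketch (illustrative comment, not part of the original bundle): mirrors the
  // options destructured above; keeping the backup table around is the conservative
  // choice, and a failed run could be followed by a restore. `storage` is an assumption.
  //   const result = await storage.migrateAgentHistoryData({ createBackup: true });
  //   if (!result.success) await storage.migrateAgentHistoryData({ restoreFromBackup: true });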
  /**
   * Migrate conversation schema to add user_id and update messages table
   *
   * ⚠️ **CRITICAL WARNING: DESTRUCTIVE OPERATION** ⚠️
   *
   * This method performs a DESTRUCTIVE schema migration that:
   * - DROPS and recreates existing tables
   * - Creates temporary tables during migration
   * - Modifies the primary key structure of the messages table
   * - Can cause DATA LOSS if interrupted or if errors occur
   *
   * **IMPORTANT SAFETY REQUIREMENTS:**
   * - 🛑 STOP all application instances before running this migration
   * - 🛑 Ensure NO concurrent database operations are running
   * - 🛑 Take a full database backup before running (independent of built-in backup)
   * - 🛑 Test the migration on a copy of production data first
   * - 🛑 Plan for downtime during migration execution
   *
   * **What this migration does:**
   * 1. Creates backup tables (if createBackup=true)
   * 2. Creates temporary tables with new schema
   * 3. Migrates data from old tables to new schema
   * 4. DROPS original tables
   * 5. Renames temporary tables to original names
   * 6. All operations are wrapped in a transaction for atomicity
   *
   * @param options Migration configuration options
   * @param options.createBackup Whether to create backup tables before migration (default: true, HIGHLY RECOMMENDED)
   * @param options.restoreFromBackup Whether to restore from existing backup instead of migrating (default: false)
   * @param options.deleteBackupAfterSuccess Whether to delete backup tables after successful migration (default: false)
   *
   * @returns Promise resolving to migration result with success status, migrated count, and backup info
   *
   * @example
   * ```typescript
   * // RECOMMENDED: Run with backup creation (default)
   * const result = await storage.migrateConversationSchema({
   *   createBackup: true,
   *   deleteBackupAfterSuccess: false // Keep backup for safety
   * });
   *
   * if (result.success) {
   *   console.log(`Migrated ${result.migratedCount} conversations successfully`);
   * } else {
   *   console.error('Migration failed:', result.error);
   *   // Consider restoring from backup
   * }
   *
   * // If migration fails, restore from backup:
   * const restoreResult = await storage.migrateConversationSchema({
   *   restoreFromBackup: true
   * });
   * ```
   *
   * @throws {Error} If migration fails and transaction is rolled back
   *
   * @since This migration is typically only needed when upgrading from older schema versions
   */
  async migrateConversationSchema(options = {}) {
    const {
      createBackup = true,
      restoreFromBackup = false,
      deleteBackupAfterSuccess = false
    } = options;
    const conversationsTableName = `${this.options.tablePrefix}_conversations`;
    const messagesTableName = `${this.options.tablePrefix}_messages`;
    const conversationsBackupName = `${conversationsTableName}_backup`;
    const messagesBackupName = `${messagesTableName}_backup`;
    try {
      this.debug("Starting conversation schema migration...");
      const flagCheck = await this.checkMigrationFlag("conversation_schema_migration");
      if (flagCheck.alreadyCompleted) {
        return { success: true, migratedCount: 0 };
      }
      if (restoreFromBackup) {
        this.debug("Starting restoration from backup...");
        const convBackupCheck = await this.client.execute({
          sql: "SELECT name FROM sqlite_master WHERE type='table' AND name=?",
          args: [conversationsBackupName]
        });
        const msgBackupCheck = await this.client.execute({
          sql: "SELECT name FROM sqlite_master WHERE type='table' AND name=?",
          args: [messagesBackupName]
        });
        if (convBackupCheck.rows.length === 0 || msgBackupCheck.rows.length === 0) {
          throw new Error("No backup found to restore");
        }
        await this.client.execute("BEGIN TRANSACTION;");
        await this.client.execute(`DROP TABLE IF EXISTS ${conversationsTableName};`);
        await this.client.execute(`DROP TABLE IF EXISTS ${messagesTableName};`);
        await this.client.execute(
          `ALTER TABLE ${conversationsBackupName} RENAME TO ${conversationsTableName};`
        );
        await this.client.execute(
          `ALTER TABLE ${messagesBackupName} RENAME TO ${messagesTableName};`
        );
        await this.client.execute("COMMIT;");
        this.debug("Restoration from backup completed successfully");
        return { success: true, backupCreated: false };
      }
      const convTableInfo = await this.client.execute(
        `PRAGMA table_info(${conversationsTableName})`
      );
      const msgTableInfo = await this.client.execute(`PRAGMA table_info(${messagesTableName})`);
      const hasUserIdInConversations = convTableInfo.rows.some((row) => row.name === "user_id");
      const hasUserIdInMessages = msgTableInfo.rows.some((row) => row.name === "user_id");
      if (hasUserIdInConversations && !hasUserIdInMessages) {
        this.debug("Tables are already in new format, migration not needed");
        return { success: true, migratedCount: 0 };
      }
      if (convTableInfo.rows.length === 0 && msgTableInfo.rows.length === 0) {
        this.debug("Tables don't exist, migration not needed");
        return { success: true, migratedCount: 0 };
      }
      if (createBackup) {
        this.debug("Creating backups...");
        await this.client.execute(`DROP TABLE IF EXISTS ${conversationsBackupName};`);
        await this.client.execute(`DROP TABLE IF EXISTS ${messagesBackupName};`);
        if (convTableInfo.rows.length > 0) {
          await this.client.execute(
            `CREATE TABLE ${conversationsBackupName} AS SELECT * FROM ${conversationsTableName};`
          );
        }
        if (msgTableInfo.rows.length > 0) {
          await this.client.execute(
            `CREATE TABLE ${messagesBackupName} AS SELECT * FROM ${messagesTableName};`
          );
        }
        this.debug("Backups created successfully");
      }
      let conversationData = [];
      let messageData = [];
      if (convTableInfo.rows.length > 0) {
        const convResult = await this.client.execute(`SELECT * FROM ${conversationsTableName}`);
        conversationData = convResult.rows;
      }
      if (msgTableInfo.rows.length > 0) {
        const msgResult = await this.client.execute(`SELECT * FROM ${messagesTableName}`);
        messageData = msgResult.rows;
      }
      await this.client.execute("BEGIN TRANSACTION;");
      const tempConversationsTable = `${conversationsTableName}_temp`;
      const tempMessagesTable = `${messagesTableName}_temp`;
      await this.client.execute(`
        CREATE TABLE ${tempConversationsTable} (
          id TEXT PRIMARY KEY,
          resource_id TEXT NOT NULL,
          user_id TEXT NOT NULL,
          title TEXT NOT NULL,
          metadata TEXT NOT NULL,
          created_at TEXT NOT NULL,
          updated_at TEXT NOT NULL
        )
      `);
      await this.client.execute(`
        CREATE TABLE ${tempMessagesTable} (
          conversation_id TEXT NOT NULL,
          message_id TEXT NOT NULL,
          role TEXT NOT NULL,
          content TEXT NOT NULL,
          type TEXT NOT NULL,
          created_at TEXT NOT NULL,
          PRIMARY KEY (conversation_id, message_id)
        )
      `);
      let migratedCount = 0;
      const createdConversations = /* @__PURE__ */ new Set();
      for (const row of messageData) {
        const conversationId = row.conversation_id;
        let userId = "default";
        if (hasUserIdInMessages && row.user_id) {
          userId = row.user_id;
        }
        if (!createdConversations.has(conversationId)) {
          const existingConversation = conversationData.find((conv) => conv.id === conversationId);
          if (existingConversation) {
            let convUserId = userId;
            if (hasUserIdInConversations && existingConversation.user_id) {
              convUserId = existingConversation.user_id;
            }
            await this.client.execute({
              sql: `INSERT INTO ${tempConversationsTable}
                (id, resource_id, user_id, title, metadata, created_at, updated_at)
                VALUES (?, ?, ?, ?, ?, ?, ?)`,
              args: [
                existingConversation.id,
                existingConversation.resource_id,
                convUserId,
                existingConversation.title,
                existingConversation.metadata,
                existingConversation.created_at,
                existingConversation.updated_at
              ]
            });
          } else {
            const now = (/* @__PURE__ */ new Date()).toISOString();
            await this.client.execute({
              sql: `INSERT INTO ${tempConversationsTable}
                (id, resource_id, user_id, title, metadata, created_at, updated_at)
                VALUES (?, ?, ?, ?, ?, ?, ?)`,
              args: [
                conversationId,
                "default",
                // Default resource_id for auto-created conversations
                userId,
                "Migrated Conversation",
                // Default title
                safeStringify2({}),
                // Empty metadata
                now,
                now
              ]
            });
          }
          createdConversations.add(conversationId);
          migratedCount++;
        }
        await this.client.execute({
          sql: `INSERT INTO ${tempMessagesTable}
            (conversation_id, message_id, role, content, type, created_at)
            VALUES (?, ?, ?, ?, ?, ?)`,
          args: [
            row.conversation_id,
            row.message_id,
            row.role,
            row.content,
            row.type,
            row.created_at
          ]
        });
      }
      for (const row of conversationData) {
        const conversationId = row.id;
        if (!createdConversations.has(conversationId)) {
          let userId = "default";
          if (hasUserIdInConversations && row.user_id) {
            userId = row.user_id;
          }
          await this.client.execute({
            sql: `INSERT INTO ${tempConversationsTable}
              (id, resource_id, user_id, title, metadata, created_at, updated_at)
              VALUES (?, ?, ?, ?, ?, ?, ?)`,
            args: [
              row.id,
              row.resource_id,
              userId,
              row.title,
              row.metadata,
              row.created_at,
              row.updated_at
            ]
          });
          migratedCount++;
        }
      }
      await this.client.execute(`DROP TABLE IF EXISTS ${conversationsTableName};`);
      await this.client.execute(`DROP TABLE IF EXISTS ${messagesTableName};`);
      await this.client.execute(
        `ALTER TABLE ${tempConversationsTable} RENAME TO ${conversationsTableName};`
      );
      await this.client.execute(`ALTER TABLE ${tempMessagesTable} RENAME TO ${messagesTableName};`);
      await this.client.execute(`
        CREATE INDEX IF NOT EXISTS idx_${messagesTableName}_lookup
        ON ${messagesTableName}(conversation_id, created_at)
      `);
      await this.client.execute(`
        CREATE INDEX IF NOT EXISTS idx_${conversationsTableName}_resource
        ON ${conversationsTableName}(resource_id)
      `);
      await this.client.execute(`
        CREATE INDEX IF NOT EXISTS idx_${conversationsTableName}_user
        ON ${conversationsTableName}(user_id)
      `);
      await this.client.execute("COMMIT;");
      if (deleteBackupAfterSuccess) {
        await this.client.execute(`DROP TABLE IF EXISTS ${conversationsBackupName};`);
        await this.client.execute(`DROP TABLE IF EXISTS ${messagesBackupName};`);
      }
      await this.setMigrationFlag("conversation_schema_migration", migratedCount);
      this.debug(
        `Conversation schema migration completed successfully. Migrated ${migratedCount} conversations.`
      );
      return {
        success: true,
        migratedCount,
        backupCreated: createBackup
      };
    } catch (error) {
      this.debug("Error during conversation schema migration:", error);
      try {
        await this.client.execute("ROLLBACK;");
      } catch (rollbackError) {
        this.debug("Error rolling back transaction:", rollbackError);
      }
      return {
        success: false,
        error,
        backupCreated: createBackup
      };
    }
  }
  /**
   * Get conversations for a user with a fluent query builder interface
   * @param userId User ID to filter by
   * @returns Query builder object
   */
  getUserConversations(userId) {
    return {
      /**
       * Limit the number of results
       * @param count Number of conversations to return
       * @returns Query builder
       */
      limit: /* @__PURE__ */ __name((count) => ({
        /**
         * Order results by a specific field
         * @param field Field to order by
         * @param direction Sort direction
         * @returns Query builder
         */
        orderBy: /* @__PURE__ */ __name((field = "updated_at", direction = "DESC") => ({
          /**
           * Execute the query and return results
           * @returns Promise of conversations
           */
          execute: /* @__PURE__ */ __name(() => this.getConversationsByUserId(userId, {
            limit: count,
            orderBy: field,
            orderDirection: direction
          }), "execute")
        }), "orderBy"),
        /**
         * Execute the query with default ordering
         * @returns Promise of conversations
         */
        execute: /* @__PURE__ */ __name(() => this.getConversationsByUserId(userId, { limit: count }), "execute")
      }), "limit"),
      /**
       * Order results by a specific field
       * @param field Field to order by
       * @param direction Sort direction
       * @returns Query builder
       */
      orderBy: /* @__PURE__ */ __name((field = "updated_at", direction = "DESC") => ({
        /**
         * Limit the number of results
         * @param count Number of conversations to return
         * @returns Query builder
         */
        limit: /* @__PURE__ */ __name((count) => ({
          /**
           * Execute the query and return results
           * @returns Promise of conversations
           */
          execute: /* @__PURE__ */ __name(() => this.getConversationsByUserId(userId, {
            limit: count,
            orderBy: field,
            orderDirection: direction
          }), "execute")
        }), "limit"),
        /**
         * Execute the query without limit
         * @returns Promise of conversations
         */
        execute: /* @__PURE__ */ __name(() => this.getConversationsByUserId(userId, {
          orderBy: field,
          orderDirection: direction
        }), "execute")
      }), "orderBy"),
      /**
       * Execute the query with default options
       * @returns Promise of conversations
       */
      execute: /* @__PURE__ */ __name(() => this.getConversationsByUserId(userId), "execute")
    };
  }
2753
|
+
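  /*
   * Usage sketch (not part of the bundled output): assuming `storage` is a constructed
   * LibSQLStorage instance, every chain above bottoms out in getConversationsByUserId,
   * so limit-then-orderBy and orderBy-then-limit are equivalent:
   *
   *   const recent = await storage
   *     .getUserConversations("user-123")        // placeholder user id
   *     .limit(10)
   *     .orderBy("created_at", "ASC")
   *     .execute();
   *
   *   const all = await storage.getUserConversations("user-123").execute();
   */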
  /**
   * Get conversation by ID and ensure it belongs to the specified user
   * @param conversationId Conversation ID
   * @param userId User ID to validate ownership
   * @returns Conversation or null
   */
  async getUserConversation(conversationId, userId) {
    const conversation = await this.getConversation(conversationId);
    if (!conversation || conversation.userId !== userId) {
      return null;
    }
    return conversation;
  }
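  /*
   * Usage sketch (not part of the bundled output): the ownership check above means a
   * lookup for a conversation owned by a different user resolves to null rather than
   * throwing:
   *
   *   const conversation = await storage.getUserConversation(conversationId, userId);
   *   if (!conversation) {
   *     // not found, or owned by another user
   *   }
   */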
  /**
   * Get paginated conversations for a user
   * @param userId User ID
   * @param page Page number (1-based)
   * @param pageSize Number of items per page
   * @returns Object with conversations and pagination info
   */
  async getPaginatedUserConversations(userId, page = 1, pageSize = 10) {
    const offset = (page - 1) * pageSize;
    const conversations = await this.getConversationsByUserId(userId, {
      limit: pageSize + 1,
      offset,
      orderBy: "updated_at",
      orderDirection: "DESC"
    });
    const hasMore = conversations.length > pageSize;
    const results = hasMore ? conversations.slice(0, pageSize) : conversations;
    return {
      conversations: results,
      page,
      pageSize,
      hasMore
    };
  }
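  /*
   * Usage sketch (not part of the bundled output): the method above fetches
   * pageSize + 1 rows and trims the extra one, so `hasMore` can drive a simple
   * paging loop:
   *
   *   let page = 1;
   *   let result;
   *   do {
   *     result = await storage.getPaginatedUserConversations("user-123", page, 20);
   *     handlePage(result.conversations);   // hypothetical consumer
   *     page += 1;
   *   } while (result.hasMore);
   */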
  /**
   * Check and create migration flag table, return if migration already completed
   * @param migrationType Type of migration to check
   * @returns Object with completion status and details
   */
  async checkMigrationFlag(migrationType) {
    const conversationsTableName = `${this.options.tablePrefix}_conversations`;
    const migrationFlagTable = `${conversationsTableName}_migration_flags`;
    try {
      const result = await this.client.execute({
        sql: `SELECT * FROM ${migrationFlagTable} WHERE migration_type = ?`,
        args: [migrationType]
      });
      if (result.rows.length > 0) {
        const migrationFlag = result.rows[0];
        this.debug(`${migrationType} migration already completed`);
        this.debug(`Migration completed on: ${migrationFlag.completed_at}`);
        this.debug(`Migrated ${migrationFlag.migrated_count || 0} records previously`);
        return {
          alreadyCompleted: true,
          migrationCount: migrationFlag.migrated_count,
          completedAt: migrationFlag.completed_at
        };
      }
      this.debug("Migration flags table found, but no migration flag exists yet");
      return { alreadyCompleted: false };
    } catch (flagError) {
      this.debug("Migration flag table not found, creating it...");
      this.debug("Original error:", flagError);
      try {
        await this.client.execute(`
          CREATE TABLE IF NOT EXISTS ${migrationFlagTable} (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            migration_type TEXT NOT NULL UNIQUE,
            completed_at TEXT NOT NULL DEFAULT (datetime('now')),
            migrated_count INTEGER DEFAULT 0,
            metadata TEXT DEFAULT '{}'
          )
        `);
        this.debug("Migration flags table created successfully");
      } catch (createError) {
        this.debug("Failed to create migration flags table:", createError);
      }
      return { alreadyCompleted: false };
    }
  }
  /**
   * Set migration flag after successful completion
   * @param migrationType Type of migration completed
   * @param migratedCount Number of records migrated
   */
  async setMigrationFlag(migrationType, migratedCount) {
    try {
      const conversationsTableName = `${this.options.tablePrefix}_conversations`;
      const migrationFlagTable = `${conversationsTableName}_migration_flags`;
      await this.client.execute({
        sql: `INSERT OR REPLACE INTO ${migrationFlagTable}
              (migration_type, completed_at, migrated_count)
              VALUES (?, datetime('now'), ?)`,
        args: [migrationType, migratedCount]
      });
      this.debug("Migration flag set successfully");
    } catch (flagSetError) {
      this.debug("Could not set migration flag (non-critical):", flagSetError);
    }
  }
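  /*
   * Usage sketch (not part of the bundled output): checkMigrationFlag and
   * setMigrationFlag above form an idempotency guard around a migration body.
   * Roughly:
   *
   *   const flag = await storage.checkMigrationFlag("some_migration");   // placeholder type name
   *   if (!flag.alreadyCompleted) {
   *     const migrated = await runMigrationBody();                       // hypothetical helper
   *     await storage.setMigrationFlag("some_migration", migrated);
   *   }
   *
   * Both helpers key the flag table off the conversations table prefix
   * (`${tablePrefix}_conversations_migration_flags`), and setMigrationFlag treats its
   * own failures as non-critical.
   */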
  /**
   * Migrate agent history schema to add userId and conversationId columns
   */
  async migrateAgentHistorySchema() {
    const historyTableName = `${this.options.tablePrefix}_agent_history`;
    try {
      this.debug("Starting agent history schema migration...");
      const flagCheck = await this.checkMigrationFlag("agent_history_schema_migration");
      if (flagCheck.alreadyCompleted) {
        return { success: true };
      }
      const tableInfo = await this.client.execute(`PRAGMA table_info(${historyTableName})`);
      if (tableInfo.rows.length === 0) {
        this.debug("Agent history table doesn't exist, migration not needed");
        return { success: true };
      }
      const hasUserIdColumn = tableInfo.rows.some((row) => row.name === "userId");
      const hasConversationIdColumn = tableInfo.rows.some((row) => row.name === "conversationId");
      if (hasUserIdColumn && hasConversationIdColumn) {
        this.debug("Both userId and conversationId columns already exist, skipping migration");
        await this.setMigrationFlag("agent_history_schema_migration", 0);
        return { success: true };
      }
      if (!hasUserIdColumn) {
        await this.client.execute(`ALTER TABLE ${historyTableName} ADD COLUMN userId TEXT`);
        this.debug("Added userId column to agent history table");
      }
      if (!hasConversationIdColumn) {
        await this.client.execute(`ALTER TABLE ${historyTableName} ADD COLUMN conversationId TEXT`);
        this.debug("Added conversationId column to agent history table");
      }
      if (!hasUserIdColumn) {
        await this.client.execute(`
          CREATE INDEX IF NOT EXISTS idx_${historyTableName}_userId
          ON ${historyTableName}(userId)
        `);
      }
      if (!hasConversationIdColumn) {
        await this.client.execute(`
          CREATE INDEX IF NOT EXISTS idx_${historyTableName}_conversationId
          ON ${historyTableName}(conversationId)
        `);
      }
      await this.setMigrationFlag("agent_history_schema_migration", 0);
      this.debug("Agent history schema migration completed successfully");
      return { success: true };
    } catch (error) {
      this.debug("Error during agent history schema migration:", error);
      return {
        success: false,
        error
      };
    }
  }
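  /*
   * Usage sketch (not part of the bundled output): the agent history migration above
   * is additive (ALTER TABLE ... ADD COLUMN plus indexes) and reports failure through
   * its return value instead of throwing:
   *
   *   const { success, error } = await storage.migrateAgentHistorySchema();
   *   if (!success) {
   *     console.error("Agent history schema migration failed:", error);
   *   }
   */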
  // ===== WorkflowMemory Interface Implementation =====
  // Delegate all workflow operations to the workflow extension
  async storeWorkflowHistory(entry) {
    await this.initialized;
    return this.workflowExtension.storeWorkflowHistory(entry);
  }
  async getWorkflowHistory(id) {
    await this.initialized;
    return this.workflowExtension.getWorkflowHistory(id);
  }
  async getWorkflowHistoryByWorkflowId(workflowId) {
    await this.initialized;
    return this.workflowExtension.getWorkflowHistoryByWorkflowId(workflowId);
  }
  async updateWorkflowHistory(id, updates) {
    await this.initialized;
    return this.workflowExtension.updateWorkflowHistory(id, updates);
  }
  async deleteWorkflowHistory(id) {
    await this.initialized;
    return this.workflowExtension.deleteWorkflowHistory(id);
  }
  async storeWorkflowStep(step) {
    await this.initialized;
    return this.workflowExtension.storeWorkflowStep(step);
  }
  async getWorkflowStep(id) {
    await this.initialized;
    return this.workflowExtension.getWorkflowStep(id);
  }
  async getWorkflowSteps(workflowHistoryId) {
    await this.initialized;
    return this.workflowExtension.getWorkflowSteps(workflowHistoryId);
  }
  async updateWorkflowStep(id, updates) {
    await this.initialized;
    return this.workflowExtension.updateWorkflowStep(id, updates);
  }
  async deleteWorkflowStep(id) {
    await this.initialized;
    return this.workflowExtension.deleteWorkflowStep(id);
  }
  async storeWorkflowTimelineEvent(event) {
    await this.initialized;
    return this.workflowExtension.storeWorkflowTimelineEvent(event);
  }
  async getWorkflowTimelineEvent(id) {
    await this.initialized;
    return this.workflowExtension.getWorkflowTimelineEvent(id);
  }
  async getWorkflowTimelineEvents(workflowHistoryId) {
    await this.initialized;
    return this.workflowExtension.getWorkflowTimelineEvents(workflowHistoryId);
  }
  async deleteWorkflowTimelineEvent(id) {
    await this.initialized;
    return this.workflowExtension.deleteWorkflowTimelineEvent(id);
  }
  async getAllWorkflowIds() {
    await this.initialized;
    return this.workflowExtension.getAllWorkflowIds();
  }
  async getWorkflowStats(workflowId) {
    await this.initialized;
    return this.workflowExtension.getWorkflowStats(workflowId);
  }
  async getWorkflowHistoryWithStepsAndEvents(id) {
    await this.initialized;
    return this.workflowExtension.getWorkflowHistoryWithStepsAndEvents(id);
  }
  async deleteWorkflowHistoryWithRelated(id) {
    await this.initialized;
    return this.workflowExtension.deleteWorkflowHistoryWithRelated(id);
  }
  async cleanupOldWorkflowHistories(workflowId, maxEntries) {
    await this.initialized;
    return this.workflowExtension.cleanupOldWorkflowHistories(workflowId, maxEntries);
  }
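  /*
   * Usage sketch (not part of the bundled output): every delegate above awaits
   * `this.initialized` before touching the workflow extension, so callers can invoke
   * workflow APIs immediately after constructing the storage instance:
   *
   *   const history = await storage.getWorkflowHistoryByWorkflowId("my-workflow");  // placeholder id
   *   const stats = await storage.getWorkflowStats("my-workflow");
   */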
  /**
   * Get the workflow extension for advanced workflow operations
   */
  getWorkflowExtension() {
    return this.workflowExtension;
  }
};
export {
  LibSQLStorage
};
//# sourceMappingURL=index.mjs.map