@voltagent/libsql 0.1.0

This diff shows the content of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the package versions as they appear in their public registries.
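Before the file contents, a minimal usage sketch of the exported `LibSQLStorage` class. The option names and method shapes are read off the bundled constructor and methods shown in the diff below; the concrete values (file URL, IDs, the `type: "text"` message type) are illustrative assumptions and are not documented by this diff itself.

```ts
// Hedged usage sketch — not part of the published package contents below.
// Options and method signatures mirror the bundled dist/index.js in this diff;
// the specific values are placeholders.
import { LibSQLStorage } from "@voltagent/libsql";

const storage = new LibSQLStorage({
  url: "file:./.voltagent/memory.db", // default; a remote libSQL URL plus authToken is also accepted
  tablePrefix: "voltagent_memory",    // default prefix for all created tables
  storageLimit: 100,                  // max messages retained per conversation before pruning
  retryAttempts: 3,                   // retries on SQLITE_BUSY / "database is locked"
  baseDelayMs: 50,                    // base delay for the jittered exponential backoff
  debug: false,
});

async function demo() {
  // Conversation fields match what createConversation() inserts below.
  await storage.createConversation({
    id: "conv-1",
    resourceId: "my-agent",
    userId: "user-1",
    title: "First conversation",
    metadata: {},
  });

  // Message fields match what addMessage() inserts; createdAt is stored as ISO text.
  await storage.addMessage(
    {
      id: "msg-1",
      role: "user",
      content: "Hello",
      type: "text", // assumed message type for illustration
      createdAt: new Date().toISOString(),
    },
    "conv-1",
  );

  // getMessages() accepts userId, conversationId, limit, before, after, role, types.
  const messages = await storage.getMessages({ conversationId: "conv-1", limit: 10 });
  console.log(messages);

  await storage.close();
}

demo().catch(console.error);
```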
package/dist/index.js ADDED
@@ -0,0 +1,3021 @@
1
+ "use strict";
2
+ var __defProp = Object.defineProperty;
3
+ var __getOwnPropDesc = Object.getOwnPropertyDescriptor;
4
+ var __getOwnPropNames = Object.getOwnPropertyNames;
5
+ var __hasOwnProp = Object.prototype.hasOwnProperty;
6
+ var __name = (target, value) => __defProp(target, "name", { value, configurable: true });
7
+ var __export = (target, all) => {
8
+ for (var name in all)
9
+ __defProp(target, name, { get: all[name], enumerable: true });
10
+ };
11
+ var __copyProps = (to, from, except, desc) => {
12
+ if (from && typeof from === "object" || typeof from === "function") {
13
+ for (let key of __getOwnPropNames(from))
14
+ if (!__hasOwnProp.call(to, key) && key !== except)
15
+ __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });
16
+ }
17
+ return to;
18
+ };
19
+ var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod);
20
+
21
+ // src/index.ts
22
+ var index_exports = {};
23
+ __export(index_exports, {
24
+ LibSQLStorage: () => LibSQLStorage
25
+ });
26
+ module.exports = __toCommonJS(index_exports);
27
+ var import_node_fs = require("fs");
28
+ var import_node_path = require("path");
29
+ var import_client = require("@libsql/client");
30
+ var import_core = require("@voltagent/core");
31
+ var import_utils2 = require("@voltagent/internal/utils");
32
+ var import_logger2 = require("@voltagent/logger");
33
+
34
+ // src/migrations/add-suspended-status.ts
35
+ async function addSuspendedStatusMigration(db, tablePrefix = "voltagent_memory") {
36
+ const migrationName = "add_suspended_status_to_workflow_history";
37
+ await db.execute(`
38
+ CREATE TABLE IF NOT EXISTS ${tablePrefix}_migrations (
39
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
40
+ name TEXT NOT NULL UNIQUE,
41
+ applied_at TEXT DEFAULT CURRENT_TIMESTAMP
42
+ )
43
+ `);
44
+ const result = await db.execute({
45
+ sql: `SELECT * FROM ${tablePrefix}_migrations WHERE name = ?`,
46
+ args: [migrationName]
47
+ });
48
+ if (result.rows.length > 0) {
49
+ return;
50
+ }
51
+ try {
52
+ const needsMigration = await checkIfSuspendedStatusNeeded(db, tablePrefix);
53
+ if (!needsMigration) {
54
+ } else {
55
+ await performSuspendedStatusMigration(db, tablePrefix);
56
+ }
57
+ await db.execute({
58
+ sql: `INSERT INTO ${tablePrefix}_migrations (name) VALUES (?)`,
59
+ args: [migrationName]
60
+ });
61
+ } catch (error) {
62
+ console.error(`[Migration] Failed to apply '${migrationName}':`, error);
63
+ throw error;
64
+ }
65
+ }
66
+ __name(addSuspendedStatusMigration, "addSuspendedStatusMigration");
67
+ async function checkIfSuspendedStatusNeeded(db, tablePrefix) {
68
+ try {
69
+ const testId = `test-suspended-check-${Date.now()}`;
70
+ await db.execute({
71
+ sql: `
72
+ INSERT INTO ${tablePrefix}_workflow_history
73
+ (id, name, workflow_id, status, start_time)
74
+ VALUES (?, 'test', 'test', 'suspended', datetime('now'))
75
+ `,
76
+ args: [testId]
77
+ });
78
+ await db.execute({
79
+ sql: `DELETE FROM ${tablePrefix}_workflow_history WHERE id = ?`,
80
+ args: [testId]
81
+ });
82
+ return false;
83
+ } catch (error) {
84
+ if (error.message?.includes("CHECK constraint failed")) {
85
+ return true;
86
+ }
87
+ throw error;
88
+ }
89
+ }
90
+ __name(checkIfSuspendedStatusNeeded, "checkIfSuspendedStatusNeeded");
91
+ async function performSuspendedStatusMigration(db, tablePrefix) {
92
+ await db.execute("BEGIN TRANSACTION");
93
+ try {
94
+ await db.execute(`
95
+ CREATE TABLE ${tablePrefix}_workflow_history_temp (
96
+ id TEXT PRIMARY KEY,
97
+ name TEXT NOT NULL,
98
+ workflow_id TEXT NOT NULL,
99
+ status TEXT NOT NULL CHECK (status IN ('running', 'completed', 'error', 'cancelled', 'suspended')),
100
+ start_time TEXT NOT NULL,
101
+ end_time TEXT,
102
+ input TEXT,
103
+ output TEXT,
104
+ user_id TEXT,
105
+ conversation_id TEXT,
106
+ metadata TEXT,
107
+ created_at TEXT DEFAULT CURRENT_TIMESTAMP,
108
+ updated_at TEXT DEFAULT CURRENT_TIMESTAMP
109
+ )
110
+ `);
111
+ await db.execute(`
112
+ INSERT INTO ${tablePrefix}_workflow_history_temp
113
+ SELECT * FROM ${tablePrefix}_workflow_history
114
+ `);
115
+ await db.execute(`DROP TABLE ${tablePrefix}_workflow_history`);
116
+ await db.execute(`
117
+ ALTER TABLE ${tablePrefix}_workflow_history_temp
118
+ RENAME TO ${tablePrefix}_workflow_history
119
+ `);
120
+ await db.execute(
121
+ `CREATE INDEX idx_${tablePrefix}_workflow_history_workflow_id ON ${tablePrefix}_workflow_history(workflow_id)`
122
+ );
123
+ await db.execute(
124
+ `CREATE INDEX idx_${tablePrefix}_workflow_history_status ON ${tablePrefix}_workflow_history(status)`
125
+ );
126
+ await db.execute(
127
+ `CREATE INDEX idx_${tablePrefix}_workflow_history_start_time ON ${tablePrefix}_workflow_history(start_time)`
128
+ );
129
+ await db.execute(
130
+ `CREATE INDEX idx_${tablePrefix}_workflow_history_user_id ON ${tablePrefix}_workflow_history(user_id)`
131
+ );
132
+ await db.execute(
133
+ `CREATE INDEX idx_${tablePrefix}_workflow_history_conversation_id ON ${tablePrefix}_workflow_history(conversation_id)`
134
+ );
135
+ await db.execute("COMMIT");
136
+ } catch (error) {
137
+ await db.execute("ROLLBACK");
138
+ throw error;
139
+ }
140
+ }
141
+ __name(performSuspendedStatusMigration, "performSuspendedStatusMigration");
142
+
143
+ // src/migrations/workflow-tables.ts
144
+ async function createWorkflowTables(db, tablePrefix = "voltagent_memory") {
145
+ await db.execute(`
146
+ CREATE TABLE IF NOT EXISTS ${tablePrefix}_workflow_history (
147
+ id TEXT PRIMARY KEY,
148
+ name TEXT NOT NULL,
149
+ workflow_id TEXT NOT NULL,
150
+ status TEXT NOT NULL CHECK (status IN ('running', 'completed', 'error', 'cancelled', 'suspended')),
151
+ start_time TEXT NOT NULL,
152
+ end_time TEXT,
153
+ input TEXT,
154
+ output TEXT,
155
+ user_id TEXT,
156
+ conversation_id TEXT,
157
+ metadata TEXT,
158
+ created_at TEXT DEFAULT CURRENT_TIMESTAMP,
159
+ updated_at TEXT DEFAULT CURRENT_TIMESTAMP
160
+ )
161
+ `);
162
+ await db.execute(
163
+ `CREATE INDEX IF NOT EXISTS idx_${tablePrefix}_workflow_history_workflow_id ON ${tablePrefix}_workflow_history(workflow_id)`
164
+ );
165
+ await db.execute(
166
+ `CREATE INDEX IF NOT EXISTS idx_${tablePrefix}_workflow_history_status ON ${tablePrefix}_workflow_history(status)`
167
+ );
168
+ await db.execute(
169
+ `CREATE INDEX IF NOT EXISTS idx_${tablePrefix}_workflow_history_start_time ON ${tablePrefix}_workflow_history(start_time)`
170
+ );
171
+ await db.execute(
172
+ `CREATE INDEX IF NOT EXISTS idx_${tablePrefix}_workflow_history_user_id ON ${tablePrefix}_workflow_history(user_id)`
173
+ );
174
+ await db.execute(
175
+ `CREATE INDEX IF NOT EXISTS idx_${tablePrefix}_workflow_history_conversation_id ON ${tablePrefix}_workflow_history(conversation_id)`
176
+ );
177
+ await db.execute(`
178
+ CREATE TABLE IF NOT EXISTS ${tablePrefix}_workflow_steps (
179
+ id TEXT PRIMARY KEY,
180
+ workflow_history_id TEXT NOT NULL,
181
+ step_index INTEGER NOT NULL,
182
+ step_type TEXT NOT NULL,
183
+ step_name TEXT NOT NULL,
184
+ step_id TEXT,
185
+ status TEXT NOT NULL CHECK (status IN ('running', 'completed', 'error', 'skipped')),
186
+ start_time TEXT NOT NULL,
187
+ end_time TEXT,
188
+ input TEXT,
189
+ output TEXT,
190
+ error_message TEXT,
191
+ agent_execution_id TEXT,
192
+ parallel_index INTEGER,
193
+ parent_step_id TEXT,
194
+ metadata TEXT,
195
+ created_at TEXT DEFAULT CURRENT_TIMESTAMP,
196
+ updated_at TEXT DEFAULT CURRENT_TIMESTAMP
197
+ )
198
+ `);
199
+ await db.execute(
200
+ `CREATE INDEX IF NOT EXISTS idx_${tablePrefix}_workflow_steps_workflow_history ON ${tablePrefix}_workflow_steps(workflow_history_id)`
201
+ );
202
+ await db.execute(
203
+ `CREATE INDEX IF NOT EXISTS idx_${tablePrefix}_workflow_steps_agent_execution ON ${tablePrefix}_workflow_steps(agent_execution_id)`
204
+ );
205
+ await db.execute(
206
+ `CREATE INDEX IF NOT EXISTS idx_${tablePrefix}_workflow_steps_step_index ON ${tablePrefix}_workflow_steps(workflow_history_id, step_index)`
207
+ );
208
+ await db.execute(
209
+ `CREATE INDEX IF NOT EXISTS idx_${tablePrefix}_workflow_steps_parallel ON ${tablePrefix}_workflow_steps(parent_step_id, parallel_index)`
210
+ );
211
+ await db.execute(`
212
+ CREATE TABLE IF NOT EXISTS ${tablePrefix}_workflow_timeline_events (
213
+ id TEXT PRIMARY KEY,
214
+ workflow_history_id TEXT NOT NULL,
215
+ event_id TEXT NOT NULL,
216
+ name TEXT NOT NULL,
217
+ type TEXT NOT NULL CHECK (type IN ('workflow', 'workflow-step')),
218
+ start_time TEXT NOT NULL,
219
+ end_time TEXT,
220
+ status TEXT NOT NULL,
221
+ level TEXT DEFAULT 'INFO',
222
+ input TEXT,
223
+ output TEXT,
224
+ status_message TEXT,
225
+ metadata TEXT,
226
+ trace_id TEXT,
227
+ parent_event_id TEXT,
228
+ event_sequence INTEGER,
229
+ created_at TEXT DEFAULT CURRENT_TIMESTAMP
230
+ )
231
+ `);
232
+ await db.execute(
233
+ `CREATE INDEX IF NOT EXISTS idx_${tablePrefix}_workflow_timeline_events_workflow_history ON ${tablePrefix}_workflow_timeline_events(workflow_history_id)`
234
+ );
235
+ await db.execute(
236
+ `CREATE INDEX IF NOT EXISTS idx_${tablePrefix}_workflow_timeline_events_trace ON ${tablePrefix}_workflow_timeline_events(trace_id)`
237
+ );
238
+ await db.execute(
239
+ `CREATE INDEX IF NOT EXISTS idx_${tablePrefix}_workflow_timeline_events_parent ON ${tablePrefix}_workflow_timeline_events(parent_event_id)`
240
+ );
241
+ await db.execute(
242
+ `CREATE INDEX IF NOT EXISTS idx_${tablePrefix}_workflow_timeline_events_type ON ${tablePrefix}_workflow_timeline_events(type)`
243
+ );
244
+ await db.execute(
245
+ `CREATE INDEX IF NOT EXISTS idx_${tablePrefix}_workflow_timeline_events_sequence ON ${tablePrefix}_workflow_timeline_events(event_sequence)`
246
+ );
247
+ const checkWorkflowIdColumn = await db.execute(`
248
+ SELECT COUNT(*) as count
249
+ FROM pragma_table_info('agent_history')
250
+ WHERE name = 'workflow_id'
251
+ `);
252
+ if (checkWorkflowIdColumn.rows[0].count === 0) {
253
+ await db.execute("ALTER TABLE agent_history ADD COLUMN workflow_id TEXT");
254
+ }
255
+ const checkWorkflowStepIdColumn = await db.execute(`
256
+ SELECT COUNT(*) as count
257
+ FROM pragma_table_info('agent_history')
258
+ WHERE name = 'workflow_step_id'
259
+ `);
260
+ if (checkWorkflowStepIdColumn.rows[0].count === 0) {
261
+ await db.execute("ALTER TABLE agent_history ADD COLUMN workflow_step_id TEXT");
262
+ }
263
+ await db.execute(
264
+ "CREATE INDEX IF NOT EXISTS idx_agent_history_workflow_id ON agent_history(workflow_id)"
265
+ );
266
+ await db.execute(
267
+ "CREATE INDEX IF NOT EXISTS idx_agent_history_workflow_step ON agent_history(workflow_step_id)"
268
+ );
269
+ }
270
+ __name(createWorkflowTables, "createWorkflowTables");
271
+
272
+ // src/workflow-extension.ts
273
+ var import_utils = require("@voltagent/internal/utils");
274
+ var import_logger = require("@voltagent/logger");
275
+ var LibSQLWorkflowExtension = class {
276
+ constructor(client, _tablePrefix = "voltagent_memory", logger) {
277
+ this.client = client;
278
+ this._tablePrefix = _tablePrefix;
279
+ this.logger = logger || (0, import_logger.createPinoLogger)({ name: "libsql-workflow" });
280
+ }
281
+ static {
282
+ __name(this, "LibSQLWorkflowExtension");
283
+ }
284
+ logger;
285
+ /**
286
+ * Store a workflow history entry
287
+ */
288
+ async storeWorkflowHistory(entry) {
289
+ await this.client.execute({
290
+ sql: `
291
+ INSERT INTO ${this._tablePrefix}_workflow_history (
292
+ id, name, workflow_id, status, start_time, end_time,
293
+ input, output, user_id, conversation_id, metadata, created_at, updated_at
294
+ ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
295
+ `,
296
+ args: [
297
+ entry.id,
298
+ entry.workflowName,
299
+ entry.workflowId,
300
+ entry.status,
301
+ entry.startTime.toISOString(),
302
+ entry.endTime?.toISOString() || null,
303
+ (0, import_utils.safeStringify)(entry.input),
304
+ entry.output ? (0, import_utils.safeStringify)(entry.output) : null,
305
+ entry.userId || null,
306
+ entry.conversationId || null,
307
+ entry.metadata ? (0, import_utils.safeStringify)(entry.metadata) : null,
308
+ entry.createdAt?.toISOString() || (/* @__PURE__ */ new Date()).toISOString(),
309
+ entry.updatedAt?.toISOString() || (/* @__PURE__ */ new Date()).toISOString()
310
+ ]
311
+ });
312
+ }
313
+ /**
314
+ * Get a workflow history entry by ID
315
+ */
316
+ async getWorkflowHistory(id) {
317
+ const result = await this.client.execute({
318
+ sql: `SELECT * FROM ${this._tablePrefix}_workflow_history WHERE id = ?`,
319
+ args: [id]
320
+ });
321
+ if (result.rows.length === 0) return null;
322
+ return this.parseWorkflowHistoryRow(result.rows[0]);
323
+ }
324
+ /**
325
+ * Get all workflow history entries for a specific workflow
326
+ */
327
+ async getWorkflowHistoryByWorkflowId(workflowId) {
328
+ const result = await this.client.execute({
329
+ sql: `SELECT * FROM ${this._tablePrefix}_workflow_history WHERE workflow_id = ? ORDER BY start_time DESC`,
330
+ args: [workflowId]
331
+ });
332
+ return result.rows.map((row) => this.parseWorkflowHistoryRow(row));
333
+ }
334
+ /**
335
+ * Update a workflow history entry
336
+ */
337
+ async updateWorkflowHistory(id, updates) {
338
+ this.logger.trace(`Updating workflow history ${id}`, {
339
+ status: updates.status,
340
+ hasMetadata: !!updates.metadata,
341
+ hasSuspension: !!updates.metadata?.suspension
342
+ });
343
+ const setClauses = [];
344
+ const args = [];
345
+ if (updates.status !== void 0) {
346
+ setClauses.push("status = ?");
347
+ args.push(updates.status);
348
+ }
349
+ if (updates.endTime !== void 0) {
350
+ setClauses.push("end_time = ?");
351
+ args.push(updates.endTime.toISOString());
352
+ }
353
+ if (updates.output !== void 0) {
354
+ setClauses.push("output = ?");
355
+ args.push((0, import_utils.safeStringify)(updates.output));
356
+ }
357
+ if (updates.userId !== void 0) {
358
+ setClauses.push("user_id = ?");
359
+ args.push(updates.userId);
360
+ }
361
+ if (updates.conversationId !== void 0) {
362
+ setClauses.push("conversation_id = ?");
363
+ args.push(updates.conversationId);
364
+ }
365
+ if (updates.metadata !== void 0) {
366
+ setClauses.push("metadata = ?");
367
+ const metadataJson = (0, import_utils.safeStringify)(updates.metadata);
368
+ args.push(metadataJson);
369
+ this.logger.trace(`Setting metadata for ${id}:`, { metadata: metadataJson });
370
+ }
371
+ setClauses.push("updated_at = ?");
372
+ args.push((/* @__PURE__ */ new Date()).toISOString());
373
+ args.push(id);
374
+ const sql = `UPDATE ${this._tablePrefix}_workflow_history SET ${setClauses.join(", ")} WHERE id = ?`;
375
+ this.logger.trace("Executing SQL:", { sql, args });
376
+ try {
377
+ const result = await this.client.execute({ sql, args });
378
+ this.logger.trace(
379
+ `Successfully updated workflow history ${id}, rows affected: ${result.rowsAffected}`
380
+ );
381
+ } catch (error) {
382
+ this.logger.error(`Failed to update workflow history ${id}:`, { error });
383
+ throw error;
384
+ }
385
+ }
386
+ /**
387
+ * Delete a workflow history entry
388
+ */
389
+ async deleteWorkflowHistory(id) {
390
+ await this.client.execute({
391
+ sql: `DELETE FROM ${this._tablePrefix}_workflow_history WHERE id = ?`,
392
+ args: [id]
393
+ });
394
+ }
395
+ /**
396
+ * Store a workflow step entry
397
+ */
398
+ async storeWorkflowStep(step) {
399
+ await this.client.execute({
400
+ sql: `
401
+ INSERT INTO ${this._tablePrefix}_workflow_steps (
402
+ id, workflow_history_id, step_index, step_type, step_name, step_id,
403
+ status, start_time, end_time, input, output, error_message,
404
+ agent_execution_id, parallel_index, parent_step_id, metadata,
405
+ created_at, updated_at
406
+ ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
407
+ `,
408
+ args: [
409
+ step.id,
410
+ step.workflowHistoryId,
411
+ step.stepIndex,
412
+ step.stepType,
413
+ step.stepName,
414
+ step.stepId || null,
415
+ step.status,
416
+ step.startTime.toISOString(),
417
+ step.endTime?.toISOString() || null,
418
+ step.input ? (0, import_utils.safeStringify)(step.input) : null,
419
+ step.output ? (0, import_utils.safeStringify)(step.output) : null,
420
+ step.error ? (0, import_utils.safeStringify)(step.error) : null,
421
+ step.agentExecutionId || null,
422
+ step.parallelIndex || null,
423
+ step.parallelParentStepId || null,
424
+ step.metadata ? (0, import_utils.safeStringify)(step.metadata) : null,
425
+ step.createdAt?.toISOString() || (/* @__PURE__ */ new Date()).toISOString(),
426
+ step.updatedAt?.toISOString() || (/* @__PURE__ */ new Date()).toISOString()
427
+ ]
428
+ });
429
+ }
430
+ /**
431
+ * Get a workflow step by ID
432
+ */
433
+ async getWorkflowStep(id) {
434
+ const result = await this.client.execute({
435
+ sql: `SELECT * FROM ${this._tablePrefix}_workflow_steps WHERE id = ?`,
436
+ args: [id]
437
+ });
438
+ if (result.rows.length === 0) return null;
439
+ return this.parseWorkflowStepRow(result.rows[0]);
440
+ }
441
+ /**
442
+ * Get all workflow steps for a specific workflow history
443
+ */
444
+ async getWorkflowSteps(workflowHistoryId) {
445
+ const result = await this.client.execute({
446
+ sql: `SELECT * FROM ${this._tablePrefix}_workflow_steps WHERE workflow_history_id = ? ORDER BY step_index ASC`,
447
+ args: [workflowHistoryId]
448
+ });
449
+ return result.rows.map((row) => this.parseWorkflowStepRow(row));
450
+ }
451
+ /**
452
+ * Update a workflow step
453
+ */
454
+ async updateWorkflowStep(id, updates) {
455
+ const setClauses = [];
456
+ const args = [];
457
+ if (updates.status !== void 0) {
458
+ setClauses.push("status = ?");
459
+ args.push(updates.status);
460
+ }
461
+ if (updates.endTime !== void 0) {
462
+ setClauses.push("end_time = ?");
463
+ args.push(updates.endTime.toISOString());
464
+ }
465
+ if (updates.output !== void 0) {
466
+ setClauses.push("output = ?");
467
+ args.push((0, import_utils.safeStringify)(updates.output));
468
+ }
469
+ if (updates.error !== void 0) {
470
+ setClauses.push("error_message = ?");
471
+ args.push((0, import_utils.safeStringify)(updates.error));
472
+ }
473
+ if (updates.agentExecutionId !== void 0) {
474
+ setClauses.push("agent_execution_id = ?");
475
+ args.push(updates.agentExecutionId);
476
+ }
477
+ if (updates.metadata !== void 0) {
478
+ setClauses.push("metadata = ?");
479
+ args.push((0, import_utils.safeStringify)(updates.metadata));
480
+ }
481
+ setClauses.push("updated_at = ?");
482
+ args.push((/* @__PURE__ */ new Date()).toISOString());
483
+ args.push(id);
484
+ await this.client.execute({
485
+ sql: `UPDATE ${this._tablePrefix}_workflow_steps SET ${setClauses.join(", ")} WHERE id = ?`,
486
+ args
487
+ });
488
+ }
489
+ /**
490
+ * Delete a workflow step
491
+ */
492
+ async deleteWorkflowStep(id) {
493
+ await this.client.execute({
494
+ sql: `DELETE FROM ${this._tablePrefix}_workflow_steps WHERE id = ?`,
495
+ args: [id]
496
+ });
497
+ }
498
+ /**
499
+ * Store a workflow timeline event
500
+ */
501
+ async storeWorkflowTimelineEvent(event) {
502
+ await this.client.execute({
503
+ sql: `
504
+ INSERT INTO ${this._tablePrefix}_workflow_timeline_events (
505
+ id, workflow_history_id, event_id, name, type,
506
+ start_time, end_time, status, level, input, output,
507
+ status_message, metadata, trace_id, parent_event_id, event_sequence, created_at
508
+ ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
509
+ `,
510
+ args: [
511
+ event.id,
512
+ event.workflowHistoryId,
513
+ event.eventId,
514
+ event.name,
515
+ event.type,
516
+ event.startTime,
517
+ event.endTime || null,
518
+ event.status,
519
+ event.level || "INFO",
520
+ event.input ? (0, import_utils.safeStringify)(event.input) : null,
521
+ event.output ? (0, import_utils.safeStringify)(event.output) : null,
522
+ event.statusMessage ? (0, import_utils.safeStringify)(event.statusMessage) : null,
523
+ event.metadata ? (0, import_utils.safeStringify)(event.metadata) : null,
524
+ event.traceId || null,
525
+ event.parentEventId || null,
526
+ event.eventSequence || null,
527
+ // Event sequence for ordering
528
+ event.createdAt.toISOString()
529
+ ]
530
+ });
531
+ }
532
+ /**
533
+ * Get a workflow timeline event by ID
534
+ */
535
+ async getWorkflowTimelineEvent(id) {
536
+ const result = await this.client.execute({
537
+ sql: `SELECT * FROM ${this._tablePrefix}_workflow_timeline_events WHERE id = ?`,
538
+ args: [id]
539
+ });
540
+ if (result.rows.length === 0) return null;
541
+ return this.parseWorkflowTimelineEventRow(result.rows[0]);
542
+ }
543
+ /**
544
+ * Get all workflow timeline events for a specific workflow history
545
+ */
546
+ async getWorkflowTimelineEvents(workflowHistoryId) {
547
+ const result = await this.client.execute({
548
+ sql: `SELECT * FROM ${this._tablePrefix}_workflow_timeline_events WHERE workflow_history_id = ? ORDER BY event_sequence ASC, start_time ASC`,
549
+ args: [workflowHistoryId]
550
+ });
551
+ return result.rows.map((row) => this.parseWorkflowTimelineEventRow(row));
552
+ }
553
+ /**
554
+ * Delete a workflow timeline event
555
+ */
556
+ async deleteWorkflowTimelineEvent(id) {
557
+ await this.client.execute({
558
+ sql: `DELETE FROM ${this._tablePrefix}_workflow_timeline_events WHERE id = ?`,
559
+ args: [id]
560
+ });
561
+ }
562
+ /**
563
+ * Get all workflow IDs
564
+ */
565
+ async getAllWorkflowIds() {
566
+ const result = await this.client.execute({
567
+ sql: `SELECT DISTINCT workflow_id FROM ${this._tablePrefix}_workflow_history`,
568
+ args: []
569
+ });
570
+ return result.rows.map((row) => row.workflow_id);
571
+ }
572
+ /**
573
+ * Get workflow statistics
574
+ */
575
+ async getWorkflowStats(workflowId) {
576
+ const result = await this.client.execute({
577
+ sql: `
578
+ SELECT
579
+ COUNT(*) as total_executions,
580
+ SUM(CASE WHEN status = 'completed' THEN 1 ELSE 0 END) as successful_executions,
581
+ SUM(CASE WHEN status = 'error' THEN 1 ELSE 0 END) as failed_executions,
582
+ AVG(CASE WHEN end_time IS NOT NULL THEN
583
+ (julianday(end_time) - julianday(start_time)) * 24 * 60 * 60 * 1000
584
+ ELSE NULL END) as avg_duration_ms,
585
+ MAX(start_time) as last_execution_time
586
+ FROM ${this._tablePrefix}_workflow_history
587
+ WHERE workflow_id = ?
588
+ `,
589
+ args: [workflowId]
590
+ });
591
+ if (result.rows.length === 0) {
592
+ return {
593
+ totalExecutions: 0,
594
+ successfulExecutions: 0,
595
+ failedExecutions: 0,
596
+ averageExecutionTime: 0,
597
+ lastExecutionTime: void 0
598
+ };
599
+ }
600
+ const row = result.rows[0];
601
+ return {
602
+ totalExecutions: Number(row.total_executions) || 0,
603
+ successfulExecutions: Number(row.successful_executions) || 0,
604
+ failedExecutions: Number(row.failed_executions) || 0,
605
+ averageExecutionTime: Number(row.avg_duration_ms) || 0,
606
+ lastExecutionTime: row.last_execution_time ? new Date(row.last_execution_time) : void 0
607
+ };
608
+ }
609
+ /**
610
+ * Get workflow history with all related data (steps and events)
611
+ */
612
+ async getWorkflowHistoryWithStepsAndEvents(id) {
613
+ const history = await this.getWorkflowHistory(id);
614
+ if (!history) return null;
615
+ const [steps, events] = await Promise.all([
616
+ this.getWorkflowSteps(id),
617
+ this.getWorkflowTimelineEvents(id)
618
+ ]);
619
+ history.steps = steps;
620
+ history.events = events;
621
+ return history;
622
+ }
623
+ /**
624
+ * Delete workflow history and all related data
625
+ */
626
+ async deleteWorkflowHistoryWithRelated(id) {
627
+ await this.deleteWorkflowHistory(id);
628
+ }
629
+ /**
630
+ * Clean up old workflow histories
631
+ */
632
+ async cleanupOldWorkflowHistories(workflowId, maxEntries) {
633
+ const countResult = await this.client.execute({
634
+ sql: `SELECT COUNT(*) as count FROM ${this._tablePrefix}_workflow_history WHERE workflow_id = ?`,
635
+ args: [workflowId]
636
+ });
637
+ const currentCount = Number(countResult.rows[0].count);
638
+ if (currentCount <= maxEntries) return 0;
639
+ const deleteCount = currentCount - maxEntries;
640
+ const deleteResult = await this.client.execute({
641
+ sql: `
642
+ DELETE FROM ${this._tablePrefix}_workflow_history
643
+ WHERE workflow_id = ?
644
+ AND id IN (
645
+ SELECT id FROM ${this._tablePrefix}_workflow_history
646
+ WHERE workflow_id = ?
647
+ ORDER BY start_time ASC
648
+ LIMIT ?
649
+ )
650
+ `,
651
+ args: [workflowId, workflowId, deleteCount]
652
+ });
653
+ return deleteResult.rowsAffected;
654
+ }
655
+ /**
656
+ * Parse workflow history row from database
657
+ */
658
+ parseWorkflowHistoryRow(row) {
659
+ return {
660
+ id: row.id,
661
+ workflowName: row.name,
662
+ workflowId: row.workflow_id,
663
+ status: row.status,
664
+ startTime: new Date(row.start_time),
665
+ endTime: row.end_time ? new Date(row.end_time) : void 0,
666
+ input: row.input ? JSON.parse(row.input) : null,
667
+ output: row.output ? JSON.parse(row.output) : void 0,
668
+ userId: row.user_id,
669
+ conversationId: row.conversation_id,
670
+ metadata: row.metadata ? JSON.parse(row.metadata) : void 0,
671
+ steps: [],
672
+ // Will be loaded separately if needed
673
+ events: [],
674
+ // Will be loaded separately if needed
675
+ createdAt: new Date(row.created_at),
676
+ updatedAt: new Date(row.updated_at)
677
+ };
678
+ }
679
+ /**
680
+ * Parse workflow step row from database
681
+ */
682
+ parseWorkflowStepRow(row) {
683
+ return {
684
+ id: row.id,
685
+ workflowHistoryId: row.workflow_history_id,
686
+ stepIndex: Number(row.step_index),
687
+ stepType: row.step_type,
688
+ stepName: row.step_name,
689
+ stepId: row.step_id || void 0,
690
+ status: row.status,
691
+ startTime: new Date(row.start_time),
692
+ endTime: row.end_time ? new Date(row.end_time) : void 0,
693
+ input: row.input ? JSON.parse(row.input) : void 0,
694
+ output: row.output ? JSON.parse(row.output) : void 0,
695
+ error: row.error_message ? JSON.parse(row.error_message) : void 0,
696
+ agentExecutionId: row.agent_execution_id || void 0,
697
+ parallelIndex: row.parallel_index ? Number(row.parallel_index) : void 0,
698
+ parallelParentStepId: row.parent_step_id || void 0,
699
+ metadata: row.metadata ? JSON.parse(row.metadata) : void 0,
700
+ createdAt: new Date(row.created_at),
701
+ updatedAt: new Date(row.updated_at)
702
+ };
703
+ }
704
+ /**
705
+ * Parse workflow timeline event row from database
706
+ */
707
+ parseWorkflowTimelineEventRow(row) {
708
+ return {
709
+ id: row.id,
710
+ workflowHistoryId: row.workflow_history_id,
711
+ eventId: row.event_id,
712
+ name: row.name,
713
+ type: row.type,
714
+ startTime: row.start_time,
715
+ endTime: row.end_time ? row.end_time : void 0,
716
+ status: row.status,
717
+ level: row.level || void 0,
718
+ input: row.input ? JSON.parse(row.input) : void 0,
719
+ output: row.output ? JSON.parse(row.output) : void 0,
720
+ statusMessage: row.status_message ? JSON.parse(row.status_message) : void 0,
721
+ metadata: row.metadata ? JSON.parse(row.metadata) : void 0,
722
+ traceId: row.trace_id || void 0,
723
+ parentEventId: row.parent_event_id || void 0,
724
+ eventSequence: Number(row.event_sequence),
725
+ createdAt: new Date(row.created_at)
726
+ };
727
+ }
728
+ };
729
+
730
+ // src/index.ts
731
+ async function debugDelay() {
732
+ const min = 0;
733
+ const max = 0;
734
+ const delay = Math.floor(Math.random() * (max - min + 1)) + min;
735
+ return new Promise((resolve) => setTimeout(resolve, delay));
736
+ }
737
+ __name(debugDelay, "debugDelay");
738
+ var LibSQLStorage = class {
739
+ static {
740
+ __name(this, "LibSQLStorage");
741
+ }
742
+ client;
743
+ options;
744
+ initialized;
745
+ workflowExtension;
746
+ logger;
747
+ retryAttempts;
748
+ baseDelayMs;
749
+ /**
750
+ * Create a new LibSQL storage
751
+ * @param options Configuration options
752
+ */
753
+ constructor(options) {
754
+ this.logger = options.logger || (0, import_logger2.createPinoLogger)({ name: "libsql-storage" });
755
+ this.retryAttempts = options.retryAttempts ?? 3;
756
+ this.baseDelayMs = options.baseDelayMs ?? 50;
757
+ this.options = {
758
+ storageLimit: options.storageLimit || 100,
759
+ tablePrefix: options.tablePrefix || "voltagent_memory",
760
+ debug: options.debug || false,
761
+ url: options.url || "file:./.voltagent/memory.db",
762
+ authToken: options.authToken,
763
+ retryAttempts: this.retryAttempts,
764
+ baseDelayMs: this.baseDelayMs
765
+ };
766
+ if (this.options.url.startsWith("file:") && !this.options.url.includes(":memory:")) {
767
+ const filePath = this.options.url.substring(5);
768
+ const dir = (0, import_node_path.dirname)(filePath);
769
+ if (dir && dir !== "." && !(0, import_node_fs.existsSync)(dir)) {
770
+ try {
771
+ (0, import_node_fs.mkdirSync)(dir, { recursive: true });
772
+ this.debug("Created directory for database", { dir });
773
+ } catch (error) {
774
+ this.logger.warn("Failed to create directory for database", { dir, error });
775
+ }
776
+ }
777
+ }
778
+ this.client = (0, import_client.createClient)({
779
+ url: this.options.url,
780
+ authToken: this.options.authToken
781
+ });
782
+ this.debug("LibSQL storage provider initialized with options", this.options);
783
+ this.workflowExtension = new LibSQLWorkflowExtension(
784
+ this.client,
785
+ this.options.tablePrefix,
786
+ this.logger
787
+ );
788
+ this.initialized = this.initializeDatabase();
789
+ }
790
+ /**
791
+ * Log a debug message if debug is enabled
792
+ * @param message Message to log
793
+ * @param data Additional data to log
794
+ */
795
+ debug(message, data) {
796
+ if (this.options?.debug) {
797
+ this.logger.debug(`${message}`, data || "");
798
+ }
799
+ }
800
+ /**
801
+ * Calculate delay with jitter for better load distribution
802
+ * @param attempt Current retry attempt number
803
+ * @returns Delay in milliseconds
804
+ */
805
+ calculateRetryDelay(attempt) {
806
+ const exponentialDelay = this.baseDelayMs * 2 ** (attempt - 1);
807
+ const jitterFactor = 0.2 + Math.random() * 0.2;
808
+ const delayWithJitter = exponentialDelay * (1 + jitterFactor);
809
+ return Math.min(delayWithJitter, 2e3);
810
+ }
811
+ /**
812
+ * Execute a database operation with retry strategy
813
+ * Implements jittered exponential backoff
814
+ * @param operationFn The operation function to execute
815
+ * @param operationName Operation name for logging
816
+ * @returns The result of the operation
817
+ */
818
+ async executeWithRetryStrategy(operationFn, operationName) {
819
+ let attempt = 0;
820
+ while (attempt < this.retryAttempts) {
821
+ attempt++;
822
+ try {
823
+ return await operationFn();
824
+ } catch (error) {
825
+ const isBusyError = error.message && (error.message.includes("SQLITE_BUSY") || error.message.includes("database is locked") || error.code === "SQLITE_BUSY");
826
+ if (!isBusyError || attempt >= this.retryAttempts) {
827
+ this.debug(`Operation failed: ${operationName}`, {
828
+ attempt,
829
+ error: error.message
830
+ });
831
+ throw error;
832
+ }
833
+ const delay = this.calculateRetryDelay(attempt);
834
+ this.debug(`Retrying ${operationName}`, {
835
+ attempt,
836
+ remainingAttempts: this.retryAttempts - attempt,
837
+ delay
838
+ });
839
+ await new Promise((resolve) => setTimeout(resolve, delay));
840
+ }
841
+ }
842
+ throw new Error(`Max retry attempts (${this.retryAttempts}) exceeded for ${operationName}`);
843
+ }
844
+ /**
845
+ * Initialize workflow tables
846
+ */
847
+ async initializeWorkflowTables() {
848
+ try {
849
+ await createWorkflowTables(this.client, this.options.tablePrefix);
850
+ this.debug("Workflow tables initialized successfully");
851
+ await addSuspendedStatusMigration(this.client, this.options.tablePrefix);
852
+ this.debug("Workflow migrations applied successfully");
853
+ } catch (error) {
854
+ this.debug("Error initializing workflow tables:", error);
855
+ }
856
+ }
857
+ /**
858
+ * Initialize the database tables
859
+ * @returns Promise that resolves when initialization is complete
860
+ */
861
+ async initializeDatabase() {
862
+ if (this.options.url.startsWith("file:") || this.options.url.includes(":memory:")) {
863
+ try {
864
+ await this.client.execute("PRAGMA journal_mode=WAL;");
865
+ this.debug("PRAGMA journal_mode=WAL set.");
866
+ } catch (err) {
867
+ this.debug("Failed to set PRAGMA journal_mode=WAL.", err);
868
+ }
869
+ try {
870
+ await this.client.execute("PRAGMA busy_timeout = 5000;");
871
+ this.debug("PRAGMA busy_timeout=5000 set.");
872
+ } catch (err) {
873
+ this.debug("Failed to set PRAGMA busy_timeout.", err);
874
+ }
875
+ }
876
+ const conversationsTableName = `${this.options.tablePrefix}_conversations`;
877
+ await this.client.execute(`
878
+ CREATE TABLE IF NOT EXISTS ${conversationsTableName} (
879
+ id TEXT PRIMARY KEY,
880
+ resource_id TEXT NOT NULL,
881
+ user_id TEXT NOT NULL,
882
+ title TEXT NOT NULL,
883
+ metadata TEXT NOT NULL,
884
+ created_at TEXT NOT NULL,
885
+ updated_at TEXT NOT NULL
886
+ )
887
+ `);
888
+ const messagesTableName = `${this.options.tablePrefix}_messages`;
889
+ await this.client.execute(`
890
+ CREATE TABLE IF NOT EXISTS ${messagesTableName} (
891
+ conversation_id TEXT NOT NULL,
892
+ message_id TEXT NOT NULL,
893
+ role TEXT NOT NULL,
894
+ content TEXT NOT NULL,
895
+ type TEXT NOT NULL,
896
+ created_at TEXT NOT NULL,
897
+ PRIMARY KEY (conversation_id, message_id)
898
+ )
899
+ `);
900
+ const historyTableName = `${this.options.tablePrefix}_agent_history`;
901
+ await this.client.execute(`
902
+ CREATE TABLE IF NOT EXISTS ${historyTableName} (
903
+ id TEXT PRIMARY KEY,
904
+ agent_id TEXT NOT NULL,
905
+ timestamp TEXT NOT NULL,
906
+ status TEXT,
907
+ input TEXT,
908
+ output TEXT,
909
+ usage TEXT,
910
+ metadata TEXT,
911
+ userId TEXT,
912
+ conversationId TEXT
913
+ )
914
+ `);
915
+ const historyStepsTableName = `${this.options.tablePrefix}_agent_history_steps`;
916
+ await this.client.execute(`
917
+ CREATE TABLE IF NOT EXISTS ${historyStepsTableName} (
918
+ key TEXT PRIMARY KEY,
919
+ value TEXT NOT NULL,
920
+ history_id TEXT NOT NULL,
921
+ agent_id TEXT
922
+ )
923
+ `);
924
+ const timelineEventsTableName = `${this.options.tablePrefix}_agent_history_timeline_events`;
925
+ await this.client.execute(`
926
+ CREATE TABLE IF NOT EXISTS ${timelineEventsTableName} (
927
+ id TEXT PRIMARY KEY,
928
+ history_id TEXT NOT NULL,
929
+ agent_id TEXT,
930
+ event_type TEXT NOT NULL,
931
+ event_name TEXT NOT NULL,
932
+ start_time TEXT NOT NULL,
933
+ end_time TEXT,
934
+ status TEXT,
935
+ status_message TEXT,
936
+ level TEXT,
937
+ version TEXT,
938
+ parent_event_id TEXT,
939
+ tags TEXT,
940
+ input TEXT,
941
+ output TEXT,
942
+ error TEXT,
943
+ metadata TEXT
944
+ )
945
+ `);
946
+ await this.client.execute(`
947
+ CREATE INDEX IF NOT EXISTS idx_${messagesTableName}_lookup
948
+ ON ${messagesTableName}(conversation_id, created_at)
949
+ `);
950
+ await this.client.execute(`
951
+ CREATE INDEX IF NOT EXISTS idx_${conversationsTableName}_resource
952
+ ON ${conversationsTableName}(resource_id)
953
+ `);
954
+ try {
955
+ const tableInfo = await this.client.execute(`PRAGMA table_info(${conversationsTableName})`);
956
+ const hasUserIdColumn = tableInfo.rows.some((row) => row.name === "user_id");
957
+ if (hasUserIdColumn) {
958
+ await this.client.execute(`
959
+ CREATE INDEX IF NOT EXISTS idx_${conversationsTableName}_user
960
+ ON ${conversationsTableName}(user_id)
961
+ `);
962
+ }
963
+ } catch (error) {
964
+ this.debug("Error creating user_id index, will be created after migration:", error);
965
+ }
966
+ await this.client.execute(`
967
+ CREATE INDEX IF NOT EXISTS idx_${historyStepsTableName}_history_id
968
+ ON ${historyStepsTableName}(history_id)
969
+ `);
970
+ await this.initializeWorkflowTables();
971
+ await this.client.execute(`
972
+ CREATE INDEX IF NOT EXISTS idx_${historyTableName}_agent_id
973
+ ON ${historyTableName}(agent_id)
974
+ `);
975
+ await this.client.execute(`
976
+ CREATE INDEX IF NOT EXISTS idx_${historyStepsTableName}_agent_id
977
+ ON ${historyStepsTableName}(agent_id)
978
+ `);
979
+ await this.client.execute(`
980
+ CREATE INDEX IF NOT EXISTS idx_${timelineEventsTableName}_history_id
981
+ ON ${timelineEventsTableName}(history_id)
982
+ `);
983
+ await this.client.execute(`
984
+ CREATE INDEX IF NOT EXISTS idx_${timelineEventsTableName}_agent_id
985
+ ON ${timelineEventsTableName}(agent_id)
986
+ `);
987
+ await this.client.execute(`
988
+ CREATE INDEX IF NOT EXISTS idx_${timelineEventsTableName}_event_type
989
+ ON ${timelineEventsTableName}(event_type)
990
+ `);
991
+ await this.client.execute(`
992
+ CREATE INDEX IF NOT EXISTS idx_${timelineEventsTableName}_event_name
993
+ ON ${timelineEventsTableName}(event_name)
994
+ `);
995
+ await this.client.execute(`
996
+ CREATE INDEX IF NOT EXISTS idx_${timelineEventsTableName}_parent_event_id
997
+ ON ${timelineEventsTableName}(parent_event_id)
998
+ `);
999
+ await this.client.execute(`
1000
+ CREATE INDEX IF NOT EXISTS idx_${timelineEventsTableName}_status
1001
+ ON ${timelineEventsTableName}(status)
1002
+ `);
1003
+ this.debug("Database initialized successfully");
1004
+ try {
1005
+ const migrationResult = await this.migrateConversationSchema({
1006
+ createBackup: true,
1007
+ deleteBackupAfterSuccess: true
1008
+ });
1009
+ if (migrationResult.success) {
1010
+ if ((migrationResult.migratedCount || 0) > 0) {
1011
+ this.logger.info(
1012
+ `${migrationResult.migratedCount} conversation records successfully migrated`
1013
+ );
1014
+ }
1015
+ } else {
1016
+ this.logger.error("Conversation migration error:", migrationResult.error);
1017
+ }
1018
+ } catch (error) {
1019
+ this.debug("Error migrating conversation schema:", error);
1020
+ }
1021
+ try {
1022
+ const migrationResult = await this.migrateAgentHistorySchema();
1023
+ if (!migrationResult.success) {
1024
+ this.logger.error("Agent history schema migration error:", migrationResult.error);
1025
+ }
1026
+ } catch (error) {
1027
+ this.debug("Error migrating agent history schema:", error);
1028
+ }
1029
+ try {
1030
+ const result = await this.migrateAgentHistoryData({
1031
+ restoreFromBackup: false
1032
+ });
1033
+ if (result.success) {
1034
+ if ((result.migratedCount || 0) > 0) {
1035
+ this.logger.info(`${result.migratedCount} records successfully migrated`);
1036
+ }
1037
+ } else {
1038
+ this.logger.error("Migration error:", result.error);
1039
+ const restoreResult = await this.migrateAgentHistoryData({});
1040
+ if (restoreResult.success) {
1041
+ this.logger.info("Successfully restored from backup");
1042
+ }
1043
+ }
1044
+ } catch (error) {
1045
+ this.debug("Error initializing database:", error);
1046
+ }
1047
+ }
1048
+ /**
1049
+ * Generate a unique ID for a message
1050
+ * @returns Unique ID
1051
+ */
1052
+ generateId() {
1053
+ return Math.random().toString(36).substring(2, 15) + Math.random().toString(36).substring(2, 15);
1054
+ }
1055
+ /**
1056
+ * Get messages with filtering options
1057
+ * @param options Filtering options
1058
+ * @returns Filtered messages
1059
+ */
1060
+ async getMessages(options = {}) {
1061
+ await this.initialized;
1062
+ await debugDelay();
1063
+ const {
1064
+ userId = "default",
1065
+ conversationId = "default",
1066
+ limit,
1067
+ before,
1068
+ after,
1069
+ role,
1070
+ types
1071
+ } = options;
1072
+ const messagesTableName = `${this.options.tablePrefix}_messages`;
1073
+ const conversationsTableName = `${this.options.tablePrefix}_conversations`;
1074
+ try {
1075
+ let sql = `
1076
+ SELECT m.message_id, m.role, m.content, m.type, m.created_at, m.conversation_id
1077
+ FROM ${messagesTableName} m
1078
+ `;
1079
+ const args = [];
1080
+ const conditions = [];
1081
+ if (userId !== "default") {
1082
+ sql += ` INNER JOIN ${conversationsTableName} c ON m.conversation_id = c.id`;
1083
+ conditions.push("c.user_id = ?");
1084
+ args.push(userId);
1085
+ }
1086
+ if (conversationId !== "default") {
1087
+ conditions.push("m.conversation_id = ?");
1088
+ args.push(conversationId);
1089
+ }
1090
+ if (before) {
1091
+ conditions.push("m.created_at < ?");
1092
+ args.push(new Date(before).toISOString());
1093
+ }
1094
+ if (after) {
1095
+ conditions.push("m.created_at > ?");
1096
+ args.push(new Date(after).toISOString());
1097
+ }
1098
+ if (role) {
1099
+ conditions.push("m.role = ?");
1100
+ args.push(role);
1101
+ }
1102
+ if (types) {
1103
+ const placeholders = types.map(() => "?").join(", ");
1104
+ conditions.push(`m.type IN (${placeholders})`);
1105
+ args.push(...types);
1106
+ }
1107
+ if (conditions.length > 0) {
1108
+ sql += ` WHERE ${conditions.join(" AND ")}`;
1109
+ }
1110
+ if (limit && limit > 0) {
1111
+ sql += " ORDER BY m.created_at DESC LIMIT ?";
1112
+ args.push(limit);
1113
+ } else {
1114
+ sql += " ORDER BY m.created_at ASC";
1115
+ }
1116
+ const result = await this.client.execute({
1117
+ sql,
1118
+ args
1119
+ });
1120
+ const messages = result.rows.map((row) => {
1121
+ let content = row.content;
1122
+ const parsedContent = (0, import_core.safeJsonParse)(content);
1123
+ if (parsedContent !== null) {
1124
+ content = parsedContent;
1125
+ }
1126
+ return {
1127
+ id: row.message_id,
1128
+ role: row.role,
1129
+ content,
1130
+ type: row.type,
1131
+ createdAt: row.created_at
1132
+ };
1133
+ });
1134
+ if (limit && limit > 0) {
1135
+ return messages.reverse();
1136
+ }
1137
+ return messages;
1138
+ } catch (error) {
1139
+ this.debug("Error getting messages:", error);
1140
+ throw new Error("Failed to get messages from LibSQL database");
1141
+ }
1142
+ }
1143
+ /**
1144
+ * Add a message to the conversation history
1145
+ * @param message Message to add
1146
+ * @param userId User identifier (optional, defaults to "default")
1147
+ * @param conversationId Conversation identifier (optional, defaults to "default")
1148
+ */
1149
+ async addMessage(message, conversationId = "default") {
1150
+ await this.initialized;
1151
+ await debugDelay();
1152
+ const tableName = `${this.options.tablePrefix}_messages`;
1153
+ const contentString = (0, import_utils2.safeStringify)(message.content);
1154
+ await this.executeWithRetryStrategy(async () => {
1155
+ await this.client.execute({
1156
+ sql: `INSERT INTO ${tableName} (conversation_id, message_id, role, content, type, created_at)
1157
+ VALUES (?, ?, ?, ?, ?, ?)`,
1158
+ args: [
1159
+ conversationId,
1160
+ message.id,
1161
+ message.role,
1162
+ contentString,
1163
+ message.type,
1164
+ message.createdAt
1165
+ ]
1166
+ });
1167
+ this.debug("Message added successfully", { conversationId, messageId: message.id });
1168
+ try {
1169
+ await this.pruneOldMessages(conversationId);
1170
+ } catch (pruneError) {
1171
+ this.debug("Error pruning old messages:", pruneError);
1172
+ }
1173
+ }, `addMessage[${message.id}]`);
1174
+ }
1175
+ /**
1176
+ * Prune old messages to respect storage limit
1177
+ * @param conversationId Conversation ID to prune messages for
1178
+ */
1179
+ async pruneOldMessages(conversationId) {
1180
+ const limit = this.options.storageLimit || 100;
1181
+ const tableName = `${this.options.tablePrefix}_messages`;
1182
+ try {
1183
+ const countResult = await this.client.execute({
1184
+ sql: `SELECT COUNT(*) as count FROM ${tableName} WHERE conversation_id = ?`,
1185
+ args: [conversationId]
1186
+ });
1187
+ const messageCount = countResult.rows[0]?.count;
1188
+ if (messageCount > limit) {
1189
+ const deleteCount = messageCount - limit;
1190
+ await this.client.execute({
1191
+ sql: `DELETE FROM ${tableName}
1192
+ WHERE conversation_id = ?
1193
+ AND message_id IN (
1194
+ SELECT message_id FROM ${tableName}
1195
+ WHERE conversation_id = ?
1196
+ ORDER BY created_at ASC
1197
+ LIMIT ?
1198
+ )`,
1199
+ args: [conversationId, conversationId, deleteCount]
1200
+ });
1201
+ this.debug(`Pruned ${deleteCount} old messages for conversation ${conversationId}`);
1202
+ }
1203
+ } catch (error) {
1204
+ this.debug("Error pruning old messages:", error);
1205
+ throw error;
1206
+ }
1207
+ }
1208
+ /**
1209
+ * Clear messages from memory
1210
+ */
1211
+ async clearMessages(options) {
1212
+ await this.initialized;
1213
+ await debugDelay();
1214
+ const { userId, conversationId } = options;
1215
+ const messagesTableName = `${this.options.tablePrefix}_messages`;
1216
+ const conversationsTableName = `${this.options.tablePrefix}_conversations`;
1217
+ try {
1218
+ if (conversationId) {
1219
+ await this.client.execute({
1220
+ sql: `DELETE FROM ${messagesTableName}
1221
+ WHERE conversation_id = ?
1222
+ AND conversation_id IN (
1223
+ SELECT id FROM ${conversationsTableName} WHERE user_id = ?
1224
+ )`,
1225
+ args: [conversationId, userId]
1226
+ });
1227
+ this.debug(`Cleared messages for conversation ${conversationId} for user ${userId}`);
1228
+ } else {
1229
+ await this.client.execute({
1230
+ sql: `DELETE FROM ${messagesTableName}
1231
+ WHERE conversation_id IN (
1232
+ SELECT id FROM ${conversationsTableName} WHERE user_id = ?
1233
+ )`,
1234
+ args: [userId]
1235
+ });
1236
+ this.debug(`Cleared all messages for user ${userId}`);
1237
+ }
1238
+ } catch (error) {
1239
+ this.debug("Error clearing messages:", error);
1240
+ throw new Error("Failed to clear messages from LibSQL database");
1241
+ }
1242
+ }
1243
+ /**
1244
+ * Close the database connection
1245
+ */
1246
+ async close() {
1247
+ try {
1248
+ await this.initialized;
1249
+ } catch {
1250
+ }
1251
+ this.client.close();
1252
+ }
1253
+ /**
1254
+ * Add or update a history entry
1255
+ * @param key Entry ID
1256
+ * @param value Entry data
1257
+ * @param agentId Agent ID for filtering
1258
+ */
1259
+ async addHistoryEntry(key, value, agentId) {
1260
+ await this.initialized;
1261
+ try {
1262
+ const tableName = `${this.options.tablePrefix}_agent_history`;
1263
+ const inputJSON = value.input ? (0, import_utils2.safeStringify)(value.input) : null;
1264
+ const outputJSON = value.output ? (0, import_utils2.safeStringify)(value.output) : null;
1265
+ const usageJSON = value.usage ? (0, import_utils2.safeStringify)(value.usage) : null;
1266
+ const metadataJSON = value.metadata ? (0, import_utils2.safeStringify)(value.metadata) : null;
1267
+ await this.client.execute({
1268
+ sql: `INSERT OR REPLACE INTO ${tableName}
1269
+ (id, agent_id, timestamp, status, input, output, usage, metadata, userId, conversationId)
1270
+ VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
1271
+ args: [
1272
+ key,
1273
+ // id
1274
+ agentId,
1275
+ // agent_id
1276
+ value.timestamp ? value.timestamp.toISOString() : (/* @__PURE__ */ new Date()).toISOString(),
1277
+ // timestamp
1278
+ value.status || null,
1279
+ // status
1280
+ inputJSON,
1281
+ // input
1282
+ outputJSON,
1283
+ // output
1284
+ usageJSON,
1285
+ // usage
1286
+ metadataJSON,
1287
+ // metadata
1288
+ value.userId || null,
1289
+ // userId
1290
+ value.conversationId || null
1291
+ // conversationId
1292
+ ]
1293
+ });
1294
+ this.debug(`Set agent_history entry with ID ${key} for agent ${agentId}`);
1295
+ } catch (error) {
1296
+ this.debug("Error setting agent_history entry:", error);
1297
+ throw new Error("Failed to set value in agent_history");
1298
+ }
1299
+ }
1300
+ /**
1301
+ * Update an existing history entry
1302
+ * @param key Entry ID
1303
+ * @param value Updated entry data
1304
+ * @param agentId Agent ID for filtering
1305
+ */
1306
+ async updateHistoryEntry(key, value, agentId) {
1307
+ return this.addHistoryEntry(key, value, agentId);
1308
+ }
1309
+ /**
1310
+ * Add a history step
1311
+ * @param key Step ID
1312
+ * @param value Step data
1313
+ * @param historyId Related history entry ID
1314
+ * @param agentId Agent ID for filtering
1315
+ */
1316
+ async addHistoryStep(key, value, historyId, agentId) {
1317
+ await this.initialized;
1318
+ try {
1319
+ const tableName = `${this.options.tablePrefix}_agent_history_steps`;
1320
+ const serializedValue = (0, import_utils2.safeStringify)(value);
1321
+ await this.client.execute({
1322
+ sql: `INSERT OR REPLACE INTO ${tableName} (key, value, history_id, agent_id) VALUES (?, ?, ?, ?)`,
1323
+ args: [key, serializedValue, historyId, agentId]
1324
+ });
1325
+ this.debug(`Set agent_history_steps:${key} for history ${historyId} and agent ${agentId}`);
1326
+ } catch (error) {
1327
+ this.debug(`Error setting agent_history_steps:${key}`, error);
1328
+ throw new Error("Failed to set value in agent_history_steps");
1329
+ }
1330
+ }
1331
+ /**
1332
+ * Update a history step
1333
+ * @param key Step ID
1334
+ * @param value Updated step data
1335
+ * @param historyId Related history entry ID
1336
+ * @param agentId Agent ID for filtering
1337
+ */
1338
+ async updateHistoryStep(key, value, historyId, agentId) {
1339
+ return this.addHistoryStep(key, value, historyId, agentId);
1340
+ }
1341
+ /**
1342
+ * Add a timeline event
1343
+ * @param key Event ID (UUID)
1344
+ * @param value Timeline event data
1345
+ * @param historyId Related history entry ID
1346
+ * @param agentId Agent ID for filtering
1347
+ */
1348
+ async addTimelineEvent(key, value, historyId, agentId) {
1349
+ await this.initialized;
1350
+ try {
1351
+ const tableName = `${this.options.tablePrefix}_agent_history_timeline_events`;
1352
+ const inputJSON = value.input ? (0, import_utils2.safeStringify)(value.input) : null;
1353
+ const outputJSON = value.output ? (0, import_utils2.safeStringify)(value.output) : null;
1354
+ const statusMessageJSON = value.statusMessage ? (0, import_utils2.safeStringify)(value.statusMessage) : null;
1355
+ const metadataJSON = value.metadata ? (0, import_utils2.safeStringify)(value.metadata) : null;
1356
+ const tagsJSON = value.tags ? (0, import_utils2.safeStringify)(value.tags) : null;
1357
+ await this.client.execute({
1358
+ sql: `INSERT OR REPLACE INTO ${tableName}
1359
+ (id, history_id, agent_id, event_type, event_name,
1360
+ start_time, end_time, status, status_message, level,
1361
+ version, parent_event_id, tags,
1362
+ input, output, error, metadata)
1363
+ VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
1364
+ args: [
1365
+ key,
1366
+ historyId,
1367
+ agentId,
1368
+ value.type,
1369
+ value.name,
1370
+ value.startTime,
1371
+ value.endTime || null,
1372
+ value.status || null,
1373
+ statusMessageJSON || null,
1374
+ value.level || "INFO",
1375
+ value.version || null,
1376
+ value.parentEventId || null,
1377
+ tagsJSON,
1378
+ inputJSON,
1379
+ outputJSON,
1380
+ statusMessageJSON,
1381
+ metadataJSON
1382
+ ]
1383
+ });
1384
+ this.debug(`Added timeline event ${key} for history ${historyId}`);
1385
+ } catch (error) {
1386
+ this.debug("Error adding timeline event:", error);
1387
+ throw new Error("Failed to add timeline event");
1388
+ }
1389
+ }
1390
+ /**
1391
+ * Get a history entry by ID
1392
+ * @param key Entry ID
1393
+ * @returns The history entry or undefined if not found
1394
+ */
1395
+ async getHistoryEntry(key) {
1396
+ await this.initialized;
1397
+ try {
1398
+ const tableName = `${this.options.tablePrefix}_agent_history`;
1399
+ const result = await this.client.execute({
1400
+ sql: `SELECT id, agent_id, timestamp, status, input, output, usage, metadata, userId, conversationId
1401
+ FROM ${tableName} WHERE id = ?`,
1402
+ args: [key]
1403
+ });
1404
+ if (result.rows.length === 0) {
1405
+ this.debug(`History entry with ID ${key} not found`);
1406
+ return void 0;
1407
+ }
1408
+ const row = result.rows[0];
1409
+ const entry = {
1410
+ id: row.id,
1411
+ _agentId: row.agent_id,
1412
+ // Keep _agentId for compatibility
1413
+ timestamp: new Date(row.timestamp),
1414
+ status: row.status,
1415
+ input: row.input ? (0, import_core.safeJsonParse)(row.input) : null,
1416
+ output: row.output ? (0, import_core.safeJsonParse)(row.output) : null,
1417
+ usage: row.usage ? (0, import_core.safeJsonParse)(row.usage) : null,
1418
+ metadata: row.metadata ? (0, import_core.safeJsonParse)(row.metadata) : null,
1419
+ userId: row.userId,
1420
+ conversationId: row.conversationId
1421
+ };
1422
+ this.debug(`Got history entry with ID ${key}`);
1423
+ const stepsTableName = `${this.options.tablePrefix}_agent_history_steps`;
1424
+ const stepsResult = await this.client.execute({
1425
+ sql: `SELECT value FROM ${stepsTableName} WHERE history_id = ? AND agent_id = ?`,
1426
+ args: [key, entry._agentId]
1427
+ });
1428
+ const steps = stepsResult.rows.map((row2) => {
1429
+ const step = (0, import_core.safeJsonParse)(row2.value);
1430
+ return {
1431
+ type: step.type,
1432
+ name: step.name,
1433
+ content: step.content,
1434
+ arguments: step.arguments
1435
+ };
1436
+ });
1437
+ const timelineEventsTableName = `${this.options.tablePrefix}_agent_history_timeline_events`;
1438
+ const timelineEventsResult = await this.client.execute({
1439
+ sql: `SELECT id, event_type, event_name, start_time, end_time,
1440
+ status, status_message, level, version,
1441
+ parent_event_id, tags, input, output, error, metadata
1442
+ FROM ${timelineEventsTableName}
1443
+ WHERE history_id = ? AND agent_id = ?`,
1444
+ args: [key, entry._agentId]
1445
+ });
1446
+ const events = timelineEventsResult.rows.map((row2) => {
1447
+ const input = row2.input ? (0, import_core.safeJsonParse)(row2.input) : void 0;
1448
+ const output = row2.output ? (0, import_core.safeJsonParse)(row2.output) : void 0;
1449
+ const error = row2.error ? (0, import_core.safeJsonParse)(row2.error) : void 0;
1450
+ const statusMessage = row2.status_message ? (0, import_core.safeJsonParse)(row2.status_message) : void 0;
1451
+ const metadata = row2.metadata ? (0, import_core.safeJsonParse)(row2.metadata) : void 0;
1452
+ const tags = row2.tags ? (0, import_core.safeJsonParse)(row2.tags) : void 0;
1453
+ return {
1454
+ id: row2.id,
1455
+ type: row2.event_type,
1456
+ name: row2.event_name,
1457
+ startTime: row2.start_time,
1458
+ endTime: row2.end_time,
1459
+ status: row2.status,
1460
+ statusMessage,
1461
+ level: row2.level,
1462
+ version: row2.version,
1463
+ parentEventId: row2.parent_event_id,
1464
+ tags,
1465
+ input,
1466
+ output,
1467
+ error: statusMessage ? statusMessage : error,
1468
+ metadata
1469
+ };
1470
+ });
1471
+ entry.steps = steps;
1472
+ entry.events = events;
1473
+ return entry;
1474
+ } catch (error) {
1475
+ this.debug(`Error getting history entry with ID ${key}`, error);
1476
+ return void 0;
1477
+ }
1478
+ }
1479
+ /**
1480
+ * Get a history step by ID
1481
+ * @param key Step ID
1482
+ * @returns The history step or undefined if not found
1483
+ */
1484
+ async getHistoryStep(key) {
1485
+ await this.initialized;
1486
+ try {
1487
+ const tableName = `${this.options.tablePrefix}_agent_history_steps`;
1488
+ const result = await this.client.execute({
1489
+ sql: `SELECT value FROM ${tableName} WHERE key = ?`,
1490
+ args: [key]
1491
+ });
1492
+ if (result.rows.length === 0) {
1493
+ this.debug(`History step with ID ${key} not found`);
1494
+ return void 0;
1495
+ }
1496
+ const value = (0, import_core.safeJsonParse)(result.rows[0].value);
1497
+ this.debug(`Got history step with ID ${key}`);
1498
+ return value;
1499
+ } catch (error) {
1500
+ this.debug(`Error getting history step with ID ${key}`, error);
1501
+ return void 0;
1502
+ }
1503
+ }
1504
+ async createConversation(conversation) {
1505
+ await this.initialized;
1506
+ await debugDelay();
1507
+ const now = (/* @__PURE__ */ new Date()).toISOString();
1508
+ const metadataString = (0, import_utils2.safeStringify)(conversation.metadata);
1509
+ const tableName = `${this.options.tablePrefix}_conversations`;
1510
+ return await this.executeWithRetryStrategy(async () => {
1511
+ await this.client.execute({
1512
+ sql: `INSERT INTO ${tableName} (id, resource_id, user_id, title, metadata, created_at, updated_at)
1513
+ VALUES (?, ?, ?, ?, ?, ?, ?)`,
1514
+ args: [
1515
+ conversation.id,
1516
+ conversation.resourceId,
1517
+ conversation.userId,
1518
+ conversation.title,
1519
+ metadataString,
1520
+ now,
1521
+ now
1522
+ ]
1523
+ });
1524
+ return {
1525
+ id: conversation.id,
1526
+ resourceId: conversation.resourceId,
1527
+ userId: conversation.userId,
1528
+ title: conversation.title,
1529
+ metadata: conversation.metadata,
1530
+ createdAt: now,
1531
+ updatedAt: now
1532
+ };
1533
+ }, `createConversation[${conversation.id}]`);
1534
+ }
1535
+ async getConversation(id) {
1536
+ await this.initialized;
1537
+ await debugDelay();
1538
+ const tableName = `${this.options.tablePrefix}_conversations`;
1539
+ try {
1540
+ const result = await this.client.execute({
1541
+ sql: `SELECT * FROM ${tableName} WHERE id = ?`,
1542
+ args: [id]
1543
+ });
1544
+ if (result.rows.length === 0) {
1545
+ return null;
1546
+ }
1547
+ const row = result.rows[0];
1548
+ return {
1549
+ id: row.id,
1550
+ resourceId: row.resource_id,
1551
+ userId: row.user_id,
1552
+ title: row.title,
1553
+ metadata: row.metadata ? (0, import_core.safeJsonParse)(row.metadata) : {},
1554
+ createdAt: row.created_at,
1555
+ updatedAt: row.updated_at
1556
+ };
1557
+ } catch (error) {
1558
+ this.debug("Error getting conversation:", error);
1559
+ throw new Error("Failed to get conversation from LibSQL database");
1560
+ }
1561
+ }
1562
+ async getConversations(resourceId) {
1563
+ await this.initialized;
1564
+ await debugDelay();
1565
+ const tableName = `${this.options.tablePrefix}_conversations`;
1566
+ try {
1567
+ const result = await this.client.execute({
1568
+ sql: `SELECT * FROM ${tableName} WHERE resource_id = ? ORDER BY updated_at DESC`,
1569
+ args: [resourceId]
1570
+ });
1571
+ return result.rows.map((row) => ({
1572
+ id: row.id,
1573
+ resourceId: row.resource_id,
1574
+ userId: row.user_id,
1575
+ title: row.title,
1576
+ metadata: (0, import_core.safeJsonParse)(row.metadata),
1577
+ createdAt: row.created_at,
1578
+ updatedAt: row.updated_at
1579
+ }));
1580
+ } catch (error) {
1581
+ this.debug("Error getting conversations:", error);
1582
+ throw new Error("Failed to get conversations from LibSQL database");
1583
+ }
1584
+ }
1585
+ async getConversationsByUserId(userId, options = {}) {
1586
+ await this.initialized;
1587
+ await debugDelay();
1588
+ const {
1589
+ resourceId,
1590
+ limit = 50,
1591
+ offset = 0,
1592
+ orderBy = "updated_at",
1593
+ orderDirection = "DESC"
1594
+ } = options;
1595
+ const tableName = `${this.options.tablePrefix}_conversations`;
1596
+ try {
1597
+ let sql = `SELECT * FROM ${tableName} WHERE user_id = ?`;
1598
+ const args = [userId];
1599
+ if (resourceId) {
1600
+ sql += " AND resource_id = ?";
1601
+ args.push(resourceId);
1602
+ }
1603
+ sql += ` ORDER BY ${orderBy} ${orderDirection}`;
1604
+ if (limit > 0) {
1605
+ sql += " LIMIT ? OFFSET ?";
1606
+ args.push(limit, offset);
1607
+ }
1608
+ const result = await this.client.execute({
1609
+ sql,
1610
+ args
1611
+ });
1612
+ return result.rows.map((row) => ({
1613
+ id: row.id,
1614
+ resourceId: row.resource_id,
1615
+ userId: row.user_id,
1616
+ title: row.title,
1617
+ metadata: (0, import_core.safeJsonParse)(row.metadata),
1618
+ createdAt: row.created_at,
1619
+ updatedAt: row.updated_at
1620
+ }));
1621
+ } catch (error) {
1622
+ this.debug("Error getting conversations by user ID:", error);
1623
+ throw new Error("Failed to get conversations by user ID from LibSQL database");
1624
+ }
1625
+ }
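+ /*
+  * Illustrative usage sketch (editor's addition): getConversationsByUserId filters by user,
+  * optionally by resource, and applies ordering plus LIMIT/OFFSET pagination. All argument
+  * values below are hypothetical; `storage` is an assumed LibSQLStorage instance.
+  *
+  *   const recent = await storage.getConversationsByUserId("user-1", {
+  *     resourceId: "agent-1",
+  *     limit: 20,
+  *     offset: 0,
+  *     orderBy: "updated_at",
+  *     orderDirection: "DESC",
+  *   });
+  */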
1626
+ /**
1627
+ * Query conversations with filtering and pagination options
1628
+ *
1629
+ * @param options Query options for filtering and pagination
1630
+ * @returns Promise that resolves to an array of conversations matching the criteria
1631
+ * @see {@link https://voltagent.dev/docs/agents/memory/libsql#querying-conversations | Querying Conversations}
1632
+ */
1633
+ async queryConversations(options) {
1634
+ await this.initialized;
1635
+ await debugDelay();
1636
+ const {
1637
+ userId,
1638
+ resourceId,
1639
+ limit = 50,
1640
+ offset = 0,
1641
+ orderBy = "updated_at",
1642
+ orderDirection = "DESC"
1643
+ } = options;
1644
+ const tableName = `${this.options.tablePrefix}_conversations`;
1645
+ try {
1646
+ let sql = `SELECT * FROM ${tableName}`;
1647
+ const args = [];
1648
+ const conditions = [];
1649
+ if (userId) {
1650
+ conditions.push("user_id = ?");
1651
+ args.push(userId);
1652
+ }
1653
+ if (resourceId) {
1654
+ conditions.push("resource_id = ?");
1655
+ args.push(resourceId);
1656
+ }
1657
+ if (conditions.length > 0) {
1658
+ sql += ` WHERE ${conditions.join(" AND ")}`;
1659
+ }
1660
+ sql += ` ORDER BY ${orderBy} ${orderDirection}`;
1661
+ if (limit > 0) {
1662
+ sql += " LIMIT ? OFFSET ?";
1663
+ args.push(limit, offset);
1664
+ }
1665
+ const result = await this.client.execute({
1666
+ sql,
1667
+ args
1668
+ });
1669
+ return result.rows.map((row) => ({
1670
+ id: row.id,
1671
+ resourceId: row.resource_id,
1672
+ userId: row.user_id,
1673
+ title: row.title,
1674
+ metadata: (0, import_core.safeJsonParse)(row.metadata),
1675
+ createdAt: row.created_at,
1676
+ updatedAt: row.updated_at
1677
+ }));
1678
+ } catch (error) {
1679
+ this.debug("Error querying conversations:", error);
1680
+ throw new Error("Failed to query conversations from LibSQL database");
1681
+ }
1682
+ }
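+ /*
+  * Illustrative usage sketch (editor's addition): queryConversations builds its WHERE clause
+  * from whichever of userId/resourceId are provided, so either, both, or neither may be passed.
+  * Values are hypothetical; `storage` is an assumed LibSQLStorage instance.
+  *
+  *   // All conversations for one resource, oldest first, second page of 25
+  *   const pageTwo = await storage.queryConversations({
+  *     resourceId: "agent-1",
+  *     limit: 25,
+  *     offset: 25,
+  *     orderBy: "created_at",
+  *     orderDirection: "ASC",
+  *   });
+  */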
1683
+ /**
1684
+ * Get messages for a specific conversation with pagination support
1685
+ *
1686
+ * @param conversationId The unique identifier of the conversation to retrieve messages from
1687
+ * @param options Optional pagination and filtering options
1688
+ * @returns Promise that resolves to an array of messages in chronological order (oldest first)
1689
+ * @see {@link https://voltagent.dev/docs/agents/memory/libsql#conversation-messages | Getting Conversation Messages}
1690
+ */
1691
+ async getConversationMessages(conversationId, options = {}) {
1692
+ await this.initialized;
1693
+ await debugDelay();
1694
+ const { limit = 100, offset = 0 } = options;
1695
+ const tableName = `${this.options.tablePrefix}_messages`;
1696
+ try {
1697
+ let sql = `SELECT * FROM ${tableName} WHERE conversation_id = ? ORDER BY created_at ASC`;
1698
+ const args = [conversationId];
1699
+ if (limit > 0) {
1700
+ sql += " LIMIT ? OFFSET ?";
1701
+ args.push(limit, offset);
1702
+ }
1703
+ const result = await this.client.execute({
1704
+ sql,
1705
+ args
1706
+ });
1707
+ return result.rows.map((row) => {
1708
+ let content = row.content;
1709
+ const parsedContent = (0, import_core.safeJsonParse)(content);
1710
+ if (parsedContent !== null) {
1711
+ content = parsedContent;
1712
+ }
1713
+ return {
1714
+ id: row.message_id,
1715
+ role: row.role,
1716
+ content,
1717
+ type: row.type,
1718
+ createdAt: row.created_at
1719
+ };
1720
+ });
1721
+ } catch (error) {
1722
+ this.debug("Error getting conversation messages:", error);
1723
+ throw new Error("Failed to get conversation messages from LibSQL database");
1724
+ }
1725
+ }
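+ /*
+  * Illustrative usage sketch (editor's addition): messages come back oldest first; passing a
+  * limit of 0 skips the LIMIT/OFFSET clause entirely and returns every message in the
+  * conversation. Values are hypothetical; `storage` is an assumed LibSQLStorage instance.
+  *
+  *   const firstFifty = await storage.getConversationMessages("conv-1", { limit: 50, offset: 0 });
+  *   const everything = await storage.getConversationMessages("conv-1", { limit: 0 });
+  */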
1726
+ async updateConversation(id, updates) {
1727
+ await this.initialized;
1728
+ await debugDelay();
1729
+ const tableName = `${this.options.tablePrefix}_conversations`;
1730
+ const now = (/* @__PURE__ */ new Date()).toISOString();
1731
+ try {
1732
+ const updatesList = [];
1733
+ const args = [];
1734
+ if (updates.resourceId !== void 0) {
1735
+ updatesList.push("resource_id = ?");
1736
+ args.push(updates.resourceId);
1737
+ }
1738
+ if (updates.userId !== void 0) {
1739
+ updatesList.push("user_id = ?");
1740
+ args.push(updates.userId);
1741
+ }
1742
+ if (updates.title !== void 0) {
1743
+ updatesList.push("title = ?");
1744
+ args.push(updates.title);
1745
+ }
1746
+ if (updates.metadata !== void 0) {
1747
+ updatesList.push("metadata = ?");
1748
+ args.push((0, import_utils2.safeStringify)(updates.metadata));
1749
+ }
1750
+ updatesList.push("updated_at = ?");
1751
+ args.push(now);
1752
+ args.push(id);
1753
+ await this.client.execute({
1754
+ sql: `UPDATE ${tableName} SET ${updatesList.join(", ")} WHERE id = ?`,
1755
+ args
1756
+ });
1757
+ const updated = await this.getConversation(id);
1758
+ if (!updated) {
1759
+ throw new Error("Conversation not found after update");
1760
+ }
1761
+ return updated;
1762
+ } catch (error) {
1763
+ this.debug("Error updating conversation:", error);
1764
+ throw new Error("Failed to update conversation in LibSQL database");
1765
+ }
1766
+ }
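+ /*
+  * Illustrative usage sketch (editor's addition): only the fields present on `updates` are
+  * written, updated_at is always refreshed, and the freshly re-read row is returned. Values
+  * are hypothetical; `storage` is an assumed LibSQLStorage instance.
+  *
+  *   const renamed = await storage.updateConversation("conv-1", {
+  *     title: "Escalated support chat",
+  *     metadata: { priority: "high" },
+  *   });
+  */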
1767
+ async deleteConversation(id) {
1768
+ await this.initialized;
1769
+ await debugDelay();
1770
+ const conversationsTableName = `${this.options.tablePrefix}_conversations`;
1771
+ const messagesTableName = `${this.options.tablePrefix}_messages`;
1772
+ try {
1773
+ await this.client.execute({
1774
+ sql: `DELETE FROM ${messagesTableName} WHERE conversation_id = ?`,
1775
+ args: [id]
1776
+ });
1777
+ await this.client.execute({
1778
+ sql: `DELETE FROM ${conversationsTableName} WHERE id = ?`,
1779
+ args: [id]
1780
+ });
1781
+ } catch (error) {
1782
+ this.debug("Error deleting conversation:", error);
1783
+ throw new Error("Failed to delete conversation from LibSQL database");
1784
+ }
1785
+ }
1786
+ /**
1787
+ * Get all history entries for an agent with pagination
1788
+ * @param agentId Agent ID
1789
+ * @param page Page number (0-based)
1790
+ * @param limit Number of entries per page
1791
+ * @returns Object with entries array and total count
1792
+ */
1793
+ async getAllHistoryEntriesByAgent(agentId, page, limit) {
1794
+ await this.initialized;
1795
+ try {
1796
+ const tableName = `${this.options.tablePrefix}_agent_history`;
1797
+ const offset = page * limit;
1798
+ const countResult = await this.client.execute({
1799
+ sql: `SELECT COUNT(*) as total FROM ${tableName} WHERE agent_id = ?`,
1800
+ args: [agentId]
1801
+ });
1802
+ const total = Number(countResult.rows[0].total);
1803
+ const result = await this.client.execute({
1804
+ sql: `SELECT id, agent_id, timestamp, status, input, output, usage, metadata, userId, conversationId
1805
+ FROM ${tableName} WHERE agent_id = ?
1806
+ ORDER BY timestamp DESC
1807
+ LIMIT ? OFFSET ?`,
1808
+ args: [agentId, limit, offset]
1809
+ });
1810
+ const entries = result.rows.map((row) => ({
1811
+ id: row.id,
1812
+ _agentId: row.agent_id,
1813
+ // Keep _agentId for compatibility
1814
+ timestamp: new Date(row.timestamp),
1815
+ status: row.status,
1816
+ input: row.input ? (0, import_core.safeJsonParse)(row.input) : null,
1817
+ output: row.output ? (0, import_core.safeJsonParse)(row.output) : null,
1818
+ usage: row.usage ? (0, import_core.safeJsonParse)(row.usage) : null,
1819
+ metadata: row.metadata ? (0, import_core.safeJsonParse)(row.metadata) : null,
1820
+ userId: row.userId,
1821
+ conversationId: row.conversationId
1822
+ }));
1823
+ this.debug(`Got all history entries for agent ${agentId} (${entries.length} items)`);
1824
+ const completeEntries = await Promise.all(
1825
+ entries.map(async (entry) => {
1826
+ const stepsTableName = `${this.options.tablePrefix}_agent_history_steps`;
1827
+ const stepsResult = await this.client.execute({
1828
+ sql: `SELECT value FROM ${stepsTableName} WHERE history_id = ? AND agent_id = ?`,
1829
+ args: [entry.id, agentId]
1830
+ });
1831
+ const steps = stepsResult.rows.map((row) => {
1832
+ const step = (0, import_core.safeJsonParse)(row.value);
1833
+ return {
1834
+ type: step.type,
1835
+ name: step.name,
1836
+ content: step.content,
1837
+ arguments: step.arguments
1838
+ };
1839
+ });
1840
+ const timelineEventsTableName = `${this.options.tablePrefix}_agent_history_timeline_events`;
1841
+ const timelineEventsResult = await this.client.execute({
1842
+ sql: `SELECT id, event_type, event_name, start_time, end_time,
1843
+ status, status_message, level, version,
1844
+ parent_event_id, tags, input, output, error, metadata
1845
+ FROM ${timelineEventsTableName}
1846
+ WHERE history_id = ? AND agent_id = ?`,
1847
+ args: [entry.id, agentId]
1848
+ });
1849
+ const events = timelineEventsResult.rows.map((row) => {
1850
+ const input = row.input ? (0, import_core.safeJsonParse)(row.input) : void 0;
1851
+ const output = row.output ? (0, import_core.safeJsonParse)(row.output) : void 0;
1852
+ const error = row.error ? (0, import_core.safeJsonParse)(row.error) : void 0;
1853
+ const statusMessage = row.status_message ? (0, import_core.safeJsonParse)(row.status_message) : void 0;
1854
+ const metadata = row.metadata ? (0, import_core.safeJsonParse)(row.metadata) : void 0;
1855
+ const tags = row.tags ? (0, import_core.safeJsonParse)(row.tags) : void 0;
1856
+ return {
1857
+ id: row.id,
1858
+ type: row.event_type,
1859
+ name: row.event_name,
1860
+ startTime: row.start_time,
1861
+ endTime: row.end_time,
1862
+ status: row.status,
1863
+ statusMessage,
1864
+ level: row.level,
1865
+ version: row.version,
1866
+ parentEventId: row.parent_event_id,
1867
+ tags,
1868
+ input,
1869
+ output,
1870
+ error: statusMessage ? statusMessage : error,
1871
+ metadata
1872
+ };
1873
+ });
1874
+ entry.steps = steps;
1875
+ entry.events = events;
1876
+ return entry;
1877
+ })
1878
+ );
1879
+ return {
1880
+ entries: completeEntries,
1881
+ total
1882
+ };
1883
+ } catch (error) {
1884
+ this.debug(`Error getting history entries for agent ${agentId}`, error);
1885
+ return {
1886
+ entries: [],
1887
+ total: 0
1888
+ };
1889
+ }
1890
+ }
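+ /*
+  * Illustrative usage sketch (editor's addition): pagination here is zero-based, and the total
+  * row count is returned alongside the page, so the number of pages can be derived from it.
+  * Values are hypothetical; `storage` is an assumed LibSQLStorage instance.
+  *
+  *   const { entries, total } = await storage.getAllHistoryEntriesByAgent("agent-1", 0, 25);
+  *   const pageCount = Math.ceil(total / 25);
+  */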
1891
+ /**
1892
+ * Migrates agent history data from old structure to new structure.
1893
+ * If migration fails, it can be rolled back using the backup mechanism.
1894
+ *
1895
+ * Old database structure:
1896
+ * CREATE TABLE voltagent_memory_agent_history (
1897
+ * key TEXT PRIMARY KEY,
1898
+ * value TEXT NOT NULL,
1899
+ * agent_id TEXT
1900
+ * );
1901
+ */
1902
+ async migrateAgentHistoryData(options = {}) {
1903
+ const {
1904
+ createBackup = true,
1905
+ restoreFromBackup = false,
1906
+ deleteBackupAfterSuccess = false
1907
+ } = options;
1908
+ const oldTableName = `${this.options.tablePrefix}_agent_history`;
1909
+ const oldTableBackup = `${oldTableName}_backup`;
1910
+ const timelineEventsTableName = `${this.options.tablePrefix}_agent_history_timeline_events`;
1911
+ try {
1912
+ this.debug("Starting agent history migration...");
1913
+ const flagCheck = await this.checkMigrationFlag("agent_history_data_migration");
1914
+ if (flagCheck.alreadyCompleted) {
1915
+ return { success: true, migratedCount: 0 };
1916
+ }
1917
+ if (restoreFromBackup) {
1918
+ this.debug("Starting restoration from backup...");
1919
+ const backupCheck = await this.client.execute({
1920
+ sql: "SELECT name FROM sqlite_master WHERE type='table' AND name=?",
1921
+ args: [oldTableBackup]
1922
+ });
1923
+ if (backupCheck.rows.length === 0) {
1924
+ throw new Error("No backup found to restore");
1925
+ }
1926
+ await this.client.execute("BEGIN TRANSACTION;");
1927
+ await this.client.execute(`DROP TABLE IF EXISTS ${oldTableName};`);
1928
+ await this.client.execute(`ALTER TABLE ${oldTableBackup} RENAME TO ${oldTableName};`);
1929
+ await this.client.execute("COMMIT;");
1930
+ this.debug("Restoration from backup completed successfully");
1931
+ return {
1932
+ success: true,
1933
+ backupCreated: false
1934
+ };
1935
+ }
1936
+ const tableInfoQuery = await this.client.execute(`PRAGMA table_info(${oldTableName})`);
1937
+ if (tableInfoQuery.rows.length === 0) {
1938
+ this.debug(`${oldTableName} table not found, migration not needed`);
1939
+ return {
1940
+ success: true,
1941
+ migratedCount: 0
1942
+ };
1943
+ }
1944
+ const hasValueColumn = tableInfoQuery.rows.some((row) => row.name === "value");
1945
+ if (!hasValueColumn) {
1946
+ this.debug("Table is already in new format, migration not needed");
1947
+ return {
1948
+ success: true,
1949
+ migratedCount: 0
1950
+ };
1951
+ }
1952
+ if (createBackup) {
1953
+ this.debug("Creating backup...");
1954
+ const backupCheck = await this.client.execute({
1955
+ sql: "SELECT name FROM sqlite_master WHERE type='table' AND name=?",
1956
+ args: [oldTableBackup]
1957
+ });
1958
+ if (backupCheck.rows.length > 0) {
1959
+ await this.client.execute(`DROP TABLE IF EXISTS ${oldTableBackup};`);
1960
+ }
1961
+ await this.client.execute(
1962
+ `CREATE TABLE ${oldTableBackup} AS SELECT * FROM ${oldTableName};`
1963
+ );
1964
+ this.debug("Backup created successfully");
1965
+ }
1966
+ const oldFormatData = await this.client.execute({
1967
+ sql: `SELECT key, value, agent_id FROM ${oldTableName}`
1968
+ });
1969
+ if (oldFormatData.rows.length === 0) {
1970
+ this.debug("No data found to migrate");
1971
+ return {
1972
+ success: true,
1973
+ migratedCount: 0,
1974
+ backupCreated: createBackup
1975
+ };
1976
+ }
1977
+ const tempTableName = `${oldTableName}_temp`;
1978
+ await this.client.execute(`
1979
+ CREATE TABLE ${tempTableName} (
1980
+ id TEXT PRIMARY KEY,
1981
+ agent_id TEXT NOT NULL,
1982
+ timestamp TEXT NOT NULL,
1983
+ status TEXT,
1984
+ input TEXT,
1985
+ output TEXT,
1986
+ usage TEXT,
1987
+ metadata TEXT
1988
+ )
1989
+ `);
1990
+ await this.client.execute("BEGIN TRANSACTION;");
1991
+ let migratedCount = 0;
1992
+ const migratedIds = /* @__PURE__ */ new Set();
1993
+ for (const row of oldFormatData.rows) {
1994
+ const key = row.key;
1995
+ const agentId = row.agent_id;
1996
+ const valueStr = row.value;
1997
+ try {
1998
+ const valueObj = (0, import_core.safeJsonParse)(valueStr);
1999
+ const id = valueObj.id || key;
2000
+ if (migratedIds.has(id)) {
2001
+ continue;
2002
+ }
2003
+ migratedIds.add(id);
2004
+ migratedCount++;
2005
+ const inputJSON = valueObj.input ? (0, import_utils2.safeStringify)(valueObj.input) : null;
2006
+ const outputJSON = valueObj.output ? (0, import_utils2.safeStringify)(valueObj.output) : null;
2007
+ const usageJSON = valueObj.usage ? (0, import_utils2.safeStringify)(valueObj.usage) : null;
2008
+ await this.client.execute({
2009
+ sql: `INSERT INTO ${tempTableName}
2010
+ (id, agent_id, timestamp, status, input, output, usage, metadata)
2011
+ VALUES (?, ?, ?, ?, ?, ?, ?, ?)`,
2012
+ args: [
2013
+ id,
2014
+ valueObj._agentId || agentId,
2015
+ valueObj.timestamp || (/* @__PURE__ */ new Date()).toISOString(),
2016
+ valueObj.status || null,
2017
+ inputJSON,
2018
+ outputJSON,
2019
+ usageJSON,
2020
+ null
2021
+ ]
2022
+ });
2023
+ let input = "";
2024
+ if (Array.isArray(valueObj.events)) {
2025
+ for (const event of valueObj.events) {
2026
+ try {
2027
+ if (event.affectedNodeId?.startsWith("message_")) {
2028
+ input = event.data.input;
2029
+ continue;
2030
+ }
2031
+ const eventId = event.id || this.generateId();
2032
+ const eventType = event.type || "unknown";
2033
+ let eventName = event.name || "unknown";
2034
+ const startTime = event.timestamp || event.startTime || (/* @__PURE__ */ new Date()).toISOString();
2035
+ const endTime = event.updatedAt || event.endTime || startTime;
2036
+ let status = event.status || event.data?.status || null;
2037
+ let inputData = null;
2038
+ if (event.input) {
2039
+ inputData = (0, import_utils2.safeStringify)({ input: event.input });
2040
+ } else if (event.data?.input) {
2041
+ inputData = (0, import_utils2.safeStringify)({ input: event.data.input });
2042
+ } else if (input) {
2043
+ inputData = (0, import_utils2.safeStringify)({ input });
2044
+ }
2045
+ input = "";
2046
+ let metadata = null;
2047
+ if (event.metadata) {
2048
+ metadata = (0, import_utils2.safeStringify)(event.metadata);
2049
+ } else if (event.data) {
2050
+ metadata = (0, import_utils2.safeStringify)({
2051
+ id: event.affectedNodeId?.split("_").pop(),
2052
+ agentId: event.data?.metadata?.sourceAgentId,
2053
+ ...event.data
2054
+ });
2055
+ }
2056
+ if (eventType === "agent") {
2057
+ if (eventName === "start") {
2058
+ eventName = "agent:start";
2059
+ status = "running";
2060
+ } else if (eventName === "finished") {
2061
+ if (event.data.status === "error") {
2062
+ eventName = "agent:error";
2063
+ } else {
2064
+ eventName = "agent:success";
2065
+ }
2066
+ }
2067
+ await this.client.execute({
2068
+ sql: `INSERT OR REPLACE INTO ${timelineEventsTableName}
2069
+ (id, history_id, agent_id, event_type, event_name, start_time, end_time,
2070
+ status, status_message, level, version, parent_event_id,
2071
+ tags, input, output, error, metadata)
2072
+ VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
2073
+ args: [
2074
+ eventId,
2075
+ id,
2076
+ valueObj._agentId || agentId,
2077
+ eventType,
2078
+ eventName,
2079
+ startTime,
2080
+ endTime,
2081
+ // @ts-ignore
2082
+ status,
2083
+ eventName === "agent:error" ? event.data.error.message : null,
2084
+ event.level || "INFO",
2085
+ event.version || null,
2086
+ event.parentEventId || null,
2087
+ null,
2088
+ // tags
2089
+ inputData,
2090
+ event.data.output ? (0, import_utils2.safeStringify)(event.data.output) : null,
2091
+ eventName === "agent:error" ? (0, import_utils2.safeStringify)(event.data.error) : null,
2092
+ metadata
2093
+ ]
2094
+ });
2095
+ } else if (eventType === "memory") {
2096
+ if (eventName === "memory:saveMessage") {
2097
+ await this.client.execute({
2098
+ sql: `INSERT OR REPLACE INTO ${timelineEventsTableName}
2099
+ (id, history_id, agent_id, event_type, event_name, start_time, end_time,
2100
+ status, status_message, level, version, parent_event_id,
2101
+ tags, input, output, error, metadata)
2102
+ VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
2103
+ args: [
2104
+ eventId,
2105
+ id,
2106
+ valueObj._agentId || agentId,
2107
+ eventType,
2108
+ "memory:write_start",
2109
+ startTime,
2110
+ null,
2111
+ // no endTime
2112
+ "running",
2113
+ event.statusMessage || null,
2114
+ event.level || "INFO",
2115
+ event.version || null,
2116
+ event.parentEventId || null,
2117
+ null,
2118
+ // tags
2119
+ inputData,
2120
+ null,
2121
+ // no output
2122
+ null,
2123
+ // no error
2124
+ (0, import_utils2.safeStringify)({
2125
+ id: "memory",
2126
+ agentId: event.affectedNodeId?.split("_").pop()
2127
+ })
2128
+ ]
2129
+ });
2130
+ await this.client.execute({
2131
+ sql: `INSERT OR REPLACE INTO ${timelineEventsTableName}
2132
+ (id, history_id, agent_id, event_type, event_name, start_time, end_time,
2133
+ status, status_message, level, version, parent_event_id,
2134
+ tags, input, output, error, metadata)
2135
+ VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
2136
+ args: [
2137
+ this.generateId(),
2138
+ // New ID
2139
+ id,
2140
+ valueObj._agentId || agentId,
2141
+ eventType,
2142
+ "memory:write_success",
2143
+ endTime,
2144
+ // End time
2145
+ endTime,
2146
+ "completed",
2147
+ event.statusMessage || null,
2148
+ event.level || "INFO",
2149
+ event.version || null,
2150
+ eventId,
2151
+ // Parent event ID
2152
+ null,
2153
+ // tags
2154
+ inputData,
2155
+ event.data.output ? (0, import_utils2.safeStringify)(event.data.output) : null,
2156
+ event.error ? (0, import_utils2.safeStringify)(event.error) : null,
2157
+ (0, import_utils2.safeStringify)({
2158
+ id: "memory",
2159
+ agentId: event.affectedNodeId?.split("_").pop()
2160
+ })
2161
+ ]
2162
+ });
2163
+ } else if (eventName === "memory:getMessages") {
2164
+ await this.client.execute({
2165
+ sql: `INSERT OR REPLACE INTO ${timelineEventsTableName}
2166
+ (id, history_id, agent_id, event_type, event_name, start_time, end_time,
2167
+ status, status_message, level, version, parent_event_id,
2168
+ tags, input, output, error, metadata)
2169
+ VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
2170
+ args: [
2171
+ eventId,
2172
+ id,
2173
+ valueObj._agentId || agentId,
2174
+ eventType,
2175
+ "memory:read_start",
2176
+ startTime,
2177
+ null,
2178
+ // no endTime
2179
+ "running",
2180
+ event.statusMessage || null,
2181
+ event.level || "INFO",
2182
+ event.version || null,
2183
+ event.parentEventId || null,
2184
+ null,
2185
+ // tags
2186
+ inputData,
2187
+ null,
2188
+ // no output
2189
+ null,
2190
+ // no error
2191
+ (0, import_utils2.safeStringify)({
2192
+ id: "memory",
2193
+ agentId: event.affectedNodeId?.split("_").pop()
2194
+ })
2195
+ ]
2196
+ });
2197
+ await this.client.execute({
2198
+ sql: `INSERT OR REPLACE INTO ${timelineEventsTableName}
2199
+ (id, history_id, agent_id, event_type, event_name, start_time, end_time,
2200
+ status, status_message, level, version, parent_event_id,
2201
+ tags, input, output, error, metadata)
2202
+ VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
2203
+ args: [
2204
+ this.generateId(),
2205
+ // New ID
2206
+ id,
2207
+ valueObj._agentId || agentId,
2208
+ eventType,
2209
+ "memory:read_success",
2210
+ endTime,
2211
+ // End time
2212
+ endTime,
2213
+ status,
2214
+ event.statusMessage || null,
2215
+ event.level || "INFO",
2216
+ event.version || null,
2217
+ eventId,
2218
+ // Parent event ID
2219
+ null,
2220
+ // tags
2221
+ inputData,
2222
+ event.data.output ? (0, import_utils2.safeStringify)(event.data.output) : null,
2223
+ event.error ? (0, import_utils2.safeStringify)(event.error) : null,
2224
+ (0, import_utils2.safeStringify)({
2225
+ id: "memory",
2226
+ agentId: event.affectedNodeId?.split("_").pop()
2227
+ })
2228
+ ]
2229
+ });
2230
+ } else {
2231
+ await this.client.execute({
2232
+ sql: `INSERT OR REPLACE INTO ${timelineEventsTableName}
2233
+ (id, history_id, agent_id, event_type, event_name, start_time, end_time,
2234
+ status, status_message, level, version, parent_event_id,
2235
+ tags, input, output, error, metadata)
2236
+ VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
2237
+ args: [
2238
+ eventId,
2239
+ id,
2240
+ valueObj._agentId || agentId,
2241
+ eventType,
2242
+ eventName,
2243
+ startTime,
2244
+ endTime,
2245
+ status,
2246
+ event.statusMessage || null,
2247
+ event.level || "INFO",
2248
+ event.version || null,
2249
+ event.parentEventId || null,
2250
+ null,
2251
+ // tags
2252
+ inputData,
2253
+ event.output ? (0, import_utils2.safeStringify)(event.output) : null,
2254
+ event.error ? (0, import_utils2.safeStringify)(event.error) : null,
2255
+ metadata
2256
+ ]
2257
+ });
2258
+ }
2259
+ } else if (eventType === "tool") {
2260
+ if (eventName === "tool_working") {
2261
+ await this.client.execute({
2262
+ sql: `INSERT OR REPLACE INTO ${timelineEventsTableName}
2263
+ (id, history_id, agent_id, event_type, event_name, start_time, end_time,
2264
+ status, status_message, level, version, parent_event_id,
2265
+ tags, input, output, error, metadata)
2266
+ VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
2267
+ args: [
2268
+ eventId,
2269
+ id,
2270
+ valueObj._agentId || agentId,
2271
+ eventType,
2272
+ "tool:start",
2273
+ startTime,
2274
+ null,
2275
+ // no endTime
2276
+ "running",
2277
+ event.statusMessage || null,
2278
+ event.level || "INFO",
2279
+ event.version || null,
2280
+ event.parentEventId || null,
2281
+ null,
2282
+ // tags
2283
+ inputData,
2284
+ null,
2285
+ // no output
2286
+ null,
2287
+ // no error
2288
+ (0, import_utils2.safeStringify)({
2289
+ id: event.affectedNodeId?.split("_").pop(),
2290
+ agentId: event.data?.metadata?.sourceAgentId,
2291
+ displayName: event.data.metadata.toolName
2292
+ })
2293
+ ]
2294
+ });
2295
+ await this.client.execute({
2296
+ sql: `INSERT OR REPLACE INTO ${timelineEventsTableName}
2297
+ (id, history_id, agent_id, event_type, event_name, start_time, end_time,
2298
+ status, status_message, level, version, parent_event_id,
2299
+ tags, input, output, error, metadata)
2300
+ VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
2301
+ args: [
2302
+ this.generateId(),
2303
+ // New ID
2304
+ id,
2305
+ valueObj._agentId || agentId,
2306
+ eventType,
2307
+ "tool:success",
2308
+ endTime,
2309
+ // End time
2310
+ endTime,
2311
+ "completed",
2312
+ event.statusMessage || null,
2313
+ event.level || "INFO",
2314
+ event.version || null,
2315
+ eventId,
2316
+ // Parent event ID
2317
+ null,
2318
+ // tags
2319
+ inputData,
2320
+ event.data.output ? (0, import_utils2.safeStringify)(event.data.output) : null,
2321
+ event.error ? (0, import_utils2.safeStringify)(event.error) : null,
2322
+ (0, import_utils2.safeStringify)({
2323
+ id: event.affectedNodeId?.split("_").pop(),
2324
+ agentId: event.data?.metadata?.sourceAgentId,
2325
+ displayName: event.data.metadata.toolName
2326
+ })
2327
+ ]
2328
+ });
2329
+ }
2330
+ } else {
2331
+ await this.client.execute({
2332
+ sql: `INSERT OR REPLACE INTO ${timelineEventsTableName}
2333
+ (id, history_id, agent_id, event_type, event_name, start_time, end_time,
2334
+ status, status_message, level, version, parent_event_id,
2335
+ tags, input, output, error, metadata)
2336
+ VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
2337
+ args: [
2338
+ eventId,
2339
+ id,
2340
+ valueObj._agentId || agentId,
2341
+ eventType,
2342
+ eventName,
2343
+ startTime,
2344
+ endTime,
2345
+ status,
2346
+ event.statusMessage || null,
2347
+ event.level || "INFO",
2348
+ event.version || null,
2349
+ event.parentEventId || null,
2350
+ null,
2351
+ // tags
2352
+ inputData,
2353
+ event.output ? (0, import_utils2.safeStringify)(event.output) : null,
2354
+ event.error ? (0, import_utils2.safeStringify)(event.error) : null,
2355
+ (0, import_utils2.safeStringify)({
2356
+ id: eventType === "retriever" ? "retriever" : event.type,
2357
+ agentId: event.affectedNodeId?.split("_").pop()
2358
+ })
2359
+ ]
2360
+ });
2361
+ }
2362
+ } catch (error) {
2363
+ this.debug("Error processing event:", error);
2364
+ }
2365
+ }
2366
+ }
2367
+ } catch (error) {
2368
+ this.debug(`Error processing record with ID ${key}:`, error);
2369
+ }
2370
+ }
2371
+ await this.client.execute(`DROP TABLE ${oldTableName};`);
2372
+ await this.client.execute(`ALTER TABLE ${tempTableName} RENAME TO ${oldTableName};`);
2373
+ await this.client.execute(`
2374
+ CREATE INDEX IF NOT EXISTS idx_${oldTableName}_agent_id
2375
+ ON ${oldTableName}(agent_id)
2376
+ `);
2377
+ await this.client.execute("COMMIT;");
2378
+ this.debug(`Migrated ${migratedCount} records successfully`);
2379
+ if (createBackup && deleteBackupAfterSuccess) {
2380
+ await this.client.execute(`DROP TABLE IF EXISTS ${oldTableBackup};`);
2381
+ this.debug("Unnecessary backup deleted");
2382
+ }
2383
+ await this.setMigrationFlag("agent_history_data_migration", migratedCount);
2384
+ return {
2385
+ success: true,
2386
+ migratedCount,
2387
+ backupCreated: createBackup && !deleteBackupAfterSuccess
2388
+ };
2389
+ } catch (error) {
2390
+ // the failure may have occurred before BEGIN TRANSACTION, so rollback errors are logged and ignored
+ await this.client.execute("ROLLBACK;").catch((rollbackError) => this.debug("Error rolling back transaction:", rollbackError));
2391
+ this.debug("Error occurred while migrating agent history data:", error);
2392
+ return {
2393
+ success: false,
2394
+ error: error instanceof Error ? error : new Error(String(error)),
2395
+ backupCreated: options.createBackup
2396
+ };
2397
+ }
2398
+ }
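+ /*
+  * Illustrative usage sketch (editor's addition): like the conversation-schema migration below,
+  * this is a destructive, one-shot data migration guarded by a migration flag. Keeping the
+  * default backup and restoring from it on failure is the conservative path. Values are
+  * hypothetical; `storage` is an assumed LibSQLStorage instance.
+  *
+  *   const result = await storage.migrateAgentHistoryData({ createBackup: true });
+  *   if (!result.success) {
+  *     await storage.migrateAgentHistoryData({ restoreFromBackup: true });
+  *   }
+  */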
2399
+ /**
2400
+ * Migrate conversation schema to add user_id and update messages table
2401
+ *
2402
+ * ⚠️ **CRITICAL WARNING: DESTRUCTIVE OPERATION** ⚠️
2403
+ *
2404
+ * This method performs a DESTRUCTIVE schema migration that:
2405
+ * - DROPS and recreates existing tables
2406
+ * - Creates temporary tables during migration
2407
+ * - Modifies the primary key structure of the messages table
2408
+ * - Can cause DATA LOSS if interrupted or if errors occur
2409
+ *
2410
+ * **IMPORTANT SAFETY REQUIREMENTS:**
2411
+ * - 🛑 STOP all application instances before running this migration
2412
+ * - 🛑 Ensure NO concurrent database operations are running
2413
+ * - 🛑 Take a full database backup before running (independent of built-in backup)
2414
+ * - 🛑 Test the migration on a copy of production data first
2415
+ * - 🛑 Plan for downtime during migration execution
2416
+ *
2417
+ * **What this migration does:**
2418
+ * 1. Creates backup tables (if createBackup=true)
2419
+ * 2. Creates temporary tables with new schema
2420
+ * 3. Migrates data from old tables to new schema
2421
+ * 4. DROPS original tables
2422
+ * 5. Renames temporary tables to original names
2423
+ * 6. All operations are wrapped in a transaction for atomicity
2424
+ *
2425
+ * @param options Migration configuration options
2426
+ * @param options.createBackup Whether to create backup tables before migration (default: true, HIGHLY RECOMMENDED)
2427
+ * @param options.restoreFromBackup Whether to restore from existing backup instead of migrating (default: false)
2428
+ * @param options.deleteBackupAfterSuccess Whether to delete backup tables after successful migration (default: false)
2429
+ *
2430
+ * @returns Promise resolving to migration result with success status, migrated count, and backup info
2431
+ *
2432
+ * @example
2433
+ * ```typescript
2434
+ * // RECOMMENDED: Run with backup creation (default)
2435
+ * const result = await storage.migrateConversationSchema({
2436
+ * createBackup: true,
2437
+ * deleteBackupAfterSuccess: false // Keep backup for safety
2438
+ * });
2439
+ *
2440
+ * if (result.success) {
2441
+ * console.log(`Migrated ${result.migratedCount} conversations successfully`);
2442
+ * } else {
2443
+ * console.error('Migration failed:', result.error);
2444
+ * // Consider restoring from backup
2445
+ * }
2446
+ *
2447
+ * // If migration fails, restore from backup:
2448
+ * const restoreResult = await storage.migrateConversationSchema({
2449
+ * restoreFromBackup: true
2450
+ * });
2451
+ * ```
2452
+ *
2453
+ * @remarks Failures are caught and surfaced via the returned result object ({ success: false, error }); the transaction is rolled back and no error is thrown
2454
+ *
2455
+ * @since This migration is typically only needed when upgrading from older schema versions
2456
+ */
2457
+ async migrateConversationSchema(options = {}) {
2458
+ const {
2459
+ createBackup = true,
2460
+ restoreFromBackup = false,
2461
+ deleteBackupAfterSuccess = false
2462
+ } = options;
2463
+ const conversationsTableName = `${this.options.tablePrefix}_conversations`;
2464
+ const messagesTableName = `${this.options.tablePrefix}_messages`;
2465
+ const conversationsBackupName = `${conversationsTableName}_backup`;
2466
+ const messagesBackupName = `${messagesTableName}_backup`;
2467
+ try {
2468
+ this.debug("Starting conversation schema migration...");
2469
+ const flagCheck = await this.checkMigrationFlag("conversation_schema_migration");
2470
+ if (flagCheck.alreadyCompleted) {
2471
+ return { success: true, migratedCount: 0 };
2472
+ }
2473
+ if (restoreFromBackup) {
2474
+ this.debug("Starting restoration from backup...");
2475
+ const convBackupCheck = await this.client.execute({
2476
+ sql: "SELECT name FROM sqlite_master WHERE type='table' AND name=?",
2477
+ args: [conversationsBackupName]
2478
+ });
2479
+ const msgBackupCheck = await this.client.execute({
2480
+ sql: "SELECT name FROM sqlite_master WHERE type='table' AND name=?",
2481
+ args: [messagesBackupName]
2482
+ });
2483
+ if (convBackupCheck.rows.length === 0 || msgBackupCheck.rows.length === 0) {
2484
+ throw new Error("No backup found to restore");
2485
+ }
2486
+ await this.client.execute("BEGIN TRANSACTION;");
2487
+ await this.client.execute(`DROP TABLE IF EXISTS ${conversationsTableName};`);
2488
+ await this.client.execute(`DROP TABLE IF EXISTS ${messagesTableName};`);
2489
+ await this.client.execute(
2490
+ `ALTER TABLE ${conversationsBackupName} RENAME TO ${conversationsTableName};`
2491
+ );
2492
+ await this.client.execute(
2493
+ `ALTER TABLE ${messagesBackupName} RENAME TO ${messagesTableName};`
2494
+ );
2495
+ await this.client.execute("COMMIT;");
2496
+ this.debug("Restoration from backup completed successfully");
2497
+ return { success: true, backupCreated: false };
2498
+ }
2499
+ const convTableInfo = await this.client.execute(
2500
+ `PRAGMA table_info(${conversationsTableName})`
2501
+ );
2502
+ const msgTableInfo = await this.client.execute(`PRAGMA table_info(${messagesTableName})`);
2503
+ const hasUserIdInConversations = convTableInfo.rows.some((row) => row.name === "user_id");
2504
+ const hasUserIdInMessages = msgTableInfo.rows.some((row) => row.name === "user_id");
2505
+ if (hasUserIdInConversations && !hasUserIdInMessages) {
2506
+ this.debug("Tables are already in new format, migration not needed");
2507
+ return { success: true, migratedCount: 0 };
2508
+ }
2509
+ if (convTableInfo.rows.length === 0 && msgTableInfo.rows.length === 0) {
2510
+ this.debug("Tables don't exist, migration not needed");
2511
+ return { success: true, migratedCount: 0 };
2512
+ }
2513
+ if (createBackup) {
2514
+ this.debug("Creating backups...");
2515
+ await this.client.execute(`DROP TABLE IF EXISTS ${conversationsBackupName};`);
2516
+ await this.client.execute(`DROP TABLE IF EXISTS ${messagesBackupName};`);
2517
+ if (convTableInfo.rows.length > 0) {
2518
+ await this.client.execute(
2519
+ `CREATE TABLE ${conversationsBackupName} AS SELECT * FROM ${conversationsTableName};`
2520
+ );
2521
+ }
2522
+ if (msgTableInfo.rows.length > 0) {
2523
+ await this.client.execute(
2524
+ `CREATE TABLE ${messagesBackupName} AS SELECT * FROM ${messagesTableName};`
2525
+ );
2526
+ }
2527
+ this.debug("Backups created successfully");
2528
+ }
2529
+ let conversationData = [];
2530
+ let messageData = [];
2531
+ if (convTableInfo.rows.length > 0) {
2532
+ const convResult = await this.client.execute(`SELECT * FROM ${conversationsTableName}`);
2533
+ conversationData = convResult.rows;
2534
+ }
2535
+ if (msgTableInfo.rows.length > 0) {
2536
+ const msgResult = await this.client.execute(`SELECT * FROM ${messagesTableName}`);
2537
+ messageData = msgResult.rows;
2538
+ }
2539
+ await this.client.execute("BEGIN TRANSACTION;");
2540
+ const tempConversationsTable = `${conversationsTableName}_temp`;
2541
+ const tempMessagesTable = `${messagesTableName}_temp`;
2542
+ await this.client.execute(`
2543
+ CREATE TABLE ${tempConversationsTable} (
2544
+ id TEXT PRIMARY KEY,
2545
+ resource_id TEXT NOT NULL,
2546
+ user_id TEXT NOT NULL,
2547
+ title TEXT NOT NULL,
2548
+ metadata TEXT NOT NULL,
2549
+ created_at TEXT NOT NULL,
2550
+ updated_at TEXT NOT NULL
2551
+ )
2552
+ `);
2553
+ await this.client.execute(`
2554
+ CREATE TABLE ${tempMessagesTable} (
2555
+ conversation_id TEXT NOT NULL,
2556
+ message_id TEXT NOT NULL,
2557
+ role TEXT NOT NULL,
2558
+ content TEXT NOT NULL,
2559
+ type TEXT NOT NULL,
2560
+ created_at TEXT NOT NULL,
2561
+ PRIMARY KEY (conversation_id, message_id)
2562
+ )
2563
+ `);
2564
+ let migratedCount = 0;
2565
+ const createdConversations = /* @__PURE__ */ new Set();
2566
+ for (const row of messageData) {
2567
+ const conversationId = row.conversation_id;
2568
+ let userId = "default";
2569
+ if (hasUserIdInMessages && row.user_id) {
2570
+ userId = row.user_id;
2571
+ }
2572
+ if (!createdConversations.has(conversationId)) {
2573
+ const existingConversation = conversationData.find((conv) => conv.id === conversationId);
2574
+ if (existingConversation) {
2575
+ let convUserId = userId;
2576
+ if (hasUserIdInConversations && existingConversation.user_id) {
2577
+ convUserId = existingConversation.user_id;
2578
+ }
2579
+ await this.client.execute({
2580
+ sql: `INSERT INTO ${tempConversationsTable}
2581
+ (id, resource_id, user_id, title, metadata, created_at, updated_at)
2582
+ VALUES (?, ?, ?, ?, ?, ?, ?)`,
2583
+ args: [
2584
+ existingConversation.id,
2585
+ existingConversation.resource_id,
2586
+ convUserId,
2587
+ existingConversation.title,
2588
+ existingConversation.metadata,
2589
+ existingConversation.created_at,
2590
+ existingConversation.updated_at
2591
+ ]
2592
+ });
2593
+ } else {
2594
+ const now = (/* @__PURE__ */ new Date()).toISOString();
2595
+ await this.client.execute({
2596
+ sql: `INSERT INTO ${tempConversationsTable}
2597
+ (id, resource_id, user_id, title, metadata, created_at, updated_at)
2598
+ VALUES (?, ?, ?, ?, ?, ?, ?)`,
2599
+ args: [
2600
+ conversationId,
2601
+ "default",
2602
+ // Default resource_id for auto-created conversations
2603
+ userId,
2604
+ "Migrated Conversation",
2605
+ // Default title
2606
+ (0, import_utils2.safeStringify)({}),
2607
+ // Empty metadata
2608
+ now,
2609
+ now
2610
+ ]
2611
+ });
2612
+ }
2613
+ createdConversations.add(conversationId);
2614
+ migratedCount++;
2615
+ }
2616
+ await this.client.execute({
2617
+ sql: `INSERT INTO ${tempMessagesTable}
2618
+ (conversation_id, message_id, role, content, type, created_at)
2619
+ VALUES (?, ?, ?, ?, ?, ?)`,
2620
+ args: [
2621
+ row.conversation_id,
2622
+ row.message_id,
2623
+ row.role,
2624
+ row.content,
2625
+ row.type,
2626
+ row.created_at
2627
+ ]
2628
+ });
2629
+ }
2630
+ for (const row of conversationData) {
2631
+ const conversationId = row.id;
2632
+ if (!createdConversations.has(conversationId)) {
2633
+ let userId = "default";
2634
+ if (hasUserIdInConversations && row.user_id) {
2635
+ userId = row.user_id;
2636
+ }
2637
+ await this.client.execute({
2638
+ sql: `INSERT INTO ${tempConversationsTable}
2639
+ (id, resource_id, user_id, title, metadata, created_at, updated_at)
2640
+ VALUES (?, ?, ?, ?, ?, ?, ?)`,
2641
+ args: [
2642
+ row.id,
2643
+ row.resource_id,
2644
+ userId,
2645
+ row.title,
2646
+ row.metadata,
2647
+ row.created_at,
2648
+ row.updated_at
2649
+ ]
2650
+ });
2651
+ migratedCount++;
2652
+ }
2653
+ }
2654
+ await this.client.execute(`DROP TABLE IF EXISTS ${conversationsTableName};`);
2655
+ await this.client.execute(`DROP TABLE IF EXISTS ${messagesTableName};`);
2656
+ await this.client.execute(
2657
+ `ALTER TABLE ${tempConversationsTable} RENAME TO ${conversationsTableName};`
2658
+ );
2659
+ await this.client.execute(`ALTER TABLE ${tempMessagesTable} RENAME TO ${messagesTableName};`);
2660
+ await this.client.execute(`
2661
+ CREATE INDEX IF NOT EXISTS idx_${messagesTableName}_lookup
2662
+ ON ${messagesTableName}(conversation_id, created_at)
2663
+ `);
2664
+ await this.client.execute(`
2665
+ CREATE INDEX IF NOT EXISTS idx_${conversationsTableName}_resource
2666
+ ON ${conversationsTableName}(resource_id)
2667
+ `);
2668
+ await this.client.execute(`
2669
+ CREATE INDEX IF NOT EXISTS idx_${conversationsTableName}_user
2670
+ ON ${conversationsTableName}(user_id)
2671
+ `);
2672
+ await this.client.execute("COMMIT;");
2673
+ if (deleteBackupAfterSuccess) {
2674
+ await this.client.execute(`DROP TABLE IF EXISTS ${conversationsBackupName};`);
2675
+ await this.client.execute(`DROP TABLE IF EXISTS ${messagesBackupName};`);
2676
+ }
2677
+ await this.setMigrationFlag("conversation_schema_migration", migratedCount);
2678
+ this.debug(
2679
+ `Conversation schema migration completed successfully. Migrated ${migratedCount} conversations.`
2680
+ );
2681
+ return {
2682
+ success: true,
2683
+ migratedCount,
2684
+ backupCreated: createBackup
2685
+ };
2686
+ } catch (error) {
2687
+ this.debug("Error during conversation schema migration:", error);
2688
+ try {
2689
+ await this.client.execute("ROLLBACK;");
2690
+ } catch (rollbackError) {
2691
+ this.debug("Error rolling back transaction:", rollbackError);
2692
+ }
2693
+ return {
2694
+ success: false,
2695
+ error,
2696
+ backupCreated: createBackup
2697
+ };
2698
+ }
2699
+ }
2700
+ /**
2701
+ * Get conversations for a user with a fluent query builder interface
2702
+ * @param userId User ID to filter by
2703
+ * @returns Query builder object
2704
+ */
2705
+ getUserConversations(userId) {
2706
+ return {
2707
+ /**
2708
+ * Limit the number of results
2709
+ * @param count Number of conversations to return
2710
+ * @returns Query builder
2711
+ */
2712
+ limit: /* @__PURE__ */ __name((count) => ({
2713
+ /**
2714
+ * Order results by a specific field
2715
+ * @param field Field to order by
2716
+ * @param direction Sort direction
2717
+ * @returns Query builder
2718
+ */
2719
+ orderBy: /* @__PURE__ */ __name((field = "updated_at", direction = "DESC") => ({
2720
+ /**
2721
+ * Execute the query and return results
2722
+ * @returns Promise of conversations
2723
+ */
2724
+ execute: /* @__PURE__ */ __name(() => this.getConversationsByUserId(userId, {
2725
+ limit: count,
2726
+ orderBy: field,
2727
+ orderDirection: direction
2728
+ }), "execute")
2729
+ }), "orderBy"),
2730
+ /**
2731
+ * Execute the query with default ordering
2732
+ * @returns Promise of conversations
2733
+ */
2734
+ execute: /* @__PURE__ */ __name(() => this.getConversationsByUserId(userId, { limit: count }), "execute")
2735
+ }), "limit"),
2736
+ /**
2737
+ * Order results by a specific field
2738
+ * @param field Field to order by
2739
+ * @param direction Sort direction
2740
+ * @returns Query builder
2741
+ */
2742
+ orderBy: /* @__PURE__ */ __name((field = "updated_at", direction = "DESC") => ({
2743
+ /**
2744
+ * Limit the number of results
2745
+ * @param count Number of conversations to return
2746
+ * @returns Query builder
2747
+ */
2748
+ limit: /* @__PURE__ */ __name((count) => ({
2749
+ /**
2750
+ * Execute the query and return results
2751
+ * @returns Promise of conversations
2752
+ */
2753
+ execute: /* @__PURE__ */ __name(() => this.getConversationsByUserId(userId, {
2754
+ limit: count,
2755
+ orderBy: field,
2756
+ orderDirection: direction
2757
+ }), "execute")
2758
+ }), "limit"),
2759
+ /**
2760
+ * Execute the query without limit
2761
+ * @returns Promise of conversations
2762
+ */
2763
+ execute: /* @__PURE__ */ __name(() => this.getConversationsByUserId(userId, {
2764
+ orderBy: field,
2765
+ orderDirection: direction
2766
+ }), "execute")
2767
+ }), "orderBy"),
2768
+ /**
2769
+ * Execute the query with default options
2770
+ * @returns Promise of conversations
2771
+ */
2772
+ execute: /* @__PURE__ */ __name(() => this.getConversationsByUserId(userId), "execute")
2773
+ };
2774
+ }
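+ /*
+  * Illustrative usage sketch (editor's addition): the builder simply forwards to
+  * getConversationsByUserId, so limit and orderBy can be chained in either order before
+  * execute(). Values are hypothetical; `storage` is an assumed LibSQLStorage instance.
+  *
+  *   const latestTen = await storage
+  *     .getUserConversations("user-1")
+  *     .limit(10)
+  *     .orderBy("updated_at", "DESC")
+  *     .execute();
+  */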
2775
+ /**
2776
+ * Get conversation by ID and ensure it belongs to the specified user
2777
+ * @param conversationId Conversation ID
2778
+ * @param userId User ID to validate ownership
2779
+ * @returns Conversation or null
2780
+ */
2781
+ async getUserConversation(conversationId, userId) {
2782
+ const conversation = await this.getConversation(conversationId);
2783
+ if (!conversation || conversation.userId !== userId) {
2784
+ return null;
2785
+ }
2786
+ return conversation;
2787
+ }
2788
+ /**
2789
+ * Get paginated conversations for a user
2790
+ * @param userId User ID
2791
+ * @param page Page number (1-based)
2792
+ * @param pageSize Number of items per page
2793
+ * @returns Object with conversations and pagination info
2794
+ */
2795
+ async getPaginatedUserConversations(userId, page = 1, pageSize = 10) {
2796
+ const offset = (page - 1) * pageSize;
2797
+ const conversations = await this.getConversationsByUserId(userId, {
2798
+ limit: pageSize + 1,
2799
+ offset,
2800
+ orderBy: "updated_at",
2801
+ orderDirection: "DESC"
2802
+ });
2803
+ const hasMore = conversations.length > pageSize;
2804
+ const results = hasMore ? conversations.slice(0, pageSize) : conversations;
2805
+ return {
2806
+ conversations: results,
2807
+ page,
2808
+ pageSize,
2809
+ hasMore
2810
+ };
2811
+ }
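+ /*
+  * Illustrative usage sketch (editor's addition): this helper uses one-based pages and fetches
+  * pageSize + 1 rows so hasMore can be reported without a separate COUNT query. Values are
+  * hypothetical; `storage` is an assumed LibSQLStorage instance.
+  *
+  *   const { conversations, hasMore } = await storage.getPaginatedUserConversations("user-1", 2, 10);
+  *   // page 2 at 10 per page; hasMore indicates whether page 3 exists
+  */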
2812
+ /**
2813
+ * Check and create migration flag table, return if migration already completed
2814
+ * @param migrationType Type of migration to check
2815
+ * @returns Object with completion status and details
2816
+ */
2817
+ async checkMigrationFlag(migrationType) {
2818
+ const conversationsTableName = `${this.options.tablePrefix}_conversations`;
2819
+ const migrationFlagTable = `${conversationsTableName}_migration_flags`;
2820
+ try {
2821
+ const result = await this.client.execute({
2822
+ sql: `SELECT * FROM ${migrationFlagTable} WHERE migration_type = ?`,
2823
+ args: [migrationType]
2824
+ });
2825
+ if (result.rows.length > 0) {
2826
+ const migrationFlag = result.rows[0];
2827
+ this.debug(`${migrationType} migration already completed`);
2828
+ this.debug(`Migration completed on: ${migrationFlag.completed_at}`);
2829
+ this.debug(`Migrated ${migrationFlag.migrated_count || 0} records previously`);
2830
+ return {
2831
+ alreadyCompleted: true,
2832
+ migrationCount: migrationFlag.migrated_count,
2833
+ completedAt: migrationFlag.completed_at
2834
+ };
2835
+ }
2836
+ this.debug("Migration flags table found, but no migration flag exists yet");
2837
+ return { alreadyCompleted: false };
2838
+ } catch (flagError) {
2839
+ this.debug("Migration flag table not found, creating it...");
2840
+ this.debug("Original error:", flagError);
2841
+ try {
2842
+ await this.client.execute(`
2843
+ CREATE TABLE IF NOT EXISTS ${migrationFlagTable} (
2844
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
2845
+ migration_type TEXT NOT NULL UNIQUE,
2846
+ completed_at TEXT NOT NULL DEFAULT (datetime('now')),
2847
+ migrated_count INTEGER DEFAULT 0,
2848
+ metadata TEXT DEFAULT '{}'
2849
+ )
2850
+ `);
2851
+ this.debug("Migration flags table created successfully");
2852
+ } catch (createError) {
2853
+ this.debug("Failed to create migration flags table:", createError);
2854
+ }
2855
+ return { alreadyCompleted: false };
2856
+ }
2857
+ }
2858
+ /**
2859
+ * Set migration flag after successful completion
2860
+ * @param migrationType Type of migration completed
2861
+ * @param migratedCount Number of records migrated
2862
+ */
2863
+ async setMigrationFlag(migrationType, migratedCount) {
2864
+ try {
2865
+ const conversationsTableName = `${this.options.tablePrefix}_conversations`;
2866
+ const migrationFlagTable = `${conversationsTableName}_migration_flags`;
2867
+ await this.client.execute({
2868
+ sql: `INSERT OR REPLACE INTO ${migrationFlagTable}
2869
+ (migration_type, completed_at, migrated_count)
2870
+ VALUES (?, datetime('now'), ?)`,
2871
+ args: [migrationType, migratedCount]
2872
+ });
2873
+ this.debug("Migration flag set successfully");
2874
+ } catch (flagSetError) {
2875
+ this.debug("Could not set migration flag (non-critical):", flagSetError);
2876
+ }
2877
+ }
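+ /*
+  * Editor's note (illustrative only): checkMigrationFlag/setMigrationFlag implement a simple
+  * idempotency guard. Each completed migration leaves one row in the
+  * `<tablePrefix>_conversations_migration_flags` table, roughly:
+  *
+  *   { migration_type: "conversation_schema_migration", completed_at: "2024-01-01 00:00:00", migrated_count: 42 }
+  *
+  * so re-running a migration method becomes a cheap no-op. The sample row values above are
+  * hypothetical.
+  */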
2878
+ /**
2879
+ * Migrate agent history schema to add userId and conversationId columns
2880
+ */
2881
+ async migrateAgentHistorySchema() {
2882
+ const historyTableName = `${this.options.tablePrefix}_agent_history`;
2883
+ try {
2884
+ this.debug("Starting agent history schema migration...");
2885
+ const flagCheck = await this.checkMigrationFlag("agent_history_schema_migration");
2886
+ if (flagCheck.alreadyCompleted) {
2887
+ return { success: true };
2888
+ }
2889
+ const tableInfo = await this.client.execute(`PRAGMA table_info(${historyTableName})`);
2890
+ if (tableInfo.rows.length === 0) {
2891
+ this.debug("Agent history table doesn't exist, migration not needed");
2892
+ return { success: true };
2893
+ }
2894
+ const hasUserIdColumn = tableInfo.rows.some((row) => row.name === "userId");
2895
+ const hasConversationIdColumn = tableInfo.rows.some((row) => row.name === "conversationId");
2896
+ if (hasUserIdColumn && hasConversationIdColumn) {
2897
+ this.debug("Both userId and conversationId columns already exist, skipping migration");
2898
+ await this.setMigrationFlag("agent_history_schema_migration", 0);
2899
+ return { success: true };
2900
+ }
2901
+ if (!hasUserIdColumn) {
2902
+ await this.client.execute(`ALTER TABLE ${historyTableName} ADD COLUMN userId TEXT`);
2903
+ this.debug("Added userId column to agent history table");
2904
+ }
2905
+ if (!hasConversationIdColumn) {
2906
+ await this.client.execute(`ALTER TABLE ${historyTableName} ADD COLUMN conversationId TEXT`);
2907
+ this.debug("Added conversationId column to agent history table");
2908
+ }
2909
+ if (!hasUserIdColumn) {
2910
+ await this.client.execute(`
2911
+ CREATE INDEX IF NOT EXISTS idx_${historyTableName}_userId
2912
+ ON ${historyTableName}(userId)
2913
+ `);
2914
+ }
2915
+ if (!hasConversationIdColumn) {
2916
+ await this.client.execute(`
2917
+ CREATE INDEX IF NOT EXISTS idx_${historyTableName}_conversationId
2918
+ ON ${historyTableName}(conversationId)
2919
+ `);
2920
+ }
2921
+ await this.setMigrationFlag("agent_history_schema_migration", 0);
2922
+ this.debug("Agent history schema migration completed successfully");
2923
+ return { success: true };
2924
+ } catch (error) {
2925
+ this.debug("Error during agent history schema migration:", error);
2926
+ return {
2927
+ success: false,
2928
+ error
2929
+ };
2930
+ }
2931
+ }
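+ /*
+  * Illustrative usage sketch (editor's addition): unlike the data migrations above, this one is
+  * additive (ALTER TABLE ... ADD COLUMN plus indexes) and guarded by a migration flag, so
+  * repeated calls short-circuit. `storage` is an assumed LibSQLStorage instance.
+  *
+  *   const { success, error } = await storage.migrateAgentHistorySchema();
+  *   if (!success) console.error("agent history schema migration failed", error);
+  */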
2932
+ // ===== WorkflowMemory Interface Implementation =====
2933
+ // Delegate all workflow operations to the workflow extension
2934
+ async storeWorkflowHistory(entry) {
2935
+ await this.initialized;
2936
+ return this.workflowExtension.storeWorkflowHistory(entry);
2937
+ }
2938
+ async getWorkflowHistory(id) {
2939
+ await this.initialized;
2940
+ return this.workflowExtension.getWorkflowHistory(id);
2941
+ }
2942
+ async getWorkflowHistoryByWorkflowId(workflowId) {
2943
+ await this.initialized;
2944
+ return this.workflowExtension.getWorkflowHistoryByWorkflowId(workflowId);
2945
+ }
2946
+ async updateWorkflowHistory(id, updates) {
2947
+ await this.initialized;
2948
+ return this.workflowExtension.updateWorkflowHistory(id, updates);
2949
+ }
2950
+ async deleteWorkflowHistory(id) {
2951
+ await this.initialized;
2952
+ return this.workflowExtension.deleteWorkflowHistory(id);
2953
+ }
2954
+ async storeWorkflowStep(step) {
2955
+ await this.initialized;
2956
+ return this.workflowExtension.storeWorkflowStep(step);
2957
+ }
2958
+ async getWorkflowStep(id) {
2959
+ await this.initialized;
2960
+ return this.workflowExtension.getWorkflowStep(id);
2961
+ }
2962
+ async getWorkflowSteps(workflowHistoryId) {
2963
+ await this.initialized;
2964
+ return this.workflowExtension.getWorkflowSteps(workflowHistoryId);
2965
+ }
2966
+ async updateWorkflowStep(id, updates) {
2967
+ await this.initialized;
2968
+ return this.workflowExtension.updateWorkflowStep(id, updates);
2969
+ }
2970
+ async deleteWorkflowStep(id) {
2971
+ await this.initialized;
2972
+ return this.workflowExtension.deleteWorkflowStep(id);
2973
+ }
2974
+ async storeWorkflowTimelineEvent(event) {
2975
+ await this.initialized;
2976
+ return this.workflowExtension.storeWorkflowTimelineEvent(event);
2977
+ }
2978
+ async getWorkflowTimelineEvent(id) {
2979
+ await this.initialized;
2980
+ return this.workflowExtension.getWorkflowTimelineEvent(id);
2981
+ }
2982
+ async getWorkflowTimelineEvents(workflowHistoryId) {
2983
+ await this.initialized;
2984
+ return this.workflowExtension.getWorkflowTimelineEvents(workflowHistoryId);
2985
+ }
2986
+ async deleteWorkflowTimelineEvent(id) {
2987
+ await this.initialized;
2988
+ return this.workflowExtension.deleteWorkflowTimelineEvent(id);
2989
+ }
2990
+ async getAllWorkflowIds() {
2991
+ await this.initialized;
2992
+ return this.workflowExtension.getAllWorkflowIds();
2993
+ }
2994
+ async getWorkflowStats(workflowId) {
2995
+ await this.initialized;
2996
+ return this.workflowExtension.getWorkflowStats(workflowId);
2997
+ }
2998
+ async getWorkflowHistoryWithStepsAndEvents(id) {
2999
+ await this.initialized;
3000
+ return this.workflowExtension.getWorkflowHistoryWithStepsAndEvents(id);
3001
+ }
3002
+ async deleteWorkflowHistoryWithRelated(id) {
3003
+ await this.initialized;
3004
+ return this.workflowExtension.deleteWorkflowHistoryWithRelated(id);
3005
+ }
3006
+ async cleanupOldWorkflowHistories(workflowId, maxEntries) {
3007
+ await this.initialized;
3008
+ return this.workflowExtension.cleanupOldWorkflowHistories(workflowId, maxEntries);
3009
+ }
3010
+ /**
3011
+ * Get the workflow extension for advanced workflow operations
3012
+ */
3013
+ getWorkflowExtension() {
3014
+ return this.workflowExtension;
3015
+ }
3016
+ };
3017
+ // Annotate the CommonJS export names for ESM import in node:
3018
+ 0 && (module.exports = {
3019
+ LibSQLStorage
3020
+ });
3021
+ //# sourceMappingURL=index.js.map