@voltagent/libsql 1.0.14 → 2.0.0

This diff shows the content of the publicly available package versions as released to their respective public registries; it is provided for informational purposes only and reflects the changes between those published versions.
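For context on how the new edge build is meant to be used, here is a minimal construction sketch based on the LibSQLMemoryAdapterEdge constructor that appears later in this file. The "@voltagent/libsql/edge" import path and the export name are assumptions (the bundle is published as dist/edge.mjs, but the package's exports map is not part of this diff); the option values shown are the defaults visible in LibSQLMemoryCore.

  // Assumed entry point; verify the package "exports" map before relying on this path.
  import { LibSQLMemoryAdapterEdge } from "@voltagent/libsql/edge";

  // The edge adapter only accepts remote Turso URLs; file: and :memory: URLs, or a
  // missing authToken, cause the constructor to throw.
  const memory = new LibSQLMemoryAdapterEdge({
    url: "libsql://your-database.turso.io", // must start with "libsql://"
    authToken: "<turso-auth-token>",        // required for remote connections
    tablePrefix: "voltagent_memory",        // default prefix used by LibSQLMemoryCore
    maxRetries: 3,                          // retries on SQLITE_BUSY (default 3)
    retryDelayMs: 100,                      // base delay, doubled on each retry attempt
  });

For Node.js environments and file-based databases, the constructor's error message instead points to LibSQLMemoryAdapter from "@voltagent/libsql".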
package/dist/edge.mjs ADDED
@@ -0,0 +1,2167 @@
1
+ var __defProp = Object.defineProperty;
2
+ var __name = (target, value) => __defProp(target, "name", { value, configurable: true });
3
+
4
+ // src/memory-v2-adapter-edge.ts
5
+ import { createClient } from "@libsql/client/web";
6
+ import { AgentRegistry } from "@voltagent/core";
7
+ import { createPinoLogger } from "@voltagent/logger";
8
+
9
+ // src/memory-core.ts
10
+ import { ConversationAlreadyExistsError, ConversationNotFoundError } from "@voltagent/core";
11
+ import { safeStringify } from "@voltagent/internal";
12
+ var LibSQLMemoryCore = class {
13
+ static {
14
+ __name(this, "LibSQLMemoryCore");
15
+ }
16
+ client;
17
+ tablePrefix;
18
+ initialized = false;
19
+ logger;
20
+ maxRetries;
21
+ retryDelayMs;
22
+ url;
23
+ constructor(client, url, options, logger) {
24
+ this.client = client;
25
+ this.url = url;
26
+ this.tablePrefix = options.tablePrefix ?? "voltagent_memory";
27
+ this.maxRetries = options.maxRetries ?? 3;
28
+ this.retryDelayMs = options.retryDelayMs ?? 100;
29
+ this.logger = logger;
30
+ this.logger.debug("LibSQL Memory adapter core initialized", { url: this.url });
31
+ }
32
+ /**
33
+ * Execute a database operation with retry logic
34
+ */
35
+ async executeWithRetry(operation, operationName) {
36
+ let lastError;
37
+ for (let attempt = 0; attempt < this.maxRetries; attempt++) {
38
+ try {
39
+ return await operation();
40
+ } catch (error) {
41
+ lastError = error;
42
+ if (error?.code === "SQLITE_BUSY" || error?.message?.includes("SQLITE_BUSY") || error?.message?.includes("database is locked")) {
43
+ const delay = this.retryDelayMs * 2 ** attempt;
44
+ this.logger.debug(
45
+ `Database busy, retrying ${operationName} (attempt ${attempt + 1}/${this.maxRetries}) after ${delay}ms`
46
+ );
47
+ await new Promise((resolve) => setTimeout(resolve, delay));
48
+ } else {
49
+ throw error;
50
+ }
51
+ }
52
+ }
53
+ this.logger.error(
54
+ `Failed to execute ${operationName} after ${this.maxRetries} attempts`,
55
+ lastError
56
+ );
57
+ throw lastError;
58
+ }
59
+ /**
60
+ * Initialize database schema
61
+ */
62
+ async initialize() {
63
+ if (this.initialized) return;
64
+ const conversationsTable = `${this.tablePrefix}_conversations`;
65
+ const messagesTable = `${this.tablePrefix}_messages`;
66
+ const usersTable = `${this.tablePrefix}_users`;
67
+ const workflowStatesTable = `${this.tablePrefix}_workflow_states`;
68
+ const stepsTable = `${this.tablePrefix}_steps`;
69
+ const isMemoryDb = this.url === ":memory:" || this.url.includes("mode=memory");
70
+ if (!isMemoryDb && (this.url.startsWith("file:") || this.url.startsWith("libsql:"))) {
71
+ try {
72
+ await this.client.execute("PRAGMA journal_mode=WAL");
73
+ this.logger.debug("Set PRAGMA journal_mode=WAL");
74
+ } catch (err) {
75
+ this.logger.debug("Failed to set PRAGMA journal_mode=WAL (non-critical)", { err });
76
+ }
77
+ }
78
+ try {
79
+ await this.client.execute("PRAGMA busy_timeout=5000");
80
+ this.logger.debug("Set PRAGMA busy_timeout=5000");
81
+ } catch (err) {
82
+ this.logger.debug("Failed to set PRAGMA busy_timeout (non-critical)", { err });
83
+ }
84
+ try {
85
+ await this.client.execute("PRAGMA foreign_keys=ON");
86
+ this.logger.debug("Set PRAGMA foreign_keys=ON");
87
+ } catch (err) {
88
+ this.logger.debug("Failed to set PRAGMA foreign_keys (non-critical)", { err });
89
+ }
90
+ this.logger.debug("Applied PRAGMA settings for better concurrency");
91
+ await this.executeWithRetry(async () => {
92
+ await this.client.batch([
93
+ `CREATE TABLE IF NOT EXISTS ${usersTable} (
94
+ id TEXT PRIMARY KEY,
95
+ metadata TEXT,
96
+ created_at TEXT DEFAULT CURRENT_TIMESTAMP,
97
+ updated_at TEXT DEFAULT CURRENT_TIMESTAMP
98
+ )`,
99
+ `CREATE TABLE IF NOT EXISTS ${conversationsTable} (
100
+ id TEXT PRIMARY KEY,
101
+ resource_id TEXT NOT NULL,
102
+ user_id TEXT NOT NULL,
103
+ title TEXT NOT NULL,
104
+ metadata TEXT NOT NULL,
105
+ created_at TEXT NOT NULL,
106
+ updated_at TEXT NOT NULL
107
+ )`,
108
+ `CREATE TABLE IF NOT EXISTS ${messagesTable} (
109
+ conversation_id TEXT NOT NULL,
110
+ message_id TEXT NOT NULL,
111
+ user_id TEXT NOT NULL,
112
+ role TEXT NOT NULL,
113
+ parts TEXT NOT NULL,
114
+ metadata TEXT,
115
+ format_version INTEGER DEFAULT 2,
116
+ created_at TEXT NOT NULL,
117
+ PRIMARY KEY (conversation_id, message_id)
118
+ )`,
119
+ `CREATE TABLE IF NOT EXISTS ${workflowStatesTable} (
120
+ id TEXT PRIMARY KEY,
121
+ workflow_id TEXT NOT NULL,
122
+ workflow_name TEXT NOT NULL,
123
+ status TEXT NOT NULL,
124
+ suspension TEXT,
125
+ events TEXT,
126
+ output TEXT,
127
+ cancellation TEXT,
128
+ user_id TEXT,
129
+ conversation_id TEXT,
130
+ metadata TEXT,
131
+ created_at TEXT NOT NULL,
132
+ updated_at TEXT NOT NULL
133
+ )`,
134
+ `CREATE TABLE IF NOT EXISTS ${stepsTable} (
135
+ id TEXT PRIMARY KEY,
136
+ conversation_id TEXT NOT NULL,
137
+ user_id TEXT NOT NULL,
138
+ agent_id TEXT NOT NULL,
139
+ agent_name TEXT,
140
+ operation_id TEXT,
141
+ step_index INTEGER NOT NULL,
142
+ type TEXT NOT NULL,
143
+ role TEXT NOT NULL,
144
+ content TEXT,
145
+ arguments TEXT,
146
+ result TEXT,
147
+ usage TEXT,
148
+ sub_agent_id TEXT,
149
+ sub_agent_name TEXT,
150
+ created_at TEXT NOT NULL,
151
+ FOREIGN KEY (conversation_id) REFERENCES ${conversationsTable}(id) ON DELETE CASCADE
152
+ )`,
153
+ `CREATE INDEX IF NOT EXISTS idx_${conversationsTable}_user_id ON ${conversationsTable}(user_id)`,
154
+ `CREATE INDEX IF NOT EXISTS idx_${conversationsTable}_resource_id ON ${conversationsTable}(resource_id)`,
155
+ `CREATE INDEX IF NOT EXISTS idx_${messagesTable}_conversation_id ON ${messagesTable}(conversation_id)`,
156
+ `CREATE INDEX IF NOT EXISTS idx_${messagesTable}_created_at ON ${messagesTable}(created_at)`,
157
+ `CREATE INDEX IF NOT EXISTS idx_${workflowStatesTable}_workflow_id ON ${workflowStatesTable}(workflow_id)`,
158
+ `CREATE INDEX IF NOT EXISTS idx_${workflowStatesTable}_status ON ${workflowStatesTable}(status)`,
159
+ `CREATE INDEX IF NOT EXISTS idx_${stepsTable}_conversation ON ${stepsTable}(conversation_id, step_index)`,
160
+ `CREATE INDEX IF NOT EXISTS idx_${stepsTable}_operation ON ${stepsTable}(conversation_id, operation_id)`
161
+ ]);
162
+ }, "initialize database schema");
163
+ await this.addV2ColumnsToMessagesTable();
164
+ await this.migrateDefaultUserIds();
165
+ await this.addWorkflowStateColumns();
166
+ this.initialized = true;
167
+ this.logger.debug("Database schema initialized");
168
+ }
169
+ async addV2ColumnsToMessagesTable() {
170
+ const messagesTableName = `${this.tablePrefix}_messages`;
171
+ try {
172
+ const tableInfo = await this.client.execute(`PRAGMA table_info(${messagesTableName})`);
173
+ const columns = tableInfo.rows.map((row) => row.name);
174
+ if (!columns.includes("parts")) {
175
+ try {
176
+ await this.client.execute(`ALTER TABLE ${messagesTableName} ADD COLUMN parts TEXT`);
177
+ } catch (_e) {
178
+ }
179
+ }
180
+ if (!columns.includes("metadata")) {
181
+ try {
182
+ await this.client.execute(`ALTER TABLE ${messagesTableName} ADD COLUMN metadata TEXT`);
183
+ } catch (_e) {
184
+ }
185
+ }
186
+ if (!columns.includes("format_version")) {
187
+ try {
188
+ await this.client.execute(
189
+ `ALTER TABLE ${messagesTableName} ADD COLUMN format_version INTEGER DEFAULT 2`
190
+ );
191
+ } catch (_e) {
192
+ }
193
+ }
194
+ if (!columns.includes("user_id")) {
195
+ try {
196
+ await this.client.execute(
197
+ `ALTER TABLE ${messagesTableName} ADD COLUMN user_id TEXT NOT NULL DEFAULT 'default'`
198
+ );
199
+ } catch (_e) {
200
+ }
201
+ }
202
+ const contentInfo = tableInfo.rows.find((row) => row.name === "content");
203
+ if (contentInfo && contentInfo.notnull === 1) {
204
+ try {
205
+ await this.client.execute(
206
+ `ALTER TABLE ${messagesTableName} ADD COLUMN content_temp TEXT`
207
+ );
208
+ await this.client.execute(
209
+ `UPDATE ${messagesTableName} SET content_temp = content WHERE content IS NOT NULL`
210
+ );
211
+ try {
212
+ await this.client.execute(`ALTER TABLE ${messagesTableName} DROP COLUMN content`);
213
+ await this.client.execute(
214
+ `ALTER TABLE ${messagesTableName} RENAME COLUMN content_temp TO content`
215
+ );
216
+ } catch (_) {
217
+ }
218
+ } catch (_) {
219
+ }
220
+ }
221
+ const typeInfo = tableInfo.rows.find((row) => row.name === "type");
222
+ if (typeInfo && typeInfo.notnull === 1) {
223
+ try {
224
+ await this.client.execute(`ALTER TABLE ${messagesTableName} ADD COLUMN type_temp TEXT`);
225
+ await this.client.execute(
226
+ `UPDATE ${messagesTableName} SET type_temp = type WHERE type IS NOT NULL`
227
+ );
228
+ try {
229
+ await this.client.execute(`ALTER TABLE ${messagesTableName} DROP COLUMN type`);
230
+ await this.client.execute(
231
+ `ALTER TABLE ${messagesTableName} RENAME COLUMN type_temp TO type`
232
+ );
233
+ } catch (_) {
234
+ }
235
+ } catch (_) {
236
+ }
237
+ }
238
+ } catch (_) {
239
+ }
240
+ }
241
+ async migrateDefaultUserIds() {
242
+ const messagesTableName = `${this.tablePrefix}_messages`;
243
+ const conversationsTableName = `${this.tablePrefix}_conversations`;
244
+ try {
245
+ const checkResult = await this.client.execute({
246
+ sql: `SELECT COUNT(*) as count FROM ${messagesTableName} WHERE user_id = 'default'`,
247
+ args: []
248
+ });
249
+ const defaultCount = checkResult.rows[0]?.count || 0;
250
+ if (defaultCount === 0) {
251
+ return;
252
+ }
253
+ this.logger.debug(`Found ${defaultCount} messages with default user_id, starting migration`);
254
+ await this.executeWithRetry(async () => {
255
+ const result = await this.client.execute({
256
+ sql: `UPDATE ${messagesTableName}
257
+ SET user_id = (
258
+ SELECT c.user_id
259
+ FROM ${conversationsTableName} c
260
+ WHERE c.id = ${messagesTableName}.conversation_id
261
+ )
262
+ WHERE user_id = 'default'
263
+ AND EXISTS (
264
+ SELECT 1
265
+ FROM ${conversationsTableName} c
266
+ WHERE c.id = ${messagesTableName}.conversation_id
267
+ )`,
268
+ args: []
269
+ });
270
+ const updatedCount = result.rowsAffected || 0;
271
+ this.logger.info(
272
+ `Successfully migrated ${updatedCount} messages from default user_id to actual user_ids`
273
+ );
274
+ const remainingResult = await this.client.execute({
275
+ sql: `SELECT COUNT(*) as count FROM ${messagesTableName} WHERE user_id = 'default'`,
276
+ args: []
277
+ });
278
+ const remainingCount = remainingResult.rows[0]?.count || 0;
279
+ if (remainingCount > 0) {
280
+ this.logger.warn(
281
+ `${remainingCount} messages still have default user_id (possibly orphaned messages without valid conversations)`
282
+ );
283
+ }
284
+ }, "migrate default user_ids");
285
+ } catch (error) {
286
+ this.logger.error("Failed to migrate default user_ids", error);
287
+ }
288
+ }
289
+ async addWorkflowStateColumns() {
290
+ const workflowStatesTable = `${this.tablePrefix}_workflow_states`;
291
+ try {
292
+ const tableInfo = await this.client.execute(`PRAGMA table_info(${workflowStatesTable})`);
293
+ const columns = tableInfo.rows.map((row) => row.name);
294
+ if (!columns.includes("events")) {
295
+ try {
296
+ await this.client.execute(`ALTER TABLE ${workflowStatesTable} ADD COLUMN events TEXT`);
297
+ this.logger.debug("Added 'events' column to workflow_states table");
298
+ } catch (_e) {
299
+ }
300
+ }
301
+ if (!columns.includes("output")) {
302
+ try {
303
+ await this.client.execute(`ALTER TABLE ${workflowStatesTable} ADD COLUMN output TEXT`);
304
+ this.logger.debug("Added 'output' column to workflow_states table");
305
+ } catch (_e) {
306
+ }
307
+ }
308
+ if (!columns.includes("cancellation")) {
309
+ try {
310
+ await this.client.execute(
311
+ `ALTER TABLE ${workflowStatesTable} ADD COLUMN cancellation TEXT`
312
+ );
313
+ this.logger.debug("Added 'cancellation' column to workflow_states table");
314
+ } catch (_e) {
315
+ }
316
+ }
317
+ } catch (error) {
318
+ this.logger.warn("Failed to add workflow state columns (non-critical)", error);
319
+ }
320
+ }
321
+ // ============================================================================
322
+ // Message Operations
323
+ // ============================================================================
324
+ async addMessage(message, userId, conversationId) {
325
+ await this.initialize();
326
+ const messagesTable = `${this.tablePrefix}_messages`;
327
+ const conversation = await this.getConversation(conversationId);
328
+ if (!conversation) {
329
+ throw new ConversationNotFoundError(conversationId);
330
+ }
331
+ await this.executeWithRetry(async () => {
332
+ await this.client.execute({
333
+ sql: `INSERT INTO ${messagesTable} (conversation_id, message_id, user_id, role, parts, metadata, format_version, created_at)
334
+ VALUES (?, ?, ?, ?, ?, ?, ?, ?)
335
+ ON CONFLICT(conversation_id, message_id) DO UPDATE SET
336
+ user_id = excluded.user_id,
337
+ role = excluded.role,
338
+ parts = excluded.parts,
339
+ metadata = excluded.metadata,
340
+ format_version = excluded.format_version`,
341
+ args: [
342
+ conversationId,
343
+ message.id,
344
+ userId,
345
+ message.role,
346
+ safeStringify(message.parts),
347
+ message.metadata ? safeStringify(message.metadata) : null,
348
+ 2,
349
+ (/* @__PURE__ */ new Date()).toISOString()
350
+ ]
351
+ });
352
+ }, "add message");
353
+ }
354
+ async addMessages(messages, userId, conversationId) {
355
+ await this.initialize();
356
+ const messagesTable = `${this.tablePrefix}_messages`;
357
+ const conversation = await this.getConversation(conversationId);
358
+ if (!conversation) {
359
+ throw new ConversationNotFoundError(conversationId);
360
+ }
361
+ const now = (/* @__PURE__ */ new Date()).toISOString();
362
+ await this.executeWithRetry(async () => {
363
+ await this.client.batch(
364
+ messages.map((message) => ({
365
+ sql: `INSERT INTO ${messagesTable} (conversation_id, message_id, user_id, role, parts, metadata, format_version, created_at)
366
+ VALUES (?, ?, ?, ?, ?, ?, ?, ?)
367
+ ON CONFLICT(conversation_id, message_id) DO UPDATE SET
368
+ user_id = excluded.user_id,
369
+ role = excluded.role,
370
+ parts = excluded.parts,
371
+ metadata = excluded.metadata,
372
+ format_version = excluded.format_version`,
373
+ args: [
374
+ conversationId,
375
+ message.id,
376
+ userId,
377
+ message.role,
378
+ safeStringify(message.parts),
379
+ message.metadata ? safeStringify(message.metadata) : null,
380
+ 2,
381
+ now
382
+ ]
383
+ }))
384
+ );
385
+ }, "add batch messages");
386
+ }
387
+ async saveConversationSteps(steps) {
388
+ if (steps.length === 0) return;
389
+ await this.initialize();
390
+ const stepsTable = `${this.tablePrefix}_steps`;
391
+ await this.executeWithRetry(async () => {
392
+ await this.client.batch(
393
+ steps.map((step) => {
394
+ const createdAt = step.createdAt ?? (/* @__PURE__ */ new Date()).toISOString();
395
+ return {
396
+ sql: `INSERT INTO ${stepsTable} (
397
+ id,
398
+ conversation_id,
399
+ user_id,
400
+ agent_id,
401
+ agent_name,
402
+ operation_id,
403
+ step_index,
404
+ type,
405
+ role,
406
+ content,
407
+ arguments,
408
+ result,
409
+ usage,
410
+ sub_agent_id,
411
+ sub_agent_name,
412
+ created_at
413
+ ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
414
+ ON CONFLICT(id) DO UPDATE SET
415
+ conversation_id = excluded.conversation_id,
416
+ user_id = excluded.user_id,
417
+ agent_id = excluded.agent_id,
418
+ agent_name = excluded.agent_name,
419
+ operation_id = excluded.operation_id,
420
+ step_index = excluded.step_index,
421
+ type = excluded.type,
422
+ role = excluded.role,
423
+ content = excluded.content,
424
+ arguments = excluded.arguments,
425
+ result = excluded.result,
426
+ usage = excluded.usage,
427
+ sub_agent_id = excluded.sub_agent_id,
428
+ sub_agent_name = excluded.sub_agent_name,
429
+ created_at = excluded.created_at`,
430
+ args: [
431
+ step.id,
432
+ step.conversationId,
433
+ step.userId,
434
+ step.agentId,
435
+ step.agentName ?? null,
436
+ step.operationId ?? null,
437
+ step.stepIndex,
438
+ step.type,
439
+ step.role,
440
+ step.content ?? null,
441
+ step.arguments ? safeStringify(step.arguments) : null,
442
+ step.result ? safeStringify(step.result) : null,
443
+ step.usage ? safeStringify(step.usage) : null,
444
+ step.subAgentId ?? null,
445
+ step.subAgentName ?? null,
446
+ createdAt
447
+ ]
448
+ };
449
+ })
450
+ );
451
+ }, "save conversation steps");
452
+ }
453
+ async getMessages(userId, conversationId, options) {
454
+ await this.initialize();
455
+ const messagesTable = `${this.tablePrefix}_messages`;
456
+ const { limit, before, after, roles } = options || {};
457
+ let sql = `SELECT * FROM ${messagesTable}
458
+ WHERE conversation_id = ? AND user_id = ?`;
459
+ const args = [conversationId, userId];
460
+ if (roles && roles.length > 0) {
461
+ const placeholders = roles.map(() => "?").join(",");
462
+ sql += ` AND role IN (${placeholders})`;
463
+ args.push(...roles);
464
+ }
465
+ if (before) {
466
+ sql += " AND created_at < ?";
467
+ args.push(before.toISOString());
468
+ }
469
+ if (after) {
470
+ sql += " AND created_at > ?";
471
+ args.push(after.toISOString());
472
+ }
473
+ sql += " ORDER BY created_at ASC";
474
+ if (limit && limit > 0) {
475
+ sql += " LIMIT ?";
476
+ args.push(limit);
477
+ }
478
+ const result = await this.client.execute({ sql, args });
479
+ return result.rows.map((row) => {
480
+ let parts;
481
+ if (row.parts !== void 0 && row.parts !== null) {
482
+ try {
483
+ parts = JSON.parse(row.parts);
484
+ } catch {
485
+ parts = [];
486
+ }
487
+ } else if (row.content !== void 0 && row.content !== null) {
488
+ try {
489
+ const content = JSON.parse(row.content);
490
+ if (typeof content === "string") {
491
+ parts = [{ type: "text", text: content }];
492
+ } else if (Array.isArray(content)) {
493
+ parts = content;
494
+ } else {
495
+ parts = [];
496
+ }
497
+ } catch {
498
+ parts = [{ type: "text", text: row.content }];
499
+ }
500
+ } else {
501
+ parts = [];
502
+ }
503
+ const metadata = row.metadata ? JSON.parse(row.metadata) : {};
504
+ return {
505
+ id: row.message_id,
506
+ role: row.role,
507
+ parts,
508
+ metadata: {
509
+ ...metadata,
510
+ createdAt: row.created_at ? new Date(row.created_at) : void 0
511
+ }
512
+ };
513
+ });
514
+ }
515
+ async getConversationSteps(userId, conversationId, options) {
516
+ await this.initialize();
517
+ const stepsTable = `${this.tablePrefix}_steps`;
518
+ const limit = options?.limit && options.limit > 0 ? options.limit : void 0;
519
+ let sql = `SELECT * FROM ${stepsTable} WHERE conversation_id = ? AND user_id = ?`;
520
+ const args = [conversationId, userId];
521
+ if (options?.operationId) {
522
+ sql += " AND operation_id = ?";
523
+ args.push(options.operationId);
524
+ }
525
+ sql += " ORDER BY step_index ASC";
526
+ if (limit !== void 0) {
527
+ sql += " LIMIT ?";
528
+ args.push(limit);
529
+ }
530
+ const result = await this.client.execute({ sql, args });
531
+ const parseJsonField = /* @__PURE__ */ __name((value) => {
532
+ if (typeof value !== "string" || value.length === 0) {
533
+ return void 0;
534
+ }
535
+ try {
536
+ return JSON.parse(value);
537
+ } catch {
538
+ return void 0;
539
+ }
540
+ }, "parseJsonField");
541
+ return result.rows.map((row) => ({
542
+ id: row.id,
543
+ conversationId: row.conversation_id,
544
+ userId: row.user_id,
545
+ agentId: row.agent_id,
546
+ agentName: row.agent_name ?? void 0,
547
+ operationId: row.operation_id ?? void 0,
548
+ stepIndex: typeof row.step_index === "number" ? row.step_index : Number(row.step_index ?? 0),
549
+ type: row.type,
550
+ role: row.role,
551
+ content: row.content ?? void 0,
552
+ arguments: parseJsonField(row.arguments),
553
+ result: parseJsonField(row.result),
554
+ usage: parseJsonField(row.usage),
555
+ subAgentId: row.sub_agent_id ?? void 0,
556
+ subAgentName: row.sub_agent_name ?? void 0,
557
+ createdAt: row.created_at ?? (/* @__PURE__ */ new Date()).toISOString()
558
+ }));
559
+ }
560
+ async clearMessages(userId, conversationId) {
561
+ await this.initialize();
562
+ const messagesTable = `${this.tablePrefix}_messages`;
563
+ const conversationsTable = `${this.tablePrefix}_conversations`;
564
+ const stepsTable = `${this.tablePrefix}_steps`;
565
+ if (conversationId) {
566
+ await this.client.execute({
567
+ sql: `DELETE FROM ${messagesTable} WHERE conversation_id = ? AND user_id = ?`,
568
+ args: [conversationId, userId]
569
+ });
570
+ await this.client.execute({
571
+ sql: `DELETE FROM ${stepsTable} WHERE conversation_id = ? AND user_id = ?`,
572
+ args: [conversationId, userId]
573
+ });
574
+ } else {
575
+ await this.client.execute({
576
+ sql: `DELETE FROM ${messagesTable}
577
+ WHERE conversation_id IN (
578
+ SELECT id FROM ${conversationsTable} WHERE user_id = ?
579
+ )`,
580
+ args: [userId]
581
+ });
582
+ await this.client.execute({
583
+ sql: `DELETE FROM ${stepsTable}
584
+ WHERE conversation_id IN (
585
+ SELECT id FROM ${conversationsTable} WHERE user_id = ?
586
+ )`,
587
+ args: [userId]
588
+ });
589
+ }
590
+ }
591
+ // ============================================================================
592
+ // Conversation Operations
593
+ // ============================================================================
594
+ async createConversation(input) {
595
+ await this.initialize();
596
+ const conversationsTable = `${this.tablePrefix}_conversations`;
597
+ const existing = await this.getConversation(input.id);
598
+ if (existing) {
599
+ throw new ConversationAlreadyExistsError(input.id);
600
+ }
601
+ const now = (/* @__PURE__ */ new Date()).toISOString();
602
+ await this.executeWithRetry(async () => {
603
+ await this.client.execute({
604
+ sql: `INSERT INTO ${conversationsTable} (id, resource_id, user_id, title, metadata, created_at, updated_at)
605
+ VALUES (?, ?, ?, ?, ?, ?, ?)`,
606
+ args: [
607
+ input.id,
608
+ input.resourceId,
609
+ input.userId,
610
+ input.title,
611
+ safeStringify(input.metadata || {}),
612
+ now,
613
+ now
614
+ ]
615
+ });
616
+ }, "create conversation");
617
+ return {
618
+ id: input.id,
619
+ userId: input.userId,
620
+ resourceId: input.resourceId,
621
+ title: input.title,
622
+ metadata: input.metadata || {},
623
+ createdAt: now,
624
+ updatedAt: now
625
+ };
626
+ }
627
+ async getConversation(id) {
628
+ await this.initialize();
629
+ const conversationsTable = `${this.tablePrefix}_conversations`;
630
+ const result = await this.client.execute({
631
+ sql: `SELECT * FROM ${conversationsTable} WHERE id = ?`,
632
+ args: [id]
633
+ });
634
+ if (result.rows.length === 0) {
635
+ return null;
636
+ }
637
+ const row = result.rows[0];
638
+ return {
639
+ id: row.id,
640
+ userId: row.user_id,
641
+ resourceId: row.resource_id,
642
+ title: row.title,
643
+ metadata: row.metadata ? JSON.parse(row.metadata) : {},
644
+ createdAt: row.created_at,
645
+ updatedAt: row.updated_at
646
+ };
647
+ }
648
+ async getConversations(resourceId) {
649
+ await this.initialize();
650
+ const conversationsTable = `${this.tablePrefix}_conversations`;
651
+ const result = await this.client.execute({
652
+ sql: `SELECT * FROM ${conversationsTable} WHERE resource_id = ? ORDER BY updated_at DESC`,
653
+ args: [resourceId]
654
+ });
655
+ return result.rows.map((row) => ({
656
+ id: row.id,
657
+ userId: row.user_id,
658
+ resourceId: row.resource_id,
659
+ title: row.title,
660
+ metadata: row.metadata ? JSON.parse(row.metadata) : {},
661
+ createdAt: row.created_at,
662
+ updatedAt: row.updated_at
663
+ }));
664
+ }
665
+ async getConversationsByUserId(userId, options) {
666
+ return this.queryConversations({ ...options, userId });
667
+ }
668
+ async queryConversations(options) {
669
+ await this.initialize();
670
+ const conversationsTable = `${this.tablePrefix}_conversations`;
671
+ let sql = `SELECT * FROM ${conversationsTable} WHERE 1=1`;
672
+ const args = [];
673
+ if (options.userId) {
674
+ sql += " AND user_id = ?";
675
+ args.push(options.userId);
676
+ }
677
+ if (options.resourceId) {
678
+ sql += " AND resource_id = ?";
679
+ args.push(options.resourceId);
680
+ }
681
+ const orderBy = options.orderBy || "updated_at";
682
+ const orderDirection = options.orderDirection || "DESC";
683
+ sql += ` ORDER BY ${orderBy} ${orderDirection}`;
684
+ if (options.limit) {
685
+ sql += " LIMIT ?";
686
+ args.push(options.limit);
687
+ }
688
+ if (options.offset) {
689
+ sql += " OFFSET ?";
690
+ args.push(options.offset);
691
+ }
692
+ const result = await this.client.execute({ sql, args });
693
+ return result.rows.map((row) => ({
694
+ id: row.id,
695
+ userId: row.user_id,
696
+ resourceId: row.resource_id,
697
+ title: row.title,
698
+ metadata: row.metadata ? JSON.parse(row.metadata) : {},
699
+ createdAt: row.created_at,
700
+ updatedAt: row.updated_at
701
+ }));
702
+ }
703
+ async updateConversation(id, updates) {
704
+ await this.initialize();
705
+ const conversationsTable = `${this.tablePrefix}_conversations`;
706
+ const conversation = await this.getConversation(id);
707
+ if (!conversation) {
708
+ throw new ConversationNotFoundError(id);
709
+ }
710
+ const now = (/* @__PURE__ */ new Date()).toISOString();
711
+ const fieldsToUpdate = ["updated_at = ?"];
712
+ const args = [now];
713
+ if (updates.title !== void 0) {
714
+ fieldsToUpdate.push("title = ?");
715
+ args.push(updates.title);
716
+ }
717
+ if (updates.resourceId !== void 0) {
718
+ fieldsToUpdate.push("resource_id = ?");
719
+ args.push(updates.resourceId);
720
+ }
721
+ if (updates.metadata !== void 0) {
722
+ fieldsToUpdate.push("metadata = ?");
723
+ args.push(safeStringify(updates.metadata));
724
+ }
725
+ args.push(id);
726
+ await this.client.execute({
727
+ sql: `UPDATE ${conversationsTable} SET ${fieldsToUpdate.join(", ")} WHERE id = ?`,
728
+ args
729
+ });
730
+ const updated = await this.getConversation(id);
731
+ if (!updated) {
732
+ throw new Error(`Conversation not found after update: ${id}`);
733
+ }
734
+ return updated;
735
+ }
736
+ async deleteConversation(id) {
737
+ await this.initialize();
738
+ const conversationsTable = `${this.tablePrefix}_conversations`;
739
+ await this.client.execute({
740
+ sql: `DELETE FROM ${conversationsTable} WHERE id = ?`,
741
+ args: [id]
742
+ });
743
+ }
744
+ // ============================================================================
745
+ // Working Memory Operations
746
+ // ============================================================================
747
+ async getWorkingMemory(params) {
748
+ await this.initialize();
749
+ if (params.scope === "conversation" && params.conversationId) {
750
+ const conversation = await this.getConversation(params.conversationId);
751
+ return conversation?.metadata?.workingMemory || null;
752
+ }
753
+ if (params.scope === "user" && params.userId) {
754
+ const usersTable = `${this.tablePrefix}_users`;
755
+ const result = await this.client.execute({
756
+ sql: `SELECT metadata FROM ${usersTable} WHERE id = ?`,
757
+ args: [params.userId]
758
+ });
759
+ if (result.rows.length > 0) {
760
+ const metadata = result.rows[0].metadata ? JSON.parse(result.rows[0].metadata) : {};
761
+ return metadata.workingMemory || null;
762
+ }
763
+ }
764
+ return null;
765
+ }
766
+ async setWorkingMemory(params) {
767
+ await this.initialize();
768
+ if (params.scope === "conversation" && params.conversationId) {
769
+ const conversation = await this.getConversation(params.conversationId);
770
+ if (!conversation) {
771
+ throw new ConversationNotFoundError(params.conversationId);
772
+ }
773
+ const metadata = conversation.metadata || {};
774
+ metadata.workingMemory = params.content;
775
+ await this.updateConversation(params.conversationId, { metadata });
776
+ }
777
+ if (params.scope === "user" && params.userId) {
778
+ const usersTable = `${this.tablePrefix}_users`;
779
+ const now = (/* @__PURE__ */ new Date()).toISOString();
780
+ const result = await this.client.execute({
781
+ sql: `SELECT metadata FROM ${usersTable} WHERE id = ?`,
782
+ args: [params.userId]
783
+ });
784
+ if (result.rows.length > 0) {
785
+ const metadata = result.rows[0].metadata ? JSON.parse(result.rows[0].metadata) : {};
786
+ metadata.workingMemory = params.content;
787
+ await this.client.execute({
788
+ sql: `UPDATE ${usersTable} SET metadata = ?, updated_at = ? WHERE id = ?`,
789
+ args: [safeStringify(metadata), now, params.userId]
790
+ });
791
+ } else {
792
+ await this.client.execute({
793
+ sql: `INSERT INTO ${usersTable} (id, metadata, created_at, updated_at) VALUES (?, ?, ?, ?)`,
794
+ args: [params.userId, safeStringify({ workingMemory: params.content }), now, now]
795
+ });
796
+ }
797
+ }
798
+ }
799
+ async deleteWorkingMemory(params) {
800
+ await this.initialize();
801
+ if (params.scope === "conversation" && params.conversationId) {
802
+ const conversation = await this.getConversation(params.conversationId);
803
+ if (conversation?.metadata?.workingMemory) {
804
+ const metadata = { ...conversation.metadata };
805
+ delete metadata.workingMemory;
806
+ await this.updateConversation(params.conversationId, { metadata });
807
+ }
808
+ }
809
+ if (params.scope === "user" && params.userId) {
810
+ const usersTable = `${this.tablePrefix}_users`;
811
+ const result = await this.client.execute({
812
+ sql: `SELECT metadata FROM ${usersTable} WHERE id = ?`,
813
+ args: [params.userId]
814
+ });
815
+ if (result.rows.length > 0 && result.rows[0].metadata) {
816
+ const metadata = JSON.parse(result.rows[0].metadata);
817
+ if (metadata.workingMemory) {
818
+ delete metadata.workingMemory;
819
+ await this.client.execute({
820
+ sql: `UPDATE ${usersTable} SET metadata = ?, updated_at = ? WHERE id = ?`,
821
+ args: [safeStringify(metadata), (/* @__PURE__ */ new Date()).toISOString(), params.userId]
822
+ });
823
+ }
824
+ }
825
+ }
826
+ }
827
+ // ============================================================================
828
+ // Workflow State Operations
829
+ // ============================================================================
830
+ async getWorkflowState(executionId) {
831
+ await this.initialize();
832
+ const workflowStatesTable = `${this.tablePrefix}_workflow_states`;
833
+ const result = await this.client.execute({
834
+ sql: `SELECT * FROM ${workflowStatesTable} WHERE id = ?`,
835
+ args: [executionId]
836
+ });
837
+ if (result.rows.length === 0) {
838
+ return null;
839
+ }
840
+ const row = result.rows[0];
841
+ return {
842
+ id: row.id,
843
+ workflowId: row.workflow_id,
844
+ workflowName: row.workflow_name,
845
+ status: row.status,
846
+ suspension: row.suspension ? JSON.parse(row.suspension) : void 0,
847
+ events: row.events ? JSON.parse(row.events) : void 0,
848
+ output: row.output ? JSON.parse(row.output) : void 0,
849
+ cancellation: row.cancellation ? JSON.parse(row.cancellation) : void 0,
850
+ userId: row.user_id,
851
+ conversationId: row.conversation_id,
852
+ metadata: row.metadata ? JSON.parse(row.metadata) : void 0,
853
+ createdAt: new Date(row.created_at),
854
+ updatedAt: new Date(row.updated_at)
855
+ };
856
+ }
857
+ async queryWorkflowRuns(query) {
858
+ await this.initialize();
859
+ const workflowStatesTable = `${this.tablePrefix}_workflow_states`;
860
+ const conditions = [];
861
+ const args = [];
862
+ if (query.workflowId) {
863
+ conditions.push("workflow_id = ?");
864
+ args.push(query.workflowId);
865
+ }
866
+ if (query.status) {
867
+ conditions.push("status = ?");
868
+ args.push(query.status);
869
+ }
870
+ if (query.from) {
871
+ conditions.push("created_at >= ?");
872
+ args.push(query.from.toISOString());
873
+ }
874
+ if (query.to) {
875
+ conditions.push("created_at <= ?");
876
+ args.push(query.to.toISOString());
877
+ }
878
+ let sql = `SELECT * FROM ${workflowStatesTable}`;
879
+ if (conditions.length > 0) {
880
+ sql += ` WHERE ${conditions.join(" AND ")}`;
881
+ }
882
+ sql += " ORDER BY created_at DESC";
883
+ if (query.limit !== void 0) {
884
+ sql += " LIMIT ?";
885
+ args.push(query.limit);
886
+ }
887
+ if (query.offset !== void 0) {
888
+ sql += " OFFSET ?";
889
+ args.push(query.offset);
890
+ }
891
+ const result = await this.client.execute({
892
+ sql,
893
+ args
894
+ });
895
+ return result.rows.map((row) => ({
896
+ id: row.id,
897
+ workflowId: row.workflow_id,
898
+ workflowName: row.workflow_name,
899
+ status: row.status,
900
+ suspension: row.suspension ? JSON.parse(row.suspension) : void 0,
901
+ events: row.events ? JSON.parse(row.events) : void 0,
902
+ output: row.output ? JSON.parse(row.output) : void 0,
903
+ cancellation: row.cancellation ? JSON.parse(row.cancellation) : void 0,
904
+ userId: row.user_id,
905
+ conversationId: row.conversation_id,
906
+ metadata: row.metadata ? JSON.parse(row.metadata) : void 0,
907
+ createdAt: new Date(row.created_at),
908
+ updatedAt: new Date(row.updated_at)
909
+ }));
910
+ }
911
+ async setWorkflowState(executionId, state) {
912
+ await this.initialize();
913
+ const workflowStatesTable = `${this.tablePrefix}_workflow_states`;
914
+ await this.client.execute({
915
+ sql: `INSERT OR REPLACE INTO ${workflowStatesTable}
916
+ (id, workflow_id, workflow_name, status, suspension, events, output, cancellation, user_id, conversation_id, metadata, created_at, updated_at)
917
+ VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
918
+ args: [
919
+ executionId,
920
+ state.workflowId,
921
+ state.workflowName,
922
+ state.status,
923
+ state.suspension ? safeStringify(state.suspension) : null,
924
+ state.events ? safeStringify(state.events) : null,
925
+ state.output ? safeStringify(state.output) : null,
926
+ state.cancellation ? safeStringify(state.cancellation) : null,
927
+ state.userId || null,
928
+ state.conversationId || null,
929
+ state.metadata ? safeStringify(state.metadata) : null,
930
+ state.createdAt.toISOString(),
931
+ state.updatedAt.toISOString()
932
+ ]
933
+ });
934
+ }
935
+ async updateWorkflowState(executionId, updates) {
936
+ await this.initialize();
937
+ const existing = await this.getWorkflowState(executionId);
938
+ if (!existing) {
939
+ throw new Error(`Workflow state ${executionId} not found`);
940
+ }
941
+ const updated = {
942
+ ...existing,
943
+ ...updates,
944
+ updatedAt: /* @__PURE__ */ new Date()
945
+ };
946
+ await this.setWorkflowState(executionId, updated);
947
+ }
948
+ async getSuspendedWorkflowStates(workflowId) {
949
+ await this.initialize();
950
+ const workflowStatesTable = `${this.tablePrefix}_workflow_states`;
951
+ const result = await this.client.execute({
952
+ sql: `SELECT * FROM ${workflowStatesTable} WHERE workflow_id = ? AND status = 'suspended' ORDER BY created_at DESC`,
953
+ args: [workflowId]
954
+ });
955
+ return result.rows.map((row) => ({
956
+ id: row.id,
957
+ workflowId: row.workflow_id,
958
+ workflowName: row.workflow_name,
959
+ status: "suspended",
960
+ suspension: row.suspension ? JSON.parse(row.suspension) : void 0,
961
+ events: row.events ? JSON.parse(row.events) : void 0,
962
+ output: row.output ? JSON.parse(row.output) : void 0,
963
+ cancellation: row.cancellation ? JSON.parse(row.cancellation) : void 0,
964
+ userId: row.user_id,
965
+ conversationId: row.conversation_id,
966
+ metadata: row.metadata ? JSON.parse(row.metadata) : void 0,
967
+ createdAt: new Date(row.created_at),
968
+ updatedAt: new Date(row.updated_at)
969
+ }));
970
+ }
971
+ async close() {
972
+ this.logger.debug("Closing LibSQL Memory adapter");
973
+ }
974
+ };
975
+
976
+ // src/memory-v2-adapter-edge.ts
977
+ var LibSQLMemoryAdapterEdge = class extends LibSQLMemoryCore {
978
+ static {
979
+ __name(this, "LibSQLMemoryAdapterEdge");
980
+ }
981
+ constructor(options) {
982
+ if (!options.url) {
983
+ throw new Error("LibSQLMemoryAdapterEdge requires a url option");
984
+ }
985
+ if (options.url.startsWith("file:") || options.url === ":memory:" || !options.url.startsWith("libsql://")) {
986
+ throw new Error(
987
+ "LibSQLMemoryAdapterEdge only supports remote Turso URLs (libsql://). File-based databases are not supported in edge environments. Use LibSQLMemoryAdapter from '@voltagent/libsql' for Node.js environments."
988
+ );
989
+ }
990
+ if (!options.authToken) {
991
+ throw new Error("LibSQLMemoryAdapterEdge requires an authToken for remote connections");
992
+ }
993
+ const logger = options.logger || AgentRegistry.getInstance().getGlobalLogger() || createPinoLogger({ name: "libsql-memory-edge", level: options.debug ? "debug" : "info" });
994
+ const client = createClient({
995
+ url: options.url,
996
+ authToken: options.authToken
997
+ });
998
+ super(client, options.url, options, logger);
999
+ }
1000
+ };
1001
+
1002
+ // src/observability-adapter-edge.ts
1003
+ import { createClient as createClient2 } from "@libsql/client/web";
1004
+ import { createPinoLogger as createPinoLogger2 } from "@voltagent/logger";
1005
+
1006
+ // src/observability-core.ts
1007
+ import { safeStringify as safeStringify2 } from "@voltagent/internal/utils";
1008
+ var LibSQLObservabilityCore = class {
1009
+ static {
1010
+ __name(this, "LibSQLObservabilityCore");
1011
+ }
1012
+ client;
1013
+ tablePrefix;
1014
+ debug;
1015
+ logger;
1016
+ initialized;
1017
+ maxSpansPerQuery;
1018
+ constructor(client, options, logger) {
1019
+ this.client = client;
1020
+ this.logger = logger;
1021
+ this.tablePrefix = options.tablePrefix || "observability";
1022
+ this.debug = options.debug || false;
1023
+ this.maxSpansPerQuery = options.maxSpansPerQuery || 1e3;
1024
+ this.debugLog("LibSQL observability adapter core initialized", {
1025
+ tablePrefix: this.tablePrefix,
1026
+ debug: this.debug,
1027
+ maxSpansPerQuery: this.maxSpansPerQuery
1028
+ });
1029
+ this.initialized = this.initializeDatabase();
1030
+ }
1031
+ debugLog(message, data) {
1032
+ if (this.debug) {
1033
+ this.logger.debug(`${message}`, data || "");
1034
+ }
1035
+ }
1036
+ async initializeDatabase() {
1037
+ try {
1038
+ await this.client.execute(`
1039
+ CREATE TABLE IF NOT EXISTS ${this.tablePrefix}_spans (
1040
+ span_id TEXT PRIMARY KEY,
1041
+ trace_id TEXT NOT NULL,
1042
+ parent_span_id TEXT,
1043
+ entity_id TEXT,
1044
+ entity_type TEXT,
1045
+ name TEXT NOT NULL,
1046
+ kind INTEGER DEFAULT 0,
1047
+ start_time TEXT NOT NULL,
1048
+ end_time TEXT,
1049
+ duration REAL,
1050
+ status_code INTEGER DEFAULT 0,
1051
+ status_message TEXT,
1052
+ attributes TEXT,
1053
+ events TEXT,
1054
+ links TEXT,
1055
+ resource TEXT,
1056
+ instrumentation_scope TEXT,
1057
+ created_at TEXT DEFAULT CURRENT_TIMESTAMP,
1058
+ updated_at TEXT DEFAULT CURRENT_TIMESTAMP
1059
+ )
1060
+ `);
1061
+ await this.client.execute(`
1062
+ CREATE INDEX IF NOT EXISTS idx_${this.tablePrefix}_spans_trace_id
1063
+ ON ${this.tablePrefix}_spans(trace_id)
1064
+ `);
1065
+ await this.client.execute(`
1066
+ CREATE INDEX IF NOT EXISTS idx_${this.tablePrefix}_spans_parent_span_id
1067
+ ON ${this.tablePrefix}_spans(parent_span_id)
1068
+ `);
1069
+ await this.client.execute(`
1070
+ CREATE INDEX IF NOT EXISTS idx_${this.tablePrefix}_spans_start_time
1071
+ ON ${this.tablePrefix}_spans(start_time)
1072
+ `);
1073
+ await this.client.execute(`
1074
+ CREATE INDEX IF NOT EXISTS idx_${this.tablePrefix}_spans_name
1075
+ ON ${this.tablePrefix}_spans(name)
1076
+ `);
1077
+ await this.client.execute(`
1078
+ CREATE INDEX IF NOT EXISTS idx_${this.tablePrefix}_spans_entity_id
1079
+ ON ${this.tablePrefix}_spans(entity_id)
1080
+ `);
1081
+ await this.client.execute(`
1082
+ CREATE INDEX IF NOT EXISTS idx_${this.tablePrefix}_spans_entity_type
1083
+ ON ${this.tablePrefix}_spans(entity_type)
1084
+ `);
1085
+ await this.client.execute(`
1086
+ CREATE TABLE IF NOT EXISTS ${this.tablePrefix}_traces (
1087
+ trace_id TEXT PRIMARY KEY,
1088
+ root_span_id TEXT,
1089
+ entity_id TEXT,
1090
+ entity_type TEXT,
1091
+ start_time TEXT NOT NULL,
1092
+ end_time TEXT,
1093
+ span_count INTEGER DEFAULT 1,
1094
+ created_at TEXT DEFAULT CURRENT_TIMESTAMP,
1095
+ updated_at TEXT DEFAULT CURRENT_TIMESTAMP
1096
+ )
1097
+ `);
1098
+ await this.client.execute(`
1099
+ CREATE INDEX IF NOT EXISTS idx_${this.tablePrefix}_traces_start_time
1100
+ ON ${this.tablePrefix}_traces(start_time DESC)
1101
+ `);
1102
+ await this.client.execute(`
1103
+ CREATE INDEX IF NOT EXISTS idx_${this.tablePrefix}_traces_entity_id
1104
+ ON ${this.tablePrefix}_traces(entity_id)
1105
+ `);
1106
+ await this.client.execute(`
1107
+ CREATE INDEX IF NOT EXISTS idx_${this.tablePrefix}_traces_entity_type
1108
+ ON ${this.tablePrefix}_traces(entity_type)
1109
+ `);
1110
+ await this.client.execute(`
1111
+ CREATE TABLE IF NOT EXISTS ${this.tablePrefix}_logs (
1112
+ id INTEGER PRIMARY KEY AUTOINCREMENT,
1113
+ timestamp TEXT NOT NULL,
1114
+ trace_id TEXT,
1115
+ span_id TEXT,
1116
+ trace_flags INTEGER,
1117
+ severity_number INTEGER,
1118
+ severity_text TEXT,
1119
+ body TEXT NOT NULL,
1120
+ attributes TEXT,
1121
+ resource TEXT,
1122
+ instrumentation_scope TEXT,
1123
+ created_at TEXT DEFAULT CURRENT_TIMESTAMP
1124
+ )
1125
+ `);
1126
+ await this.client.execute(`
1127
+ CREATE INDEX IF NOT EXISTS idx_${this.tablePrefix}_logs_trace_id
1128
+ ON ${this.tablePrefix}_logs(trace_id)
1129
+ `);
1130
+ await this.client.execute(`
1131
+ CREATE INDEX IF NOT EXISTS idx_${this.tablePrefix}_logs_span_id
1132
+ ON ${this.tablePrefix}_logs(span_id)
1133
+ `);
1134
+ await this.client.execute(`
1135
+ CREATE INDEX IF NOT EXISTS idx_${this.tablePrefix}_logs_timestamp
1136
+ ON ${this.tablePrefix}_logs(timestamp DESC)
1137
+ `);
1138
+ await this.client.execute(`
1139
+ CREATE INDEX IF NOT EXISTS idx_${this.tablePrefix}_logs_severity
1140
+ ON ${this.tablePrefix}_logs(severity_number)
1141
+ `);
1142
+ this.debugLog("Database tables initialized successfully");
1143
+ } catch (error) {
1144
+ this.logger.error("Failed to initialize database tables", { error });
1145
+ throw error;
1146
+ }
1147
+ }
1148
+ async ensureInitialized() {
1149
+ await this.initialized;
1150
+ }
1151
+ async addSpan(span) {
1152
+ await this.ensureInitialized();
1153
+ try {
1154
+ const entityId = span.attributes?.["entity.id"] || null;
1155
+ const entityType = span.attributes?.["entity.type"] || null;
1156
+ await this.client.batch([
1157
+ {
1158
+ sql: `
1159
+ INSERT INTO ${this.tablePrefix}_spans (
1160
+ span_id, trace_id, parent_span_id, entity_id, entity_type, name, kind,
1161
+ start_time, end_time, duration,
1162
+ status_code, status_message,
1163
+ attributes, events, links,
1164
+ resource, instrumentation_scope
1165
+ ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
1166
+ `,
1167
+ args: [
1168
+ span.spanId,
1169
+ span.traceId,
1170
+ span.parentSpanId || null,
1171
+ entityId,
1172
+ entityType,
1173
+ span.name,
1174
+ span.kind,
1175
+ span.startTime,
1176
+ span.endTime || null,
1177
+ span.duration || null,
1178
+ span.status.code,
1179
+ span.status.message || null,
1180
+ safeStringify2(span.attributes),
1181
+ safeStringify2(span.events),
1182
+ span.links ? safeStringify2(span.links) : null,
1183
+ span.resource ? safeStringify2(span.resource) : null,
1184
+ span.instrumentationScope ? safeStringify2(span.instrumentationScope) : null
1185
+ ]
1186
+ },
1187
+ {
1188
+ sql: `
1189
+ INSERT INTO ${this.tablePrefix}_traces (
1190
+ trace_id, root_span_id, entity_id, entity_type, start_time, end_time, span_count
1191
+ ) VALUES (?, ?, ?, ?, ?, ?, 1)
1192
+ ON CONFLICT(trace_id) DO UPDATE SET
1193
+ span_count = span_count + 1,
1194
+ entity_id = COALESCE(excluded.entity_id, entity_id),
1195
+ entity_type = COALESCE(excluded.entity_type, entity_type),
1196
+ start_time = MIN(start_time, excluded.start_time),
1197
+ end_time = MAX(COALESCE(end_time, excluded.end_time), excluded.end_time),
1198
+ updated_at = CURRENT_TIMESTAMP
1199
+ `,
1200
+ args: [
1201
+ span.traceId,
1202
+ span.parentSpanId ? null : span.spanId,
1203
+ entityId,
1204
+ entityType,
1205
+ span.startTime,
1206
+ span.endTime || null
1207
+ ]
1208
+ }
1209
+ ]);
1210
+ this.debugLog("Span added successfully", {
1211
+ spanId: span.spanId,
1212
+ traceId: span.traceId
1213
+ });
1214
+ } catch (error) {
1215
+ this.logger.error("Failed to add span", { error, span });
1216
+ throw error;
1217
+ }
1218
+ }
1219
+ async updateSpan(spanId, updates) {
1220
+ await this.ensureInitialized();
1221
+ try {
1222
+ const setClauses = [];
1223
+ const args = [];
1224
+ if (updates.endTime !== void 0) {
1225
+ setClauses.push("end_time = ?");
1226
+ args.push(updates.endTime);
1227
+ }
1228
+ if (updates.duration !== void 0) {
1229
+ setClauses.push("duration = ?");
1230
+ args.push(updates.duration);
1231
+ }
1232
+ if (updates.status !== void 0) {
1233
+ setClauses.push("status_code = ?, status_message = ?");
1234
+ args.push(updates.status.code, updates.status.message || null);
1235
+ }
1236
+ if (updates.attributes !== void 0) {
1237
+ setClauses.push("attributes = ?");
1238
+ args.push(safeStringify2(updates.attributes));
1239
+ }
1240
+ if (updates.events !== void 0) {
1241
+ setClauses.push("events = ?");
1242
+ args.push(safeStringify2(updates.events));
1243
+ }
1244
+ if (updates.links !== void 0) {
1245
+ setClauses.push("links = ?");
1246
+ args.push(safeStringify2(updates.links));
1247
+ }
1248
+ if (setClauses.length === 0) {
1249
+ return;
1250
+ }
1251
+ setClauses.push("updated_at = CURRENT_TIMESTAMP");
1252
+ args.push(spanId);
1253
+ await this.client.execute({
1254
+ sql: `
1255
+ UPDATE ${this.tablePrefix}_spans
1256
+ SET ${setClauses.join(", ")}
1257
+ WHERE span_id = ?
1258
+ `,
1259
+ args
1260
+ });
1261
+ if (updates.endTime) {
1262
+ const span = await this.getSpan(spanId);
1263
+ if (span) {
1264
+ await this.client.execute({
1265
+ sql: `
1266
+ UPDATE ${this.tablePrefix}_traces
1267
+ SET end_time = MAX(COALESCE(end_time, ?), ?),
1268
+ updated_at = CURRENT_TIMESTAMP
1269
+ WHERE trace_id = ?
1270
+ `,
1271
+ args: [updates.endTime, updates.endTime, span.traceId]
1272
+ });
1273
+ }
1274
+ }
1275
+ this.debugLog("Span updated successfully", { spanId, updates });
1276
+ } catch (error) {
1277
+ this.logger.error("Failed to update span", { error, spanId, updates });
1278
+ throw error;
1279
+ }
1280
+ }
1281
+ async getSpan(spanId) {
1282
+ await this.ensureInitialized();
1283
+ try {
1284
+ const result = await this.client.execute({
1285
+ sql: `SELECT * FROM ${this.tablePrefix}_spans WHERE span_id = ?`,
1286
+ args: [spanId]
1287
+ });
1288
+ if (result.rows.length === 0) {
1289
+ return null;
1290
+ }
1291
+ return this.rowToSpan(result.rows[0]);
1292
+ } catch (error) {
1293
+ this.logger.error("Failed to get span", { error, spanId });
1294
+ throw error;
1295
+ }
1296
+ }
1297
+ async getTrace(traceId) {
1298
+ await this.ensureInitialized();
1299
+ try {
1300
+ const result = await this.client.execute({
1301
+ sql: `
1302
+ SELECT * FROM ${this.tablePrefix}_spans
1303
+ WHERE trace_id = ?
1304
+ ORDER BY start_time ASC
1305
+ LIMIT ?
1306
+ `,
1307
+ args: [traceId, this.maxSpansPerQuery]
1308
+ });
1309
+ return result.rows.map((row) => this.rowToSpan(row));
1310
+ } catch (error) {
1311
+ this.logger.error("Failed to get trace", { error, traceId });
1312
+ throw error;
1313
+ }
1314
+ }
1315
+ async listTraces(limit = 100, offset = 0, filter) {
1316
+ await this.ensureInitialized();
1317
+ try {
1318
+ let sql;
1319
+ let args = [];
1320
+ const conditions = [];
1321
+ if (filter?.entityId) {
1322
+ conditions.push("entity_id = ?");
1323
+ args.push(filter.entityId);
1324
+ }
1325
+ if (filter?.entityType) {
1326
+ conditions.push("entity_type = ?");
1327
+ args.push(filter.entityType);
1328
+ }
1329
+ if (conditions.length > 0) {
1330
+ sql = `
1331
+ SELECT trace_id FROM ${this.tablePrefix}_traces
1332
+ WHERE ${conditions.join(" AND ")}
1333
+ ORDER BY start_time DESC
1334
+ LIMIT ? OFFSET ?
1335
+ `;
1336
+ args.push(limit, offset);
1337
+ } else {
1338
+ sql = `
1339
+ SELECT trace_id FROM ${this.tablePrefix}_traces
1340
+ ORDER BY start_time DESC
1341
+ LIMIT ? OFFSET ?
1342
+ `;
1343
+ args = [limit, offset];
1344
+ }
1345
+ const result = await this.client.execute({ sql, args });
1346
+ return result.rows.map((row) => row.trace_id);
1347
+ } catch (error) {
1348
+ this.logger.error("Failed to list traces", { error, limit, offset, filter });
1349
+ throw error;
1350
+ }
1351
+ }
1352
+ async deleteOldSpans(beforeTimestamp) {
1353
+ await this.ensureInitialized();
1354
+ try {
1355
+ const beforeDate = new Date(beforeTimestamp).toISOString();
1356
+ const tracesResult = await this.client.execute({
1357
+ sql: `SELECT DISTINCT trace_id FROM ${this.tablePrefix}_spans WHERE start_time < ?`,
1358
+ args: [beforeDate]
1359
+ });
1360
+ const affectedTraceIds = tracesResult.rows.map((row) => row.trace_id);
1361
+ const deleteResult = await this.client.execute({
1362
+ sql: `DELETE FROM ${this.tablePrefix}_spans WHERE start_time < ?`,
1363
+ args: [beforeDate]
1364
+ });
1365
+ if (affectedTraceIds.length > 0) {
1366
+ for (const traceId of affectedTraceIds) {
1367
+ const countResult = await this.client.execute({
1368
+ sql: `SELECT COUNT(*) as count FROM ${this.tablePrefix}_spans WHERE trace_id = ?`,
1369
+ args: [traceId]
1370
+ });
1371
+ const count = countResult.rows[0].count;
1372
+ if (count === 0) {
1373
+ await this.client.execute({
1374
+ sql: `DELETE FROM ${this.tablePrefix}_traces WHERE trace_id = ?`,
1375
+ args: [traceId]
1376
+ });
1377
+ } else {
1378
+ await this.client.execute({
1379
+ sql: `
1380
+ UPDATE ${this.tablePrefix}_traces
1381
+ SET span_count = ?, updated_at = CURRENT_TIMESTAMP
1382
+ WHERE trace_id = ?
1383
+ `,
1384
+ args: [count, traceId]
1385
+ });
1386
+ }
1387
+ }
1388
+ }
1389
+ const deletedCount = deleteResult.rowsAffected || 0;
1390
+ this.debugLog("Old spans deleted", { deletedCount, beforeDate });
1391
+ return deletedCount;
1392
+ } catch (error) {
1393
+ this.logger.error("Failed to delete old spans", { error, beforeTimestamp });
1394
+ throw error;
1395
+ }
1396
+ }
1397
+ async clear() {
1398
+ await this.ensureInitialized();
1399
+ try {
1400
+ await this.client.batch([
1401
+ { sql: `DELETE FROM ${this.tablePrefix}_spans`, args: [] },
1402
+ { sql: `DELETE FROM ${this.tablePrefix}_traces`, args: [] },
1403
+ { sql: `DELETE FROM ${this.tablePrefix}_logs`, args: [] }
1404
+ ]);
1405
+ this.debugLog("All spans, traces, and logs cleared");
1406
+ } catch (error) {
1407
+ this.logger.error("Failed to clear data", { error });
1408
+ throw error;
1409
+ }
1410
+ }
1411
+ rowToSpan(row) {
1412
+ const span = {
1413
+ traceId: row.trace_id,
1414
+ spanId: row.span_id,
1415
+ name: row.name,
1416
+ kind: row.kind,
1417
+ startTime: row.start_time,
1418
+ status: {
1419
+ code: row.status_code
1420
+ },
1421
+ attributes: row.attributes ? JSON.parse(row.attributes) : {},
1422
+ events: row.events ? JSON.parse(row.events) : []
1423
+ };
1424
+ if (row.parent_span_id !== null) {
1425
+ span.parentSpanId = row.parent_span_id;
1426
+ }
1427
+ if (row.end_time !== null) {
1428
+ span.endTime = row.end_time;
1429
+ }
1430
+ if (row.duration !== null) {
1431
+ span.duration = row.duration;
1432
+ }
1433
+ if (row.status_message !== null) {
1434
+ span.status.message = row.status_message;
1435
+ }
1436
+ if (row.links && row.links !== "null") {
1437
+ const links = JSON.parse(row.links);
1438
+ if (links && links.length > 0) {
1439
+ span.links = links;
1440
+ }
1441
+ }
1442
+ if (row.resource && row.resource !== "null") {
1443
+ const resource = JSON.parse(row.resource);
1444
+ if (resource && Object.keys(resource).length > 0) {
1445
+ span.resource = resource;
1446
+ }
1447
+ }
1448
+ if (row.instrumentation_scope && row.instrumentation_scope !== "null") {
1449
+ const scope = JSON.parse(row.instrumentation_scope);
1450
+ if (scope) {
1451
+ span.instrumentationScope = scope;
1452
+ }
1453
+ }
1454
+ return span;
1455
+ }
1456
+ async getStats() {
1457
+ await this.ensureInitialized();
1458
+ try {
1459
+ const [spanCountResult, traceCountResult, timeRangeResult] = await Promise.all([
1460
+ this.client.execute(`SELECT COUNT(*) as count FROM ${this.tablePrefix}_spans`),
1461
+ this.client.execute(`SELECT COUNT(*) as count FROM ${this.tablePrefix}_traces`),
1462
+ this.client.execute(`
1463
+ SELECT MIN(start_time) as oldest, MAX(start_time) as newest
1464
+ FROM ${this.tablePrefix}_spans
1465
+ `)
1466
+ ]);
1467
+ const stats = {
1468
+ spanCount: spanCountResult.rows[0].count,
1469
+ traceCount: traceCountResult.rows[0].count
1470
+ };
1471
+ if (timeRangeResult.rows[0].oldest) {
1472
+ stats.oldestSpan = new Date(timeRangeResult.rows[0].oldest);
1473
+ }
1474
+ if (timeRangeResult.rows[0].newest) {
1475
+ stats.newestSpan = new Date(timeRangeResult.rows[0].newest);
1476
+ }
1477
+ return stats;
1478
+ } catch (error) {
1479
+ this.logger.error("Failed to get stats", { error });
1480
+ throw error;
1481
+ }
1482
+ }
1483
+ async saveLogRecord(logRecord) {
1484
+ await this.ensureInitialized();
1485
+ try {
1486
+ let timestamp;
1487
+ if (Array.isArray(logRecord.hrTime)) {
1488
+ const timeMs = logRecord.hrTime[0] * 1e3 + logRecord.hrTime[1] / 1e6;
1489
+ timestamp = new Date(timeMs).toISOString();
1490
+ } else if (logRecord.timestamp) {
1491
+ timestamp = typeof logRecord.timestamp === "string" ? logRecord.timestamp : new Date(logRecord.timestamp).toISOString();
1492
+ } else {
1493
+ timestamp = (/* @__PURE__ */ new Date()).toISOString();
1494
+ }
1495
+ const spanContext = logRecord.spanContext || {};
1496
+ const traceId = spanContext.traceId || null;
1497
+ const spanId = spanContext.spanId || null;
1498
+ const traceFlags = spanContext.traceFlags ?? null;
1499
+ const severityNumber = logRecord.severityNumber ?? null;
1500
+ const severityText = logRecord.severityText || null;
1501
+ const body = typeof logRecord.body === "string" ? logRecord.body : safeStringify2(logRecord.body);
1502
+ const attributes = logRecord.attributes ? safeStringify2(logRecord.attributes) : null;
1503
+ const resource = logRecord.resource?.attributes ? safeStringify2(logRecord.resource.attributes) : null;
1504
+ const instrumentationScope = logRecord.instrumentationLibrary || logRecord.instrumentationScope ? safeStringify2(logRecord.instrumentationLibrary || logRecord.instrumentationScope) : null;
1505
+ await this.client.execute({
1506
+ sql: `
1507
+ INSERT INTO ${this.tablePrefix}_logs (
1508
+ timestamp, trace_id, span_id, trace_flags,
1509
+ severity_number, severity_text, body,
1510
+ attributes, resource, instrumentation_scope
1511
+ ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
1512
+ `,
1513
+ args: [
1514
+ timestamp,
1515
+ traceId,
1516
+ spanId,
1517
+ traceFlags,
1518
+ severityNumber,
1519
+ severityText,
1520
+ body,
1521
+ attributes,
1522
+ resource,
1523
+ instrumentationScope
1524
+ ]
1525
+ });
1526
+ this.debugLog("Log record saved successfully", {
1527
+ timestamp,
1528
+ traceId,
1529
+ spanId,
1530
+ severityNumber
1531
+ });
1532
+ } catch (error) {
1533
+ this.logger.error("Failed to save log record", { error, logRecord });
1534
+ throw error;
1535
+ }
1536
+ }
1537
+ async getLogsByTraceId(traceId) {
1538
+ await this.ensureInitialized();
1539
+ try {
1540
+ const result = await this.client.execute({
1541
+ sql: `
1542
+ SELECT * FROM ${this.tablePrefix}_logs
1543
+ WHERE trace_id = ?
1544
+ ORDER BY timestamp DESC
1545
+ LIMIT ?
1546
+ `,
1547
+ args: [traceId, this.maxSpansPerQuery]
1548
+ });
1549
+ return result.rows.map((row) => this.rowToLogRecord(row));
1550
+ } catch (error) {
1551
+ this.logger.error("Failed to get logs by trace ID", { error, traceId });
1552
+ throw error;
1553
+ }
1554
+ }
1555
+ async getLogsBySpanId(spanId) {
1556
+ await this.ensureInitialized();
1557
+ try {
1558
+ const result = await this.client.execute({
1559
+ sql: `
1560
+ SELECT * FROM ${this.tablePrefix}_logs
1561
+ WHERE span_id = ?
1562
+ ORDER BY timestamp DESC
1563
+ LIMIT ?
1564
+ `,
1565
+ args: [spanId, this.maxSpansPerQuery]
1566
+ });
1567
+ return result.rows.map((row) => this.rowToLogRecord(row));
1568
+ } catch (error) {
1569
+ this.logger.error("Failed to get logs by span ID", { error, spanId });
1570
+ throw error;
1571
+ }
1572
+ }
1573
+ async queryLogs(filter) {
1574
+ await this.ensureInitialized();
1575
+ try {
1576
+ const whereClauses = [];
1577
+ const args = [];
1578
+ if (filter.traceId) {
1579
+ whereClauses.push("trace_id = ?");
1580
+ args.push(filter.traceId);
1581
+ }
1582
+ if (filter.spanId) {
1583
+ whereClauses.push("span_id = ?");
1584
+ args.push(filter.spanId);
1585
+ }
1586
+ if (filter.severityNumber !== void 0) {
1587
+ whereClauses.push("severity_number >= ?");
1588
+ args.push(filter.severityNumber);
1589
+ }
1590
+ if (filter.severityText) {
1591
+ whereClauses.push("severity_text = ?");
1592
+ args.push(filter.severityText);
1593
+ }
1594
+ if (filter.instrumentationScope) {
1595
+ whereClauses.push("instrumentation_scope LIKE ?");
1596
+ args.push(`%${filter.instrumentationScope}%`);
1597
+ }
1598
+ if (filter.startTimeMin !== void 0) {
1599
+ const minTime = new Date(filter.startTimeMin).toISOString();
1600
+ whereClauses.push("timestamp >= ?");
1601
+ args.push(minTime);
1602
+ }
1603
+ if (filter.startTimeMax !== void 0) {
1604
+ const maxTime = new Date(filter.startTimeMax).toISOString();
1605
+ whereClauses.push("timestamp <= ?");
1606
+ args.push(maxTime);
1607
+ }
1608
+ if (filter.bodyContains) {
1609
+ whereClauses.push("body LIKE ?");
1610
+ args.push(`%${filter.bodyContains}%`);
1611
+ }
1612
+ const whereClause = whereClauses.length > 0 ? `WHERE ${whereClauses.join(" AND ")}` : "";
1613
+ const limit = filter.limit || this.maxSpansPerQuery;
1614
+ args.push(limit);
1615
+ const result = await this.client.execute({
1616
+ sql: `
1617
+ SELECT * FROM ${this.tablePrefix}_logs
1618
+ ${whereClause}
1619
+ ORDER BY timestamp DESC
1620
+ LIMIT ?
1621
+ `,
1622
+ args
1623
+ });
1624
+ const logs = result.rows.map((row) => this.rowToLogRecord(row));
1625
+ if (filter.attributeKey) {
1626
+ const key = filter.attributeKey;
1627
+ return logs.filter((log) => {
1628
+ if (!log.attributes) return false;
1629
+ if (filter.attributeValue !== void 0) {
1630
+ return log.attributes[key] === filter.attributeValue;
1631
+ }
1632
+ return key in log.attributes;
1633
+ });
1634
+ }
1635
+ return logs;
1636
+ } catch (error) {
1637
+ this.logger.error("Failed to query logs", { error, filter });
1638
+ throw error;
1639
+ }
1640
+ }
1641
+ async deleteOldLogs(beforeTimestamp) {
1642
+ await this.ensureInitialized();
1643
+ try {
1644
+ const beforeDate = new Date(beforeTimestamp).toISOString();
1645
+ const result = await this.client.execute({
1646
+ sql: `DELETE FROM ${this.tablePrefix}_logs WHERE timestamp < ?`,
1647
+ args: [beforeDate]
1648
+ });
1649
+ const deletedCount = result.rowsAffected || 0;
1650
+ this.debugLog("Old logs deleted", { deletedCount, beforeDate });
1651
+ return deletedCount;
1652
+ } catch (error) {
1653
+ this.logger.error("Failed to delete old logs", { error, beforeTimestamp });
1654
+ throw error;
1655
+ }
1656
+ }
1657
+ rowToLogRecord(row) {
1658
+ const log = {
1659
+ timestamp: row.timestamp,
1660
+ body: (() => {
1661
+ try {
1662
+ const bodyStr = row.body;
1663
+ if (bodyStr.startsWith("{") || bodyStr.startsWith("[")) {
1664
+ return JSON.parse(bodyStr);
1665
+ }
1666
+ } catch {
1667
+ }
1668
+ return row.body;
1669
+ })()
1670
+ };
1671
+ if (row.trace_id !== null) {
1672
+ log.traceId = row.trace_id;
1673
+ }
1674
+ if (row.span_id !== null) {
1675
+ log.spanId = row.span_id;
1676
+ }
1677
+ if (row.trace_flags !== null) {
1678
+ log.traceFlags = row.trace_flags;
1679
+ }
1680
+ if (row.severity_number !== null) {
1681
+ log.severityNumber = row.severity_number;
1682
+ }
1683
+ if (row.severity_text !== null) {
1684
+ log.severityText = row.severity_text;
1685
+ }
1686
+ if (row.attributes && row.attributes !== "null") {
1687
+ try {
1688
+ const attributes = JSON.parse(row.attributes);
1689
+ if (attributes && Object.keys(attributes).length > 0) {
1690
+ log.attributes = attributes;
1691
+ }
1692
+ } catch {
1693
+ }
1694
+ }
1695
+ if (row.resource && row.resource !== "null") {
1696
+ try {
1697
+ const resource = JSON.parse(row.resource);
1698
+ if (resource && Object.keys(resource).length > 0) {
1699
+ log.resource = resource;
1700
+ }
1701
+ } catch {
1702
+ }
1703
+ }
1704
+ if (row.instrumentation_scope && row.instrumentation_scope !== "null") {
1705
+ try {
1706
+ const scope = JSON.parse(row.instrumentation_scope);
1707
+ if (scope) {
1708
+ log.instrumentationScope = scope;
1709
+ }
1710
+ } catch {
1711
+ }
1712
+ }
1713
+ return log;
1714
+ }
1715
+ getInfo() {
1716
+ return {
1717
+ adapter: this.constructor.name,
1718
+ displayName: "LibSQL Observability Storage",
1719
+ persistent: true,
1720
+ description: "Persists spans and logs to a LibSQL/Turso database for long-term retention."
1721
+ };
1722
+ }
1723
+ async close() {
1724
+ this.debugLog("LibSQL observability adapter closed");
1725
+ }
1726
+ };
1727
+
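The log-query surface above accepts a structured filter that is translated into SQL: traceId, spanId, severityText, and the time bounds become exact or range predicates, severityNumber is a minimum (>=) threshold, bodyContains and instrumentationScope become LIKE patterns, and attributeKey/attributeValue are applied in JavaScript after the rows are fetched. A minimal sketch of a call against an already-constructed adapter; the variable name `observability` and the concrete values are illustrative, not part of the package:

const logs = await observability.queryLogs({
  traceId: "0af7651916cd43dd8448eb211c80319c", // exact match on trace_id
  severityNumber: 13,                          // keeps WARN (13) and above via the >= comparison
  bodyContains: "timeout",                     // LIKE '%timeout%' on the body column
  startTimeMin: Date.now() - 60 * 60 * 1000,   // converted to ISO, then timestamp >= ?
  limit: 50,                                   // falls back to maxSpansPerQuery when omitted
  attributeKey: "http.method",                 // filtered in JS after the SQL query
  attributeValue: "POST",
});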
1728
+ // src/observability-adapter-edge.ts
1729
+ var LibSQLObservabilityAdapterEdge = class extends LibSQLObservabilityCore {
1730
+ static {
1731
+ __name(this, "LibSQLObservabilityAdapterEdge");
1732
+ }
1733
+ constructor(options) {
1734
+ if (!options.url) {
1735
+ throw new Error("LibSQLObservabilityAdapterEdge requires a url option");
1736
+ }
1737
+ if (options.url.startsWith("file:") || options.url === ":memory:" || !options.url.startsWith("libsql://")) {
1738
+ throw new Error(
1739
+ "LibSQLObservabilityAdapterEdge only supports remote Turso URLs (libsql://). File-based databases are not supported in edge environments. Use LibSQLObservabilityAdapter from '@voltagent/libsql' for Node.js environments."
1740
+ );
1741
+ }
1742
+ if (!options.authToken) {
1743
+ throw new Error(
1744
+ "LibSQLObservabilityAdapterEdge requires an authToken for remote connections"
1745
+ );
1746
+ }
1747
+ const logger = options.logger || createPinoLogger2({ name: "libsql-observability-edge" });
1748
+ const client = createClient2({
1749
+ url: options.url,
1750
+ authToken: options.authToken
1751
+ });
1752
+ super(client, options, logger);
1753
+ }
1754
+ };
1755
+
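For orientation, constructing the edge observability adapter looks like the following. A minimal sketch, assuming the package's "./edge" subpath export resolves to this dist/edge.mjs bundle; the URL and token are placeholders:

import { LibSQLObservabilityAdapter } from "@voltagent/libsql/edge";

const observability = new LibSQLObservabilityAdapter({
  url: "libsql://my-database.turso.io", // must be a remote libsql:// URL; file: and :memory: are rejected
  authToken: "<turso-auth-token>",      // required for remote connections
});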
1756
+ // src/vector-adapter-edge.ts
1757
+ import { createClient as createClient3 } from "@libsql/client/web";
1758
+ import { createPinoLogger as createPinoLogger3 } from "@voltagent/logger";
1759
+
1760
+ // src/vector-core.ts
1761
+ import {
1762
+ cosineSimilarity
1763
+ } from "@voltagent/core";
1764
+ import { safeStringify as safeStringify3 } from "@voltagent/internal";
1765
+ var LibSQLVectorCore = class {
1766
+ static {
1767
+ __name(this, "LibSQLVectorCore");
1768
+ }
1769
+ client;
1770
+ tablePrefix;
1771
+ maxVectorDimensions;
1772
+ cacheSize;
1773
+ batchSize;
1774
+ debug;
1775
+ logger;
1776
+ maxRetries;
1777
+ retryDelayMs;
1778
+ initialized = false;
1779
+ vectorCache;
1780
+ dimensions = null;
1781
+ constructor(client, options, logger) {
1782
+ this.client = client;
1783
+ this.tablePrefix = options.tablePrefix ?? "voltagent";
1784
+ this.maxVectorDimensions = options.maxVectorDimensions ?? 1536;
1785
+ this.cacheSize = options.cacheSize ?? 100;
1786
+ this.batchSize = options.batchSize ?? 100;
1787
+ this.maxRetries = options.maxRetries ?? 3;
1788
+ this.retryDelayMs = options.retryDelayMs ?? 100;
1789
+ this.debug = options.debug ?? false;
1790
+ this.logger = logger;
1791
+ this.vectorCache = /* @__PURE__ */ new Map();
1792
+ }
1793
+ /**
1794
+ * Serialize a vector to binary format
1795
+ * Uses ArrayBuffer/DataView for cross-platform compatibility
1796
+ */
1797
+ serializeVector(vector) {
1798
+ const buffer = new ArrayBuffer(vector.length * 4);
1799
+ const view = new DataView(buffer);
1800
+ for (let i = 0; i < vector.length; i++) {
1801
+ view.setFloat32(i * 4, vector[i], true);
1802
+ }
1803
+ return new Uint8Array(buffer);
1804
+ }
1805
+ /**
1806
+ * Deserialize a vector from binary format
1807
+ */
1808
+ deserializeVector(data) {
1809
+ const bytes = data instanceof ArrayBuffer ? new Uint8Array(data) : data;
1810
+ const view = new DataView(bytes.buffer, bytes.byteOffset, bytes.byteLength);
1811
+ const vector = [];
1812
+ for (let i = 0; i < bytes.length; i += 4) {
1813
+ vector.push(view.getFloat32(i, true));
1814
+ }
1815
+ return vector;
1816
+ }
1817
+ /**
1818
+ * Initialize the database schema
1819
+ */
1820
+ async initialize() {
1821
+ if (this.initialized) return;
1822
+ const tableName = `${this.tablePrefix}_vectors`;
1823
+ try {
1824
+ await this.client.execute(`
1825
+ CREATE TABLE IF NOT EXISTS ${tableName} (
1826
+ id TEXT PRIMARY KEY,
1827
+ vector BLOB NOT NULL,
1828
+ dimensions INTEGER NOT NULL,
1829
+ metadata TEXT,
1830
+ content TEXT,
1831
+ created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
1832
+ updated_at DATETIME DEFAULT CURRENT_TIMESTAMP
1833
+ )
1834
+ `);
1835
+ await this.client.execute(
1836
+ `CREATE INDEX IF NOT EXISTS idx_${tableName}_created ON ${tableName}(created_at)`
1837
+ );
1838
+ await this.client.execute(
1839
+ `CREATE INDEX IF NOT EXISTS idx_${tableName}_dimensions ON ${tableName}(dimensions)`
1840
+ );
1841
+ this.initialized = true;
1842
+ this.logger.debug("Vector adapter initialized");
1843
+ } catch (error) {
1844
+ this.logger.error("Failed to initialize vector adapter", error);
1845
+ throw error;
1846
+ }
1847
+ }
1848
+ /**
1849
+ * Execute a database operation with retries
1850
+ */
1851
+ async executeWithRetry(operation, context) {
1852
+ let lastError;
1853
+ let delay = this.retryDelayMs;
1854
+ for (let attempt = 1; attempt <= this.maxRetries; attempt++) {
1855
+ try {
1856
+ return await operation();
1857
+ } catch (error) {
1858
+ lastError = error;
1859
+ this.logger.warn(`Operation failed (attempt ${attempt}): ${context}`, error);
1860
+ if (attempt < this.maxRetries) {
1861
+ await new Promise((resolve) => setTimeout(resolve, delay));
1862
+ delay *= 2;
1863
+ }
1864
+ }
1865
+ }
1866
+ this.logger.error(`Operation failed after ${this.maxRetries} attempts: ${context}`, lastError);
1867
+ throw lastError;
1868
+ }
1869
+ async store(id, vector, metadata) {
1870
+ await this.initialize();
1871
+ if (!Array.isArray(vector) || vector.length === 0) {
1872
+ throw new Error("Vector must be a non-empty array");
1873
+ }
1874
+ if (vector.length > this.maxVectorDimensions) {
1875
+ throw new Error(
1876
+ `Vector dimensions (${vector.length}) exceed maximum (${this.maxVectorDimensions})`
1877
+ );
1878
+ }
1879
+ if (this.dimensions === null) {
1880
+ this.dimensions = vector.length;
1881
+ } else if (vector.length !== this.dimensions) {
1882
+ throw new Error(
1883
+ `Vector dimension mismatch. Expected ${this.dimensions}, got ${vector.length}`
1884
+ );
1885
+ }
1886
+ const tableName = `${this.tablePrefix}_vectors`;
1887
+ const serializedVector = this.serializeVector(vector);
1888
+ const metadataJson = metadata ? safeStringify3(metadata) : null;
1889
+ await this.executeWithRetry(async () => {
1890
+ await this.client.execute({
1891
+ sql: `
1892
+ INSERT OR REPLACE INTO ${tableName}
1893
+ (id, vector, dimensions, metadata, updated_at)
1894
+ VALUES (?, ?, ?, ?, CURRENT_TIMESTAMP)
1895
+ `,
1896
+ args: [id, serializedVector, vector.length, metadataJson]
1897
+ });
1898
+ }, `store vector ${id}`);
1899
+ if (this.vectorCache.size >= this.cacheSize) {
1900
+ const firstKey = this.vectorCache.keys().next().value;
1901
+ if (firstKey) this.vectorCache.delete(firstKey);
1902
+ }
1903
+ this.vectorCache.set(id, { id, vector, metadata });
1904
+ this.logger.debug(`Vector stored: ${id} (${vector.length} dimensions)`);
1905
+ }
1906
+ async storeBatch(items) {
1907
+ await this.initialize();
1908
+ if (items.length === 0) return;
1909
+ const tableName = `${this.tablePrefix}_vectors`;
1910
+ for (let i = 0; i < items.length; i += this.batchSize) {
1911
+ const batch = items.slice(i, i + this.batchSize);
1912
+ await this.executeWithRetry(async () => {
1913
+ const stmts = [];
1914
+ for (const item of batch) {
1915
+ if (!Array.isArray(item.vector) || item.vector.length === 0) {
1916
+ throw new Error("Vector must be a non-empty array");
1917
+ }
1918
+ if (this.dimensions === null) {
1919
+ this.dimensions = item.vector.length;
1920
+ } else if (item.vector.length !== this.dimensions) {
1921
+ throw new Error(
1922
+ `Vector dimension mismatch. Expected ${this.dimensions}, got ${item.vector.length}`
1923
+ );
1924
+ }
1925
+ const serializedVector = this.serializeVector(item.vector);
1926
+ const metadataJson = item.metadata ? safeStringify3(item.metadata) : null;
1927
+ const content = item.content ?? null;
1928
+ stmts.push({
1929
+ sql: `INSERT OR REPLACE INTO ${tableName} (id, vector, dimensions, metadata, content, updated_at) VALUES (?, ?, ?, ?, ?, CURRENT_TIMESTAMP)`,
1930
+ args: [item.id, serializedVector, item.vector.length, metadataJson, content]
1931
+ });
1932
+ }
1933
+ await this.client.batch(stmts, "write");
1934
+ }, `storeBatch ${batch.length} vectors`);
1935
+ this.logger.debug(`Batch of ${batch.length} vectors stored`);
1936
+ }
1937
+ }
1938
+ async search(queryVector, options) {
1939
+ await this.initialize();
1940
+ const { limit = 10, threshold = 0, filter } = options || {};
1941
+ if (this.dimensions !== null && queryVector.length !== this.dimensions) {
1942
+ throw new Error(
1943
+ `Query vector dimension mismatch. Expected ${this.dimensions}, got ${queryVector.length}`
1944
+ );
1945
+ }
1946
+ const tableName = `${this.tablePrefix}_vectors`;
1947
+ let query = `SELECT id, vector, dimensions, metadata, content FROM ${tableName}`;
1948
+ const args = [];
1949
+ if (this.dimensions !== null) {
1950
+ query += " WHERE dimensions = ?";
1951
+ args.push(this.dimensions);
1952
+ }
1953
+ const result = await this.executeWithRetry(
1954
+ async () => await this.client.execute({ sql: query, args }),
1955
+ "search vectors"
1956
+ );
1957
+ const searchResults = [];
1958
+ for (const row of result.rows) {
1959
+ const id = row.id;
1960
+ const vectorBlob = row.vector;
1961
+ const metadataJson = row.metadata;
1962
+ const content = row.content ?? void 0;
1963
+ const metadata = metadataJson ? JSON.parse(metadataJson) : void 0;
1964
+ if (filter && !this.matchesFilter(metadata, filter)) {
1965
+ continue;
1966
+ }
1967
+ const vector = this.deserializeVector(vectorBlob);
1968
+ const similarity = cosineSimilarity(queryVector, vector);
1969
+ const score = (similarity + 1) / 2;
1970
+ if (score >= threshold) {
1971
+ searchResults.push({
1972
+ id,
1973
+ vector,
1974
+ metadata,
1975
+ content,
1976
+ score,
1977
+ distance: 1 - similarity
1978
+ });
1979
+ }
1980
+ }
1981
+ searchResults.sort((a, b) => b.score - a.score);
1982
+ return searchResults.slice(0, limit);
1983
+ }
1984
+ matchesFilter(metadata, filter) {
1985
+ if (!metadata) {
1986
+ return false;
1987
+ }
1988
+ for (const [key, value] of Object.entries(filter)) {
1989
+ if (metadata[key] !== value) {
1990
+ return false;
1991
+ }
1992
+ }
1993
+ return true;
1994
+ }
1995
+ async delete(id) {
1996
+ await this.initialize();
1997
+ const tableName = `${this.tablePrefix}_vectors`;
1998
+ await this.executeWithRetry(async () => {
1999
+ await this.client.execute({
2000
+ sql: `DELETE FROM ${tableName} WHERE id = ?`,
2001
+ args: [id]
2002
+ });
2003
+ }, `delete vector ${id}`);
2004
+ this.vectorCache.delete(id);
2005
+ this.logger.debug(`Vector deleted: ${id}`);
2006
+ }
2007
+ async deleteBatch(ids) {
2008
+ await this.initialize();
2009
+ if (ids.length === 0) return;
2010
+ const tableName = `${this.tablePrefix}_vectors`;
2011
+ for (let i = 0; i < ids.length; i += this.batchSize) {
2012
+ const batch = ids.slice(i, i + this.batchSize);
2013
+ const placeholders = batch.map(() => "?").join(",");
2014
+ await this.executeWithRetry(async () => {
2015
+ await this.client.execute({
2016
+ sql: `DELETE FROM ${tableName} WHERE id IN (${placeholders})`,
2017
+ args: batch
2018
+ });
2019
+ }, `deleteBatch ${batch.length} vectors`);
2020
+ for (const id of batch) {
2021
+ this.vectorCache.delete(id);
2022
+ }
2023
+ this.logger.debug(`Batch of ${batch.length} vectors deleted`);
2024
+ }
2025
+ }
2026
+ async clear() {
2027
+ await this.initialize();
2028
+ const tableName = `${this.tablePrefix}_vectors`;
2029
+ await this.executeWithRetry(async () => {
2030
+ await this.client.execute(`DELETE FROM ${tableName}`);
2031
+ }, "clear all vectors");
2032
+ this.vectorCache.clear();
2033
+ this.dimensions = null;
2034
+ this.logger.debug("All vectors cleared");
2035
+ }
2036
+ async count() {
2037
+ await this.initialize();
2038
+ const tableName = `${this.tablePrefix}_vectors`;
2039
+ const result = await this.executeWithRetry(
2040
+ async () => await this.client.execute(`SELECT COUNT(*) as count FROM ${tableName}`),
2041
+ "count vectors"
2042
+ );
2043
+ const raw = result.rows[0]?.count;
2044
+ if (typeof raw === "bigint") return Number(raw);
2045
+ if (typeof raw === "string") return Number.parseInt(raw, 10) || 0;
2046
+ return raw ?? 0;
2047
+ }
2048
+ async get(id) {
2049
+ await this.initialize();
2050
+ if (this.vectorCache.has(id)) {
2051
+ const cached = this.vectorCache.get(id);
2052
+ if (cached) {
2053
+ return {
2054
+ ...cached,
2055
+ vector: [...cached.vector],
2056
+ metadata: cached.metadata ? { ...cached.metadata } : void 0
2057
+ };
2058
+ }
2059
+ }
2060
+ const tableName = `${this.tablePrefix}_vectors`;
2061
+ const result = await this.executeWithRetry(
2062
+ async () => await this.client.execute({
2063
+ sql: `SELECT id, vector, metadata, content FROM ${tableName} WHERE id = ?`,
2064
+ args: [id]
2065
+ }),
2066
+ `get vector ${id}`
2067
+ );
2068
+ if (result.rows.length === 0) {
2069
+ return null;
2070
+ }
2071
+ const row = result.rows[0];
2072
+ const vectorBlob = row.vector;
2073
+ const metadataJson = row.metadata;
2074
+ const content = row.content;
2075
+ const vector = this.deserializeVector(vectorBlob);
2076
+ const metadata = metadataJson ? JSON.parse(metadataJson) : void 0;
2077
+ const item = {
2078
+ id,
2079
+ vector,
2080
+ metadata,
2081
+ content: content ?? void 0
2082
+ };
2083
+ if (this.vectorCache.size >= this.cacheSize) {
2084
+ const firstKey = this.vectorCache.keys().next().value;
2085
+ if (firstKey) this.vectorCache.delete(firstKey);
2086
+ }
2087
+ this.vectorCache.set(id, item);
2088
+ return item;
2089
+ }
2090
+ async close() {
2091
+ this.vectorCache.clear();
2092
+ this.logger.debug("Vector adapter closed");
2093
+ }
2094
+ async getStats() {
2095
+ await this.initialize();
2096
+ const tableName = `${this.tablePrefix}_vectors`;
2097
+ const [countResult, sizeResult] = await Promise.all([
2098
+ this.executeWithRetry(
2099
+ async () => await this.client.execute(
2100
+ `SELECT COUNT(*) as count, MAX(dimensions) as dims FROM ${tableName}`
2101
+ ),
2102
+ "getStats count"
2103
+ ),
2104
+ this.executeWithRetry(
2105
+ async () => await this.client.execute({
2106
+ sql: `SELECT
2107
+ COALESCE(SUM(LENGTH(id)),0) +
2108
+ COALESCE(SUM(LENGTH(vector)),0) +
2109
+ COALESCE(SUM(LENGTH(metadata)),0) +
2110
+ COALESCE(SUM(LENGTH(content)),0) AS size
2111
+ FROM ${tableName}`
2112
+ }),
2113
+ "getStats size"
2114
+ )
2115
+ ]);
2116
+ const row1 = countResult.rows[0];
2117
+ const row2 = sizeResult.rows[0];
2118
+ const countRaw = row1?.count;
2119
+ const dimsRaw = row1?.dims;
2120
+ const sizeRaw = row2?.size;
2121
+ const normalize = /* @__PURE__ */ __name((v) => typeof v === "bigint" ? Number(v) : typeof v === "string" ? Number.parseInt(v, 10) || 0 : v ?? 0, "normalize");
2122
+ return {
2123
+ count: normalize(countRaw),
2124
+ dimensions: dimsRaw != null ? normalize(dimsRaw) : this.dimensions,
2125
+ cacheSize: this.vectorCache.size,
2126
+ tableSizeBytes: normalize(sizeRaw)
2127
+ };
2128
+ }
2129
+ };
2130
+
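Note that the search method above rescales cosine similarity before applying the caller's threshold, which is easy to miss when choosing a cutoff. A short illustration of the arithmetic (the values are arbitrary):

const similarity = 0.5;              // cosineSimilarity(queryVector, storedVector), range [-1, 1]
const score = (similarity + 1) / 2;  // 0.75 — this is what `threshold` is compared against
const distance = 1 - similarity;     // 0.5, reported alongside each result

// So search({ threshold: 0.75 }) keeps only vectors whose raw cosine similarity is >= 0.5.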
2131
+ // src/vector-adapter-edge.ts
2132
+ var LibSQLVectorAdapterEdge = class extends LibSQLVectorCore {
2133
+ static {
2134
+ __name(this, "LibSQLVectorAdapterEdge");
2135
+ }
2136
+ constructor(options) {
2137
+ if (!options.url) {
2138
+ throw new Error("LibSQLVectorAdapterEdge requires a url option");
2139
+ }
2140
+ if (options.url.startsWith("file:") || options.url === ":memory:" || !options.url.startsWith("libsql://")) {
2141
+ throw new Error(
2142
+ "LibSQLVectorAdapterEdge only supports remote Turso URLs (libsql://). File-based databases are not supported in edge environments. Use LibSQLVectorAdapter from '@voltagent/libsql' for Node.js environments."
2143
+ );
2144
+ }
2145
+ if (!options.authToken) {
2146
+ throw new Error("LibSQLVectorAdapterEdge requires an authToken for remote connections");
2147
+ }
2148
+ const logger = options.logger ?? createPinoLogger3({
2149
+ name: "libsql-vector-adapter-edge",
2150
+ level: options.debug ? "debug" : "info"
2151
+ });
2152
+ const client = createClient3({
2153
+ url: options.url,
2154
+ authToken: options.authToken
2155
+ });
2156
+ super(client, options, logger);
2157
+ }
2158
+ };
2159
+ export {
2160
+ LibSQLMemoryAdapterEdge as LibSQLMemoryAdapter,
2161
+ LibSQLMemoryAdapterEdge,
2162
+ LibSQLObservabilityAdapterEdge as LibSQLObservabilityAdapter,
2163
+ LibSQLObservabilityAdapterEdge,
2164
+ LibSQLVectorAdapterEdge as LibSQLVectorAdapter,
2165
+ LibSQLVectorAdapterEdge
2166
+ };
2167
+ //# sourceMappingURL=edge.mjs.map
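Putting the vector pieces together, a minimal end-to-end sketch against the edge build (again assuming the "./edge" subpath export; identifiers and values are illustrative):

import { LibSQLVectorAdapter } from "@voltagent/libsql/edge";

const vectors = new LibSQLVectorAdapter({
  url: "libsql://my-database.turso.io",
  authToken: "<turso-auth-token>",
});

// The first stored vector fixes the expected dimensionality; later mismatches throw.
await vectors.store("doc-1", [0.12, 0.34, 0.56], { source: "kb" });

const hits = await vectors.search([0.11, 0.3, 0.6], {
  limit: 5,
  threshold: 0.75,          // rescaled cosine score, see the note above
  filter: { source: "kb" }, // exact-match metadata filter
});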