@mastra/libsql 1.0.0-beta.8 → 1.0.0-beta.9

This diff shows the contents of publicly available package versions as released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in the public registry.
package/dist/index.cjs CHANGED
@@ -6,6 +6,7 @@ var storage = require('@mastra/core/storage');
6
6
  var utils = require('@mastra/core/utils');
7
7
  var vector = require('@mastra/core/vector');
8
8
  var filter = require('@mastra/core/vector/filter');
9
+ var base = require('@mastra/core/base');
9
10
  var agent = require('@mastra/core/agent');
10
11
  var evals = require('@mastra/core/evals');
11
12
 
@@ -1094,1807 +1095,2134 @@ var LibSQLVector = class extends vector.MastraVector {
1094
1095
  });
1095
1096
  }
1096
1097
  };
1097
- var AgentsLibSQL = class extends storage.AgentsStorage {
1098
- client;
1099
- constructor({ client, operations: _ }) {
1100
- super();
1101
- this.client = client;
1102
- }
1103
- parseJson(value, fieldName) {
1104
- if (!value) return void 0;
1105
- if (typeof value !== "string") return value;
1106
- try {
1107
- return JSON.parse(value);
1108
- } catch (error$1) {
1109
- const details = {
1110
- value: value.length > 100 ? value.substring(0, 100) + "..." : value
1111
- };
1112
- if (fieldName) {
1113
- details.field = fieldName;
1098
+ function isLockError(error) {
1099
+ return error.code === "SQLITE_BUSY" || error.code === "SQLITE_LOCKED" || error.message?.toLowerCase().includes("database is locked") || error.message?.toLowerCase().includes("database table is locked") || error.message?.toLowerCase().includes("table is locked") || error.constructor.name === "SqliteError" && error.message?.toLowerCase().includes("locked");
1100
+ }
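The new isLockError helper decides which libsql/SQLite errors are safe to retry (busy/locked error codes plus a few "locked" message variants). A condensed standalone sketch of the same check, using illustrative error shapes that are not taken from the package:

function looksLikeLockError(error) {
  const msg = (error.message || "").toLowerCase();
  return (
    error.code === "SQLITE_BUSY" ||
    error.code === "SQLITE_LOCKED" ||
    msg.includes("database is locked") ||
    msg.includes("table is locked")
  );
}
console.log(looksLikeLockError({ code: "SQLITE_BUSY" }));                                        // true
console.log(looksLikeLockError({ message: "database table is locked" }));                        // true
console.log(looksLikeLockError({ code: "SQLITE_CONSTRAINT", message: "UNIQUE constraint failed" })); // false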
1101
+ function createExecuteWriteOperationWithRetry({
1102
+ logger,
1103
+ maxRetries,
1104
+ initialBackoffMs
1105
+ }) {
1106
+ return async function executeWriteOperationWithRetry(operationFn, operationDescription) {
1107
+ let attempts = 0;
1108
+ let backoff = initialBackoffMs;
1109
+ while (attempts < maxRetries) {
1110
+ try {
1111
+ return await operationFn();
1112
+ } catch (error) {
1113
+ logger.debug(`LibSQLStore: Error caught in retry loop for ${operationDescription}`, {
1114
+ errorType: error.constructor.name,
1115
+ errorCode: error.code,
1116
+ errorMessage: error.message,
1117
+ attempts,
1118
+ maxRetries
1119
+ });
1120
+ if (isLockError(error)) {
1121
+ attempts++;
1122
+ if (attempts >= maxRetries) {
1123
+ logger.error(
1124
+ `LibSQLStore: Operation failed after ${maxRetries} attempts due to database lock: ${error.message}`,
1125
+ { error, attempts, maxRetries }
1126
+ );
1127
+ throw error;
1128
+ }
1129
+ logger.warn(
1130
+ `LibSQLStore: Attempt ${attempts} failed due to database lock during ${operationDescription}. Retrying in ${backoff}ms...`,
1131
+ { errorMessage: error.message, attempts, backoff, maxRetries }
1132
+ );
1133
+ await new Promise((resolve) => setTimeout(resolve, backoff));
1134
+ backoff *= 2;
1135
+ } else {
1136
+ logger.error(`LibSQLStore: Non-lock error during ${operationDescription}, not retrying`, { error });
1137
+ throw error;
1138
+ }
1114
1139
  }
1115
- throw new error.MastraError(
1116
- {
1117
- id: storage.createStorageErrorId("LIBSQL", "PARSE_JSON", "INVALID_JSON"),
1118
- domain: error.ErrorDomain.STORAGE,
1119
- category: error.ErrorCategory.SYSTEM,
1120
- text: `Failed to parse JSON${fieldName ? ` for field "${fieldName}"` : ""}: ${error$1 instanceof Error ? error$1.message : "Unknown error"}`,
1121
- details
1122
- },
1123
- error$1
1124
- );
1125
1140
  }
1141
+ throw new Error(`LibSQLStore: Unexpected exit from retry loop for ${operationDescription}`);
1142
+ };
1143
+ }
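createExecuteWriteOperationWithRetry wraps each write in an exponential-backoff loop: with the defaults used further down (5 attempts, 100 ms initial backoff) a persistently locked database is retried after roughly 100, 200, 400 and 800 ms before the final error is rethrown, while non-lock errors fail immediately. A self-contained sketch of the same pattern, using console logging instead of the Mastra logger:

async function withRetry(operationFn, { maxRetries = 5, initialBackoffMs = 100 } = {}) {
  let backoff = initialBackoffMs;
  for (let attempt = 1; attempt <= maxRetries; attempt++) {
    try {
      return await operationFn();
    } catch (err) {
      const retryable = err.code === "SQLITE_BUSY" || err.code === "SQLITE_LOCKED";
      if (!retryable || attempt === maxRetries) throw err;      // non-lock errors and the last attempt fail fast
      console.warn(`attempt ${attempt} hit a lock, retrying in ${backoff}ms`);
      await new Promise(resolve => setTimeout(resolve, backoff));
      backoff *= 2;                                             // 100ms, 200ms, 400ms, ...
    }
  }
}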
1144
+ function prepareStatement({ tableName, record }) {
1145
+ const parsedTableName = utils.parseSqlIdentifier(tableName, "table name");
1146
+ const columns = Object.keys(record).map((col) => utils.parseSqlIdentifier(col, "column name"));
1147
+ const values = Object.values(record).map((v) => {
1148
+ if (typeof v === `undefined` || v === null) {
1149
+ return null;
1150
+ }
1151
+ if (v instanceof Date) {
1152
+ return v.toISOString();
1153
+ }
1154
+ return typeof v === "object" ? JSON.stringify(v) : v;
1155
+ });
1156
+ const placeholders = values.map(() => "?").join(", ");
1157
+ return {
1158
+ sql: `INSERT OR REPLACE INTO ${parsedTableName} (${columns.join(", ")}) VALUES (${placeholders})`,
1159
+ args: values
1160
+ };
1161
+ }
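prepareStatement turns a record into a parameterized INSERT OR REPLACE: Dates are serialized to ISO strings, plain objects to JSON text, and undefined/null to SQL NULL. For a hypothetical table and record (names illustrative), it produces roughly:

const stmt = prepareStatement({
  tableName: "demo_notes",
  record: { id: "n-1", body: "hello", metadata: { tag: "demo" }, createdAt: new Date("2024-05-01") },
});
// stmt.sql  === "INSERT OR REPLACE INTO demo_notes (id, body, metadata, createdAt) VALUES (?, ?, ?, ?)"
// stmt.args === ["n-1", "hello", "{\"tag\":\"demo\"}", "2024-05-01T00:00:00.000Z"]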
1162
+ function prepareUpdateStatement({
1163
+ tableName,
1164
+ updates,
1165
+ keys
1166
+ }) {
1167
+ const parsedTableName = utils.parseSqlIdentifier(tableName, "table name");
1168
+ const schema = storage.TABLE_SCHEMAS[tableName];
1169
+ const updateColumns = Object.keys(updates).map((col) => utils.parseSqlIdentifier(col, "column name"));
1170
+ const updateValues = Object.values(updates).map(transformToSqlValue);
1171
+ const setClause = updateColumns.map((col) => `${col} = ?`).join(", ");
1172
+ const whereClause = prepareWhereClause(keys, schema);
1173
+ return {
1174
+ sql: `UPDATE ${parsedTableName} SET ${setClause}${whereClause.sql}`,
1175
+ args: [...updateValues, ...whereClause.args]
1176
+ };
1177
+ }
1178
+ function transformToSqlValue(value) {
1179
+ if (typeof value === "undefined" || value === null) {
1180
+ return null;
1126
1181
  }
1127
- parseRow(row) {
1128
- return {
1129
- id: row.id,
1130
- name: row.name,
1131
- description: row.description,
1132
- instructions: row.instructions,
1133
- model: this.parseJson(row.model, "model"),
1134
- tools: this.parseJson(row.tools, "tools"),
1135
- defaultOptions: this.parseJson(row.defaultOptions, "defaultOptions"),
1136
- workflows: this.parseJson(row.workflows, "workflows"),
1137
- agents: this.parseJson(row.agents, "agents"),
1138
- inputProcessors: this.parseJson(row.inputProcessors, "inputProcessors"),
1139
- outputProcessors: this.parseJson(row.outputProcessors, "outputProcessors"),
1140
- memory: this.parseJson(row.memory, "memory"),
1141
- scorers: this.parseJson(row.scorers, "scorers"),
1142
- metadata: this.parseJson(row.metadata, "metadata"),
1143
- createdAt: new Date(row.createdAt),
1144
- updatedAt: new Date(row.updatedAt)
1145
- };
1182
+ if (value instanceof Date) {
1183
+ return value.toISOString();
1146
1184
  }
1147
- async getAgentById({ id }) {
1148
- try {
1149
- const result = await this.client.execute({
1150
- sql: `SELECT * FROM "${storage.TABLE_AGENTS}" WHERE id = ?`,
1151
- args: [id]
1152
- });
1153
- if (!result.rows || result.rows.length === 0) {
1154
- return null;
1155
- }
1156
- return this.parseRow(result.rows[0]);
1157
- } catch (error$1) {
1158
- throw new error.MastraError(
1159
- {
1160
- id: storage.createStorageErrorId("LIBSQL", "GET_AGENT_BY_ID", "FAILED"),
1161
- domain: error.ErrorDomain.STORAGE,
1162
- category: error.ErrorCategory.THIRD_PARTY,
1163
- details: { agentId: id }
1164
- },
1165
- error$1
1166
- );
1185
+ return typeof value === "object" ? JSON.stringify(value) : value;
1186
+ }
1187
+ function prepareDeleteStatement({ tableName, keys }) {
1188
+ const parsedTableName = utils.parseSqlIdentifier(tableName, "table name");
1189
+ const whereClause = prepareWhereClause(keys, storage.TABLE_SCHEMAS[tableName]);
1190
+ return {
1191
+ sql: `DELETE FROM ${parsedTableName}${whereClause.sql}`,
1192
+ args: whereClause.args
1193
+ };
1194
+ }
1195
+ function prepareWhereClause(filters, schema) {
1196
+ const conditions = [];
1197
+ const args = [];
1198
+ for (const [columnName, filterValue] of Object.entries(filters)) {
1199
+ const column = schema[columnName];
1200
+ if (!column) {
1201
+ throw new Error(`Unknown column: ${columnName}`);
1167
1202
  }
1203
+ const parsedColumn = utils.parseSqlIdentifier(columnName, "column name");
1204
+ const result = buildCondition2(parsedColumn, filterValue);
1205
+ conditions.push(result.condition);
1206
+ args.push(...result.args);
1168
1207
  }
1169
- async createAgent({ agent }) {
1170
- try {
1171
- const now = /* @__PURE__ */ new Date();
1172
- const nowIso = now.toISOString();
1173
- await this.client.execute({
1174
- sql: `INSERT INTO "${storage.TABLE_AGENTS}" (id, name, description, instructions, model, tools, "defaultOptions", workflows, agents, "inputProcessors", "outputProcessors", memory, scorers, metadata, "createdAt", "updatedAt")
1175
- VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
1176
- args: [
1177
- agent.id,
1178
- agent.name,
1179
- agent.description ?? null,
1180
- agent.instructions,
1181
- JSON.stringify(agent.model),
1182
- agent.tools ? JSON.stringify(agent.tools) : null,
1183
- agent.defaultOptions ? JSON.stringify(agent.defaultOptions) : null,
1184
- agent.workflows ? JSON.stringify(agent.workflows) : null,
1185
- agent.agents ? JSON.stringify(agent.agents) : null,
1186
- agent.inputProcessors ? JSON.stringify(agent.inputProcessors) : null,
1187
- agent.outputProcessors ? JSON.stringify(agent.outputProcessors) : null,
1188
- agent.memory ? JSON.stringify(agent.memory) : null,
1189
- agent.scorers ? JSON.stringify(agent.scorers) : null,
1190
- agent.metadata ? JSON.stringify(agent.metadata) : null,
1191
- nowIso,
1192
- nowIso
1193
- ]
1194
- });
1195
- return {
1196
- ...agent,
1197
- createdAt: now,
1198
- updatedAt: now
1199
- };
1200
- } catch (error$1) {
1201
- throw new error.MastraError(
1202
- {
1203
- id: storage.createStorageErrorId("LIBSQL", "CREATE_AGENT", "FAILED"),
1204
- domain: error.ErrorDomain.STORAGE,
1205
- category: error.ErrorCategory.THIRD_PARTY,
1206
- details: { agentId: agent.id }
1207
- },
1208
- error$1
1209
- );
1210
- }
1208
+ return {
1209
+ sql: conditions.length > 0 ? ` WHERE ${conditions.join(" AND ")}` : "",
1210
+ args
1211
+ };
1212
+ }
1213
+ function buildCondition2(columnName, filterValue) {
1214
+ if (filterValue === null) {
1215
+ return { condition: `${columnName} IS NULL`, args: [] };
1211
1216
  }
1212
- async updateAgent({ id, ...updates }) {
1213
- try {
1214
- const existingAgent = await this.getAgentById({ id });
1215
- if (!existingAgent) {
1216
- throw new error.MastraError({
1217
- id: storage.createStorageErrorId("LIBSQL", "UPDATE_AGENT", "NOT_FOUND"),
1218
- domain: error.ErrorDomain.STORAGE,
1219
- category: error.ErrorCategory.USER,
1220
- text: `Agent ${id} not found`,
1221
- details: { agentId: id }
1222
- });
1223
- }
1224
- const setClauses = [];
1225
- const args = [];
1226
- if (updates.name !== void 0) {
1227
- setClauses.push("name = ?");
1228
- args.push(updates.name);
1229
- }
1230
- if (updates.description !== void 0) {
1231
- setClauses.push("description = ?");
1232
- args.push(updates.description);
1233
- }
1234
- if (updates.instructions !== void 0) {
1235
- setClauses.push("instructions = ?");
1236
- args.push(updates.instructions);
1237
- }
1238
- if (updates.model !== void 0) {
1239
- setClauses.push("model = ?");
1240
- args.push(JSON.stringify(updates.model));
1241
- }
1242
- if (updates.tools !== void 0) {
1243
- setClauses.push("tools = ?");
1244
- args.push(JSON.stringify(updates.tools));
1245
- }
1246
- if (updates.defaultOptions !== void 0) {
1247
- setClauses.push('"defaultOptions" = ?');
1248
- args.push(JSON.stringify(updates.defaultOptions));
1249
- }
1250
- if (updates.workflows !== void 0) {
1251
- setClauses.push("workflows = ?");
1252
- args.push(JSON.stringify(updates.workflows));
1253
- }
1254
- if (updates.agents !== void 0) {
1255
- setClauses.push("agents = ?");
1256
- args.push(JSON.stringify(updates.agents));
1257
- }
1258
- if (updates.inputProcessors !== void 0) {
1259
- setClauses.push('"inputProcessors" = ?');
1260
- args.push(JSON.stringify(updates.inputProcessors));
1261
- }
1262
- if (updates.outputProcessors !== void 0) {
1263
- setClauses.push('"outputProcessors" = ?');
1264
- args.push(JSON.stringify(updates.outputProcessors));
1265
- }
1266
- if (updates.memory !== void 0) {
1267
- setClauses.push("memory = ?");
1268
- args.push(JSON.stringify(updates.memory));
1269
- }
1270
- if (updates.scorers !== void 0) {
1271
- setClauses.push("scorers = ?");
1272
- args.push(JSON.stringify(updates.scorers));
1273
- }
1274
- if (updates.metadata !== void 0) {
1275
- const mergedMetadata = { ...existingAgent.metadata, ...updates.metadata };
1276
- setClauses.push("metadata = ?");
1277
- args.push(JSON.stringify(mergedMetadata));
1278
- }
1279
- const now = /* @__PURE__ */ new Date();
1280
- setClauses.push('"updatedAt" = ?');
1281
- args.push(now.toISOString());
1282
- args.push(id);
1283
- if (setClauses.length > 1) {
1284
- await this.client.execute({
1285
- sql: `UPDATE "${storage.TABLE_AGENTS}" SET ${setClauses.join(", ")} WHERE id = ?`,
1286
- args
1287
- });
1288
- }
1289
- const updatedAgent = await this.getAgentById({ id });
1290
- if (!updatedAgent) {
1291
- throw new error.MastraError({
1292
- id: storage.createStorageErrorId("LIBSQL", "UPDATE_AGENT", "NOT_FOUND_AFTER_UPDATE"),
1293
- domain: error.ErrorDomain.STORAGE,
1294
- category: error.ErrorCategory.SYSTEM,
1295
- text: `Agent ${id} not found after update`,
1296
- details: { agentId: id }
1297
- });
1298
- }
1299
- return updatedAgent;
1300
- } catch (error$1) {
1301
- if (error$1 instanceof error.MastraError) {
1302
- throw error$1;
1303
- }
1304
- throw new error.MastraError(
1305
- {
1306
- id: storage.createStorageErrorId("LIBSQL", "UPDATE_AGENT", "FAILED"),
1307
- domain: error.ErrorDomain.STORAGE,
1308
- category: error.ErrorCategory.THIRD_PARTY,
1309
- details: { agentId: id }
1310
- },
1311
- error$1
1312
- );
1313
- }
1217
+ if (typeof filterValue === "object" && filterValue !== null && ("startAt" in filterValue || "endAt" in filterValue)) {
1218
+ return buildDateRangeCondition(columnName, filterValue);
1314
1219
  }
1315
- async deleteAgent({ id }) {
1316
- try {
1317
- await this.client.execute({
1318
- sql: `DELETE FROM "${storage.TABLE_AGENTS}" WHERE id = ?`,
1319
- args: [id]
1320
- });
1321
- } catch (error$1) {
1322
- throw new error.MastraError(
1323
- {
1324
- id: storage.createStorageErrorId("LIBSQL", "DELETE_AGENT", "FAILED"),
1325
- domain: error.ErrorDomain.STORAGE,
1326
- category: error.ErrorCategory.THIRD_PARTY,
1327
- details: { agentId: id }
1328
- },
1329
- error$1
1330
- );
1331
- }
1220
+ return {
1221
+ condition: `${columnName} = ?`,
1222
+ args: [transformToSqlValue(filterValue)]
1223
+ };
1224
+ }
1225
+ function buildDateRangeCondition(columnName, range) {
1226
+ const conditions = [];
1227
+ const args = [];
1228
+ if (range.startAt !== void 0) {
1229
+ conditions.push(`${columnName} >= ?`);
1230
+ args.push(transformToSqlValue(range.startAt));
1332
1231
  }
1333
- async listAgents(args) {
1334
- const { page = 0, perPage: perPageInput, orderBy } = args || {};
1335
- const { field, direction } = this.parseOrderBy(orderBy);
1336
- if (page < 0) {
1337
- throw new error.MastraError(
1338
- {
1339
- id: storage.createStorageErrorId("LIBSQL", "LIST_AGENTS", "INVALID_PAGE"),
1340
- domain: error.ErrorDomain.STORAGE,
1341
- category: error.ErrorCategory.USER,
1342
- details: { page }
1343
- },
1344
- new Error("page must be >= 0")
1345
- );
1232
+ if (range.endAt !== void 0) {
1233
+ conditions.push(`${columnName} <= ?`);
1234
+ args.push(transformToSqlValue(range.endAt));
1235
+ }
1236
+ if (conditions.length === 0) {
1237
+ throw new Error("Date range must specify at least startAt or endAt");
1238
+ }
1239
+ return {
1240
+ condition: conditions.join(" AND "),
1241
+ args
1242
+ };
1243
+ }
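prepareWhereClause, buildCondition2 and buildDateRangeCondition together translate a filter object into a parameterized WHERE clause: null becomes IS NULL, { startAt, endAt } becomes a date range, anything else an equality check, and an unknown column throws. A sketch with a hypothetical schema and filter set:

const demoSchema = {
  status:    { type: "text" },
  deletedAt: { type: "timestamp" },
  createdAt: { type: "timestamp" },
};
const where = prepareWhereClause(
  { status: "active", deletedAt: null, createdAt: { startAt: new Date("2024-01-01") } },
  demoSchema,
);
// where.sql  === " WHERE status = ? AND deletedAt IS NULL AND createdAt >= ?"
// where.args === ["active", "2024-01-01T00:00:00.000Z"]
// prepareWhereClause({ unknown: 1 }, demoSchema) throws Error("Unknown column: unknown")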
1244
+ function transformFromSqlRow({
1245
+ tableName,
1246
+ sqlRow
1247
+ }) {
1248
+ const result = {};
1249
+ const jsonColumns = new Set(
1250
+ Object.keys(storage.TABLE_SCHEMAS[tableName]).filter((key) => storage.TABLE_SCHEMAS[tableName][key].type === "jsonb").map((key) => key)
1251
+ );
1252
+ const dateColumns = new Set(
1253
+ Object.keys(storage.TABLE_SCHEMAS[tableName]).filter((key) => storage.TABLE_SCHEMAS[tableName][key].type === "timestamp").map((key) => key)
1254
+ );
1255
+ for (const [key, value] of Object.entries(sqlRow)) {
1256
+ if (value === null || value === void 0) {
1257
+ result[key] = value;
1258
+ continue;
1346
1259
  }
1347
- const perPage = storage.normalizePerPage(perPageInput, 100);
1348
- const { offset, perPage: perPageForResponse } = storage.calculatePagination(page, perPageInput, perPage);
1349
- try {
1350
- const countResult = await this.client.execute({
1351
- sql: `SELECT COUNT(*) as count FROM "${storage.TABLE_AGENTS}"`,
1352
- args: []
1353
- });
1354
- const total = Number(countResult.rows?.[0]?.count ?? 0);
1355
- if (total === 0) {
1356
- return {
1357
- agents: [],
1358
- total: 0,
1359
- page,
1360
- perPage: perPageForResponse,
1361
- hasMore: false
1362
- };
1363
- }
1364
- const limitValue = perPageInput === false ? total : perPage;
1365
- const dataResult = await this.client.execute({
1366
- sql: `SELECT * FROM "${storage.TABLE_AGENTS}" ORDER BY "${field}" ${direction} LIMIT ? OFFSET ?`,
1367
- args: [limitValue, offset]
1368
- });
1369
- const agents = (dataResult.rows || []).map((row) => this.parseRow(row));
1370
- return {
1371
- agents,
1372
- total,
1373
- page,
1374
- perPage: perPageForResponse,
1375
- hasMore: perPageInput === false ? false : offset + perPage < total
1376
- };
1377
- } catch (error$1) {
1378
- throw new error.MastraError(
1379
- {
1380
- id: storage.createStorageErrorId("LIBSQL", "LIST_AGENTS", "FAILED"),
1381
- domain: error.ErrorDomain.STORAGE,
1382
- category: error.ErrorCategory.THIRD_PARTY
1383
- },
1384
- error$1
1385
- );
1260
+ if (dateColumns.has(key) && typeof value === "string") {
1261
+ result[key] = new Date(value);
1262
+ continue;
1263
+ }
1264
+ if (jsonColumns.has(key) && typeof value === "string") {
1265
+ result[key] = storage.safelyParseJSON(value);
1266
+ continue;
1386
1267
  }
1268
+ result[key] = value;
1387
1269
  }
1388
- };
1389
- var MemoryLibSQL = class extends storage.MemoryStorage {
1270
+ return result;
1271
+ }
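transformFromSqlRow is the read-side counterpart: based on the table's schema it turns timestamp columns back into Date objects and jsonb columns back into values (via storage.safelyParseJSON, which tolerates malformed JSON). A standalone sketch of the same rule against a hypothetical schema:

const rowSchema = { id: { type: "text" }, metadata: { type: "jsonb" }, createdAt: { type: "timestamp" } };
const sqlRow = { id: "a-1", metadata: '{"tier":"pro"}', createdAt: "2024-05-01T12:00:00.000Z" };
const rehydrated = Object.fromEntries(
  Object.entries(sqlRow).map(([key, value]) => {
    if (value == null) return [key, value];                                                    // null/undefined pass through
    if (rowSchema[key]?.type === "timestamp" && typeof value === "string") return [key, new Date(value)];
    if (rowSchema[key]?.type === "jsonb" && typeof value === "string") return [key, JSON.parse(value)];
    return [key, value];
  }),
);
// rehydrated.metadata => { tier: "pro" },  rehydrated.createdAt => Date instance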
1272
+
1273
+ // src/storage/db/index.ts
1274
+ function resolveClient(config) {
1275
+ if ("client" in config) {
1276
+ return config.client;
1277
+ }
1278
+ return client.createClient({
1279
+ url: config.url,
1280
+ ...config.authToken ? { authToken: config.authToken } : {}
1281
+ });
1282
+ }
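resolveClient accepts either an existing @libsql/client instance or a url (plus optional authToken) from which it creates one. A sketch of the accepted config shapes; the URLs and token are illustrative:

const { createClient } = require("@libsql/client");

const shared = createClient({ url: "file:./mastra.db" });
resolveClient({ client: shared });                                       // reuse a caller-provided client
resolveClient({ url: "file:./mastra.db" });                              // local SQLite file
resolveClient({ url: "libsql://example.turso.io", authToken: "***" });   // remote libsql with auth token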
1283
+ var LibSQLDB = class extends base.MastraBase {
1390
1284
  client;
1391
- operations;
1392
- constructor({ client, operations }) {
1393
- super();
1285
+ maxRetries;
1286
+ initialBackoffMs;
1287
+ executeWriteOperationWithRetry;
1288
+ constructor({
1289
+ client,
1290
+ maxRetries,
1291
+ initialBackoffMs
1292
+ }) {
1293
+ super({
1294
+ component: "STORAGE",
1295
+ name: "LIBSQL_DB_LAYER"
1296
+ });
1394
1297
  this.client = client;
1395
- this.operations = operations;
1396
- }
1397
- parseRow(row) {
1398
- let content = row.content;
1399
- try {
1400
- content = JSON.parse(row.content);
1401
- } catch {
1402
- }
1403
- const result = {
1404
- id: row.id,
1405
- content,
1406
- role: row.role,
1407
- createdAt: new Date(row.createdAt),
1408
- threadId: row.thread_id,
1409
- resourceId: row.resourceId
1410
- };
1411
- if (row.type && row.type !== `v2`) result.type = row.type;
1412
- return result;
1298
+ this.maxRetries = maxRetries ?? 5;
1299
+ this.initialBackoffMs = initialBackoffMs ?? 100;
1300
+ this.executeWriteOperationWithRetry = createExecuteWriteOperationWithRetry({
1301
+ logger: this.logger,
1302
+ maxRetries: this.maxRetries,
1303
+ initialBackoffMs: this.initialBackoffMs
1304
+ });
1413
1305
  }
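The retry knobs default to 5 attempts and a 100 ms initial backoff (doubling per attempt), so a write against a persistently locked database waits roughly 100, 200, 400 and 800 ms before the final attempt's error is thrown. A construction sketch; someLibsqlClient stands in for any @libsql/client instance:

const db = new LibSQLDB({ client: someLibsqlClient });                                   // defaults: maxRetries 5, initialBackoffMs 100
const patientDb = new LibSQLDB({ client: someLibsqlClient, maxRetries: 8, initialBackoffMs: 50 });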
1414
- async _getIncludedMessages({ include }) {
1415
- if (!include || include.length === 0) return null;
1416
- const unionQueries = [];
1417
- const params = [];
1418
- for (const inc of include) {
1419
- const { id, withPreviousMessages = 0, withNextMessages = 0 } = inc;
1420
- unionQueries.push(
1421
- `
1422
- SELECT * FROM (
1423
- WITH target_thread AS (
1424
- SELECT thread_id FROM "${storage.TABLE_MESSAGES}" WHERE id = ?
1425
- ),
1426
- numbered_messages AS (
1427
- SELECT
1428
- id, content, role, type, "createdAt", thread_id, "resourceId",
1429
- ROW_NUMBER() OVER (ORDER BY "createdAt" ASC) as row_num
1430
- FROM "${storage.TABLE_MESSAGES}"
1431
- WHERE thread_id = (SELECT thread_id FROM target_thread)
1432
- ),
1433
- target_positions AS (
1434
- SELECT row_num as target_pos
1435
- FROM numbered_messages
1436
- WHERE id = ?
1437
- )
1438
- SELECT DISTINCT m.*
1439
- FROM numbered_messages m
1440
- CROSS JOIN target_positions t
1441
- WHERE m.row_num BETWEEN (t.target_pos - ?) AND (t.target_pos + ?)
1442
- )
1443
- `
1444
- // Keep ASC for final sorting after fetching context
1445
- );
1446
- params.push(id, id, withPreviousMessages, withNextMessages);
1447
- }
1448
- const finalQuery = unionQueries.join(" UNION ALL ") + ' ORDER BY "createdAt" ASC';
1449
- const includedResult = await this.client.execute({ sql: finalQuery, args: params });
1450
- const includedRows = includedResult.rows?.map((row) => this.parseRow(row));
1451
- const seen = /* @__PURE__ */ new Set();
1452
- const dedupedRows = includedRows.filter((row) => {
1453
- if (seen.has(row.id)) return false;
1454
- seen.add(row.id);
1455
- return true;
1306
+ /**
1307
+ * Checks if a column exists in the specified table.
1308
+ *
1309
+ * @param table - The name of the table to check
1310
+ * @param column - The name of the column to look for
1311
+ * @returns `true` if the column exists in the table, `false` otherwise
1312
+ */
1313
+ async hasColumn(table, column) {
1314
+ const sanitizedTable = utils.parseSqlIdentifier(table, "table name");
1315
+ const result = await this.client.execute({
1316
+ sql: `PRAGMA table_info("${sanitizedTable}")`
1456
1317
  });
1457
- return dedupedRows;
1318
+ return result.rows?.some((row) => row.name === column);
1319
+ }
1320
+ /**
1321
+ * Internal insert implementation without retry logic.
1322
+ */
1323
+ async doInsert({
1324
+ tableName,
1325
+ record
1326
+ }) {
1327
+ await this.client.execute(
1328
+ prepareStatement({
1329
+ tableName,
1330
+ record
1331
+ })
1332
+ );
1333
+ }
1334
+ /**
1335
+ * Inserts or replaces a record in the specified table with automatic retry on lock errors.
1336
+ *
1337
+ * @param args - The insert arguments
1338
+ * @param args.tableName - The name of the table to insert into
1339
+ * @param args.record - The record to insert (key-value pairs)
1340
+ */
1341
+ insert(args) {
1342
+ return this.executeWriteOperationWithRetry(() => this.doInsert(args), `insert into table ${args.tableName}`);
1458
1343
  }
1459
- async listMessagesById({ messageIds }) {
1460
- if (messageIds.length === 0) return { messages: [] };
1461
- try {
1462
- const sql = `
1463
- SELECT
1464
- id,
1465
- content,
1466
- role,
1467
- type,
1468
- "createdAt",
1469
- thread_id,
1470
- "resourceId"
1471
- FROM "${storage.TABLE_MESSAGES}"
1472
- WHERE id IN (${messageIds.map(() => "?").join(", ")})
1473
- ORDER BY "createdAt" DESC
1474
- `;
1475
- const result = await this.client.execute({ sql, args: messageIds });
1476
- if (!result.rows) return { messages: [] };
1477
- const list = new agent.MessageList().add(result.rows.map(this.parseRow), "memory");
1478
- return { messages: list.get.all.db() };
1479
- } catch (error$1) {
1344
+ /**
1345
+ * Internal update implementation without retry logic.
1346
+ */
1347
+ async doUpdate({
1348
+ tableName,
1349
+ keys,
1350
+ data
1351
+ }) {
1352
+ await this.client.execute(prepareUpdateStatement({ tableName, updates: data, keys }));
1353
+ }
1354
+ /**
1355
+ * Updates a record in the specified table with automatic retry on lock errors.
1356
+ *
1357
+ * @param args - The update arguments
1358
+ * @param args.tableName - The name of the table to update
1359
+ * @param args.keys - The key(s) identifying the record to update
1360
+ * @param args.data - The fields to update (key-value pairs)
1361
+ */
1362
+ update(args) {
1363
+ return this.executeWriteOperationWithRetry(() => this.doUpdate(args), `update table ${args.tableName}`);
1364
+ }
1365
+ /**
1366
+ * Internal batch insert implementation without retry logic.
1367
+ */
1368
+ async doBatchInsert({
1369
+ tableName,
1370
+ records
1371
+ }) {
1372
+ if (records.length === 0) return;
1373
+ const batchStatements = records.map((r) => prepareStatement({ tableName, record: r }));
1374
+ await this.client.batch(batchStatements, "write");
1375
+ }
1376
+ /**
1377
+ * Inserts multiple records in a single batch transaction with automatic retry on lock errors.
1378
+ *
1379
+ * @param args - The batch insert arguments
1380
+ * @param args.tableName - The name of the table to insert into
1381
+ * @param args.records - Array of records to insert
1382
+ * @throws {MastraError} When the batch insert fails after retries
1383
+ */
1384
+ async batchInsert(args) {
1385
+ return this.executeWriteOperationWithRetry(
1386
+ () => this.doBatchInsert(args),
1387
+ `batch insert into table ${args.tableName}`
1388
+ ).catch((error$1) => {
1480
1389
  throw new error.MastraError(
1481
1390
  {
1482
- id: storage.createStorageErrorId("LIBSQL", "LIST_MESSAGES_BY_ID", "FAILED"),
1391
+ id: storage.createStorageErrorId("LIBSQL", "BATCH_INSERT", "FAILED"),
1483
1392
  domain: error.ErrorDomain.STORAGE,
1484
1393
  category: error.ErrorCategory.THIRD_PARTY,
1485
- details: { messageIds: JSON.stringify(messageIds) }
1394
+ details: {
1395
+ tableName: args.tableName
1396
+ }
1486
1397
  },
1487
1398
  error$1
1488
1399
  );
1489
- }
1400
+ });
1490
1401
  }
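batchInsert sends all rows as a single client.batch(..., "write") call and retries the whole batch as a unit if the database is locked. A usage sketch inside an async context, given a LibSQLDB instance db and a hypothetical demo_events table created beforehand:

await db.batchInsert({
  tableName: "demo_events",
  records: [
    { id: "evt-1", payload: { kind: "start" }, createdAt: new Date() },
    { id: "evt-2", payload: { kind: "stop" },  createdAt: new Date() },
  ],
});
// Each record becomes one INSERT OR REPLACE statement; object values are stored as JSON text.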
1491
- async listMessages(args) {
1492
- const { threadId, resourceId, include, filter, perPage: perPageInput, page = 0, orderBy } = args;
1493
- const threadIds = Array.isArray(threadId) ? threadId : [threadId];
1494
- if (threadIds.length === 0 || threadIds.some((id) => !id.trim())) {
1402
+ /**
1403
+ * Internal batch update implementation without retry logic.
1404
+ * Each record can be updated based on single or composite keys.
1405
+ */
1406
+ async doBatchUpdate({
1407
+ tableName,
1408
+ updates
1409
+ }) {
1410
+ if (updates.length === 0) return;
1411
+ const batchStatements = updates.map(
1412
+ ({ keys, data }) => prepareUpdateStatement({
1413
+ tableName,
1414
+ updates: data,
1415
+ keys
1416
+ })
1417
+ );
1418
+ await this.client.batch(batchStatements, "write");
1419
+ }
1420
+ /**
1421
+ * Updates multiple records in a single batch transaction with automatic retry on lock errors.
1422
+ * Each record can be updated based on single or composite keys.
1423
+ *
1424
+ * @param args - The batch update arguments
1425
+ * @param args.tableName - The name of the table to update
1426
+ * @param args.updates - Array of update operations, each containing keys and data
1427
+ * @throws {MastraError} When the batch update fails after retries
1428
+ */
1429
+ async batchUpdate(args) {
1430
+ return this.executeWriteOperationWithRetry(
1431
+ () => this.doBatchUpdate(args),
1432
+ `batch update in table ${args.tableName}`
1433
+ ).catch((error$1) => {
1495
1434
  throw new error.MastraError(
1496
1435
  {
1497
- id: storage.createStorageErrorId("LIBSQL", "LIST_MESSAGES", "INVALID_THREAD_ID"),
1436
+ id: storage.createStorageErrorId("LIBSQL", "BATCH_UPDATE", "FAILED"),
1498
1437
  domain: error.ErrorDomain.STORAGE,
1499
1438
  category: error.ErrorCategory.THIRD_PARTY,
1500
- details: { threadId: Array.isArray(threadId) ? threadId.join(",") : threadId }
1439
+ details: {
1440
+ tableName: args.tableName
1441
+ }
1501
1442
  },
1502
- new Error("threadId must be a non-empty string or array of non-empty strings")
1443
+ error$1
1503
1444
  );
1504
- }
1505
- if (page < 0) {
1445
+ });
1446
+ }
1447
+ /**
1448
+ * Internal batch delete implementation without retry logic.
1449
+ * Each record can be deleted based on single or composite keys.
1450
+ */
1451
+ async doBatchDelete({
1452
+ tableName,
1453
+ keys
1454
+ }) {
1455
+ if (keys.length === 0) return;
1456
+ const batchStatements = keys.map(
1457
+ (keyObj) => prepareDeleteStatement({
1458
+ tableName,
1459
+ keys: keyObj
1460
+ })
1461
+ );
1462
+ await this.client.batch(batchStatements, "write");
1463
+ }
1464
+ /**
1465
+ * Deletes multiple records in a single batch transaction with automatic retry on lock errors.
1466
+ * Each record can be deleted based on single or composite keys.
1467
+ *
1468
+ * @param args - The batch delete arguments
1469
+ * @param args.tableName - The name of the table to delete from
1470
+ * @param args.keys - Array of key objects identifying records to delete
1471
+ * @throws {MastraError} When the batch delete fails after retries
1472
+ */
1473
+ async batchDelete({
1474
+ tableName,
1475
+ keys
1476
+ }) {
1477
+ return this.executeWriteOperationWithRetry(
1478
+ () => this.doBatchDelete({ tableName, keys }),
1479
+ `batch delete from table ${tableName}`
1480
+ ).catch((error$1) => {
1506
1481
  throw new error.MastraError(
1507
1482
  {
1508
- id: storage.createStorageErrorId("LIBSQL", "LIST_MESSAGES", "INVALID_PAGE"),
1509
- domain: error.ErrorDomain.STORAGE,
1510
- category: error.ErrorCategory.USER,
1511
- details: { page }
1512
- },
1513
- new Error("page must be >= 0")
1514
- );
1515
- }
1516
- const perPage = storage.normalizePerPage(perPageInput, 40);
1517
- const { offset, perPage: perPageForResponse } = storage.calculatePagination(page, perPageInput, perPage);
1518
- try {
1519
- const { field, direction } = this.parseOrderBy(orderBy, "ASC");
1520
- const orderByStatement = `ORDER BY "${field}" ${direction}`;
1521
- const threadPlaceholders = threadIds.map(() => "?").join(", ");
1522
- const conditions = [`thread_id IN (${threadPlaceholders})`];
1523
- const queryParams = [...threadIds];
1524
- if (resourceId) {
1525
- conditions.push(`"resourceId" = ?`);
1526
- queryParams.push(resourceId);
1527
- }
1528
- if (filter?.dateRange?.start) {
1529
- conditions.push(`"createdAt" >= ?`);
1530
- queryParams.push(
1531
- filter.dateRange.start instanceof Date ? filter.dateRange.start.toISOString() : filter.dateRange.start
1532
- );
1533
- }
1534
- if (filter?.dateRange?.end) {
1535
- conditions.push(`"createdAt" <= ?`);
1536
- queryParams.push(
1537
- filter.dateRange.end instanceof Date ? filter.dateRange.end.toISOString() : filter.dateRange.end
1538
- );
1539
- }
1540
- const whereClause = conditions.length > 0 ? `WHERE ${conditions.join(" AND ")}` : "";
1541
- const countResult = await this.client.execute({
1542
- sql: `SELECT COUNT(*) as count FROM ${storage.TABLE_MESSAGES} ${whereClause}`,
1543
- args: queryParams
1544
- });
1545
- const total = Number(countResult.rows?.[0]?.count ?? 0);
1546
- const limitValue = perPageInput === false ? total : perPage;
1547
- const dataResult = await this.client.execute({
1548
- sql: `SELECT id, content, role, type, "createdAt", "resourceId", "thread_id" FROM ${storage.TABLE_MESSAGES} ${whereClause} ${orderByStatement} LIMIT ? OFFSET ?`,
1549
- args: [...queryParams, limitValue, offset]
1550
- });
1551
- const messages = (dataResult.rows || []).map((row) => this.parseRow(row));
1552
- if (total === 0 && messages.length === 0 && (!include || include.length === 0)) {
1553
- return {
1554
- messages: [],
1555
- total: 0,
1556
- page,
1557
- perPage: perPageForResponse,
1558
- hasMore: false
1559
- };
1560
- }
1561
- const messageIds = new Set(messages.map((m) => m.id));
1562
- if (include && include.length > 0) {
1563
- const includeMessages = await this._getIncludedMessages({ include });
1564
- if (includeMessages) {
1565
- for (const includeMsg of includeMessages) {
1566
- if (!messageIds.has(includeMsg.id)) {
1567
- messages.push(includeMsg);
1568
- messageIds.add(includeMsg.id);
1569
- }
1570
- }
1571
- }
1572
- }
1573
- const list = new agent.MessageList().add(messages, "memory");
1574
- let finalMessages = list.get.all.db();
1575
- finalMessages = finalMessages.sort((a, b) => {
1576
- const isDateField = field === "createdAt" || field === "updatedAt";
1577
- const aValue = isDateField ? new Date(a[field]).getTime() : a[field];
1578
- const bValue = isDateField ? new Date(b[field]).getTime() : b[field];
1579
- if (typeof aValue === "number" && typeof bValue === "number") {
1580
- return direction === "ASC" ? aValue - bValue : bValue - aValue;
1581
- }
1582
- return direction === "ASC" ? String(aValue).localeCompare(String(bValue)) : String(bValue).localeCompare(String(aValue));
1583
- });
1584
- const threadIdSet = new Set(threadIds);
1585
- const returnedThreadMessageIds = new Set(
1586
- finalMessages.filter((m) => m.threadId && threadIdSet.has(m.threadId)).map((m) => m.id)
1587
- );
1588
- const allThreadMessagesReturned = returnedThreadMessageIds.size >= total;
1589
- const hasMore = perPageInput !== false && !allThreadMessagesReturned && offset + perPage < total;
1590
- return {
1591
- messages: finalMessages,
1592
- total,
1593
- page,
1594
- perPage: perPageForResponse,
1595
- hasMore
1596
- };
1597
- } catch (error$1) {
1598
- const mastraError = new error.MastraError(
1599
- {
1600
- id: storage.createStorageErrorId("LIBSQL", "LIST_MESSAGES", "FAILED"),
1483
+ id: storage.createStorageErrorId("LIBSQL", "BATCH_DELETE", "FAILED"),
1601
1484
  domain: error.ErrorDomain.STORAGE,
1602
1485
  category: error.ErrorCategory.THIRD_PARTY,
1603
1486
  details: {
1604
- threadId: Array.isArray(threadId) ? threadId.join(",") : threadId,
1605
- resourceId: resourceId ?? ""
1487
+ tableName
1606
1488
  }
1607
1489
  },
1608
1490
  error$1
1609
1491
  );
1610
- this.logger?.error?.(mastraError.toString());
1611
- this.logger?.trackException?.(mastraError);
1612
- return {
1613
- messages: [],
1614
- total: 0,
1615
- page,
1616
- perPage: perPageForResponse,
1617
- hasMore: false
1618
- };
1492
+ });
1493
+ }
1494
+ /**
1495
+ * Internal single-record delete implementation without retry logic.
1496
+ */
1497
+ async doDelete({ tableName, keys }) {
1498
+ await this.client.execute(prepareDeleteStatement({ tableName, keys }));
1499
+ }
1500
+ /**
1501
+ * Deletes a single record from the specified table with automatic retry on lock errors.
1502
+ *
1503
+ * @param args - The delete arguments
1504
+ * @param args.tableName - The name of the table to delete from
1505
+ * @param args.keys - The key(s) identifying the record to delete
1506
+ * @throws {MastraError} When the delete fails after retries
1507
+ */
1508
+ async delete(args) {
1509
+ return this.executeWriteOperationWithRetry(() => this.doDelete(args), `delete from table ${args.tableName}`).catch(
1510
+ (error$1) => {
1511
+ throw new error.MastraError(
1512
+ {
1513
+ id: storage.createStorageErrorId("LIBSQL", "DELETE", "FAILED"),
1514
+ domain: error.ErrorDomain.STORAGE,
1515
+ category: error.ErrorCategory.THIRD_PARTY,
1516
+ details: {
1517
+ tableName: args.tableName
1518
+ }
1519
+ },
1520
+ error$1
1521
+ );
1522
+ }
1523
+ );
1524
+ }
1525
+ /**
1526
+ * Selects a single record from the specified table by key(s).
1527
+ * Returns the most recently created record if multiple matches exist.
1528
+ * Automatically parses JSON string values back to objects/arrays.
1529
+ *
1530
+ * @typeParam R - The expected return type of the record
1531
+ * @param args - The select arguments
1532
+ * @param args.tableName - The name of the table to select from
1533
+ * @param args.keys - The key(s) identifying the record to select
1534
+ * @returns The matching record or `null` if not found
1535
+ */
1536
+ async select({ tableName, keys }) {
1537
+ const parsedTableName = utils.parseSqlIdentifier(tableName, "table name");
1538
+ const parsedKeys = Object.keys(keys).map((key) => utils.parseSqlIdentifier(key, "column name"));
1539
+ const conditions = parsedKeys.map((key) => `${key} = ?`).join(" AND ");
1540
+ const values = Object.values(keys);
1541
+ const result = await this.client.execute({
1542
+ sql: `SELECT * FROM ${parsedTableName} WHERE ${conditions} ORDER BY createdAt DESC LIMIT 1`,
1543
+ args: values
1544
+ });
1545
+ if (!result.rows || result.rows.length === 0) {
1546
+ return null;
1547
+ }
1548
+ const row = result.rows[0];
1549
+ const parsed = Object.fromEntries(
1550
+ Object.entries(row || {}).map(([k, v]) => {
1551
+ try {
1552
+ return [k, typeof v === "string" ? v.startsWith("{") || v.startsWith("[") ? JSON.parse(v) : v : v];
1553
+ } catch {
1554
+ return [k, v];
1555
+ }
1556
+ })
1557
+ );
1558
+ return parsed;
1559
+ }
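select returns the newest matching row (ORDER BY createdAt DESC LIMIT 1), with string values that look like JSON parsed back into objects, or null when nothing matches. Sketch under the same assumptions as above (async context, illustrative table):

const row = await db.select({ tableName: "demo_events", keys: { id: "evt-1" } });
// row === null when not found; otherwise row.payload is an object again, not a JSON string.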
1560
+ /**
1561
+ * Selects multiple records from the specified table with optional filtering, ordering, and pagination.
1562
+ *
1563
+ * @typeParam R - The expected return type of each record
1564
+ * @param args - The select arguments
1565
+ * @param args.tableName - The name of the table to select from
1566
+ * @param args.whereClause - Optional WHERE clause with SQL string and arguments
1567
+ * @param args.orderBy - Optional ORDER BY clause (e.g., "createdAt DESC")
1568
+ * @param args.offset - Optional offset for pagination
1569
+ * @param args.limit - Optional limit for pagination
1570
+ * @param args.args - Optional additional query arguments
1571
+ * @returns Array of matching records
1572
+ */
1573
+ async selectMany({
1574
+ tableName,
1575
+ whereClause,
1576
+ orderBy,
1577
+ offset,
1578
+ limit,
1579
+ args
1580
+ }) {
1581
+ const parsedTableName = utils.parseSqlIdentifier(tableName, "table name");
1582
+ let statement = `SELECT * FROM ${parsedTableName}`;
1583
+ if (whereClause?.sql) {
1584
+ statement += ` ${whereClause.sql}`;
1585
+ }
1586
+ if (orderBy) {
1587
+ statement += ` ORDER BY ${orderBy}`;
1588
+ }
1589
+ if (limit) {
1590
+ statement += ` LIMIT ${limit}`;
1591
+ }
1592
+ if (offset) {
1593
+ statement += ` OFFSET ${offset}`;
1594
+ }
1595
+ const result = await this.client.execute({
1596
+ sql: statement,
1597
+ args: [...whereClause?.args ?? [], ...args ?? []]
1598
+ });
1599
+ return result.rows;
1600
+ }
1601
+ /**
1602
+ * Returns the total count of records matching the optional WHERE clause.
1603
+ *
1604
+ * @param args - The count arguments
1605
+ * @param args.tableName - The name of the table to count from
1606
+ * @param args.whereClause - Optional WHERE clause with SQL string and arguments
1607
+ * @returns The total count of matching records
1608
+ */
1609
+ async selectTotalCount({
1610
+ tableName,
1611
+ whereClause
1612
+ }) {
1613
+ const parsedTableName = utils.parseSqlIdentifier(tableName, "table name");
1614
+ const statement = `SELECT COUNT(*) as count FROM ${parsedTableName} ${whereClause ? `${whereClause.sql}` : ""}`;
1615
+ const result = await this.client.execute({
1616
+ sql: statement,
1617
+ args: whereClause?.args ?? []
1618
+ });
1619
+ if (!result.rows || result.rows.length === 0) {
1620
+ return 0;
1619
1621
  }
1622
+ return result.rows[0]?.count ?? 0;
1620
1623
  }
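selectMany and selectTotalCount pair with prepareWhereClause for paginated listings: the count query and the page query share the same WHERE clause and arguments. Sketch under the same assumptions (async context, LibSQLDB instance db, hypothetical demo_events table):

const pageSchema = { status: { type: "text" }, createdAt: { type: "timestamp" } };
const where = prepareWhereClause({ status: "active" }, pageSchema);  // { sql: " WHERE status = ?", args: ["active"] }
const total = await db.selectTotalCount({ tableName: "demo_events", whereClause: where });
const rows = await db.selectMany({
  tableName: "demo_events",
  whereClause: where,
  orderBy: '"createdAt" DESC',   // interpolated verbatim into the SQL, so keep it a trusted literal
  limit: 20,
  offset: 0,
});
const hasMore = rows.length < total;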
1621
- async saveMessages({ messages }) {
1622
- if (messages.length === 0) return { messages };
1624
+ /**
1625
+ * Maps a storage column type to its SQLite equivalent.
1626
+ */
1627
+ getSqlType(type) {
1628
+ switch (type) {
1629
+ case "bigint":
1630
+ return "INTEGER";
1631
+ // SQLite uses INTEGER for all integer sizes
1632
+ case "timestamp":
1633
+ return "TEXT";
1634
+ // Store timestamps as ISO strings in SQLite
1635
+ case "float":
1636
+ return "REAL";
1637
+ // SQLite's floating point type
1638
+ case "boolean":
1639
+ return "INTEGER";
1640
+ // SQLite uses 0/1 for booleans
1641
+ case "jsonb":
1642
+ return "TEXT";
1643
+ // Store JSON as TEXT in SQLite
1644
+ default:
1645
+ return storage.getSqlType(type);
1646
+ }
1647
+ }
1648
+ /**
1649
+ * Creates a table if it doesn't exist based on the provided schema.
1650
+ *
1651
+ * @param args - The create table arguments
1652
+ * @param args.tableName - The name of the table to create
1653
+ * @param args.schema - The schema definition for the table columns
1654
+ */
1655
+ async createTable({
1656
+ tableName,
1657
+ schema
1658
+ }) {
1623
1659
  try {
1624
- const threadId = messages[0]?.threadId;
1625
- if (!threadId) {
1626
- throw new Error("Thread ID is required");
1627
- }
1628
- const batchStatements = messages.map((message) => {
1629
- const time = message.createdAt || /* @__PURE__ */ new Date();
1630
- if (!message.threadId) {
1631
- throw new Error(
1632
- `Expected to find a threadId for message, but couldn't find one. An unexpected error has occurred.`
1633
- );
1634
- }
1635
- if (!message.resourceId) {
1636
- throw new Error(
1637
- `Expected to find a resourceId for message, but couldn't find one. An unexpected error has occurred.`
1638
- );
1639
- }
1640
- return {
1641
- sql: `INSERT INTO "${storage.TABLE_MESSAGES}" (id, thread_id, content, role, type, "createdAt", "resourceId")
1642
- VALUES (?, ?, ?, ?, ?, ?, ?)
1643
- ON CONFLICT(id) DO UPDATE SET
1644
- thread_id=excluded.thread_id,
1645
- content=excluded.content,
1646
- role=excluded.role,
1647
- type=excluded.type,
1648
- "resourceId"=excluded."resourceId"
1649
- `,
1650
- args: [
1651
- message.id,
1652
- message.threadId,
1653
- typeof message.content === "object" ? JSON.stringify(message.content) : message.content,
1654
- message.role,
1655
- message.type || "v2",
1656
- time instanceof Date ? time.toISOString() : time,
1657
- message.resourceId
1658
- ]
1659
- };
1660
- });
1661
- const now = (/* @__PURE__ */ new Date()).toISOString();
1662
- batchStatements.push({
1663
- sql: `UPDATE "${storage.TABLE_THREADS}" SET "updatedAt" = ? WHERE id = ?`,
1664
- args: [now, threadId]
1660
+ const parsedTableName = utils.parseSqlIdentifier(tableName, "table name");
1661
+ const columnDefinitions = Object.entries(schema).map(([colName, colDef]) => {
1662
+ const type = this.getSqlType(colDef.type);
1663
+ const nullable = colDef.nullable === false ? "NOT NULL" : "";
1664
+ const primaryKey = colDef.primaryKey ? "PRIMARY KEY" : "";
1665
+ return `"${colName}" ${type} ${nullable} ${primaryKey}`.trim();
1665
1666
  });
1666
- const BATCH_SIZE = 50;
1667
- const messageStatements = batchStatements.slice(0, -1);
1668
- const threadUpdateStatement = batchStatements[batchStatements.length - 1];
1669
- for (let i = 0; i < messageStatements.length; i += BATCH_SIZE) {
1670
- const batch = messageStatements.slice(i, i + BATCH_SIZE);
1671
- if (batch.length > 0) {
1672
- await this.client.batch(batch, "write");
1673
- }
1674
- }
1675
- if (threadUpdateStatement) {
1676
- await this.client.execute(threadUpdateStatement);
1667
+ const tableConstraints = [];
1668
+ if (tableName === storage.TABLE_WORKFLOW_SNAPSHOT) {
1669
+ tableConstraints.push("UNIQUE (workflow_name, run_id)");
1670
+ }
1671
+ const allDefinitions = [...columnDefinitions, ...tableConstraints].join(",\n ");
1672
+ const sql = `CREATE TABLE IF NOT EXISTS ${parsedTableName} (
1673
+ ${allDefinitions}
1674
+ )`;
1675
+ await this.client.execute(sql);
1676
+ this.logger.debug(`LibSQLDB: Created table ${tableName}`);
1677
+ if (tableName === storage.TABLE_SPANS) {
1678
+ await this.migrateSpansTable();
1677
1679
  }
1678
- const list = new agent.MessageList().add(messages, "memory");
1679
- return { messages: list.get.all.db() };
1680
1680
  } catch (error$1) {
1681
1681
  throw new error.MastraError(
1682
1682
  {
1683
- id: storage.createStorageErrorId("LIBSQL", "SAVE_MESSAGES", "FAILED"),
1683
+ id: storage.createStorageErrorId("LIBSQL", "CREATE_TABLE", "FAILED"),
1684
1684
  domain: error.ErrorDomain.STORAGE,
1685
- category: error.ErrorCategory.THIRD_PARTY
1685
+ category: error.ErrorCategory.THIRD_PARTY,
1686
+ details: { tableName }
1686
1687
  },
1687
1688
  error$1
1688
1689
  );
1689
1690
  }
1690
1691
  }
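createTable maps the column schema through getSqlType, so jsonb and timestamp columns are stored as TEXT and booleans as INTEGER. For a hypothetical schema it generates roughly the following (whitespace aside):

await db.createTable({
  tableName: "demo_events",
  schema: {
    id:        { type: "text", primaryKey: true, nullable: false },
    payload:   { type: "jsonb" },
    createdAt: { type: "timestamp", nullable: false },
  },
});
// CREATE TABLE IF NOT EXISTS demo_events (
//   "id" TEXT NOT NULL PRIMARY KEY,
//   "payload" TEXT,
//   "createdAt" TEXT NOT NULL
// )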
1691
- async updateMessages({
1692
- messages
1693
- }) {
1694
- if (messages.length === 0) {
1695
- return [];
1696
- }
1697
- const messageIds = messages.map((m) => m.id);
1698
- const placeholders = messageIds.map(() => "?").join(",");
1699
- const selectSql = `SELECT * FROM ${storage.TABLE_MESSAGES} WHERE id IN (${placeholders})`;
1700
- const existingResult = await this.client.execute({ sql: selectSql, args: messageIds });
1701
- const existingMessages = existingResult.rows.map((row) => this.parseRow(row));
1702
- if (existingMessages.length === 0) {
1703
- return [];
1704
- }
1705
- const batchStatements = [];
1706
- const threadIdsToUpdate = /* @__PURE__ */ new Set();
1707
- const columnMapping = {
1708
- threadId: "thread_id"
1709
- };
1710
- for (const existingMessage of existingMessages) {
1711
- const updatePayload = messages.find((m) => m.id === existingMessage.id);
1712
- if (!updatePayload) continue;
1713
- const { id, ...fieldsToUpdate } = updatePayload;
1714
- if (Object.keys(fieldsToUpdate).length === 0) continue;
1715
- threadIdsToUpdate.add(existingMessage.threadId);
1716
- if (updatePayload.threadId && updatePayload.threadId !== existingMessage.threadId) {
1717
- threadIdsToUpdate.add(updatePayload.threadId);
1718
- }
1719
- const setClauses = [];
1720
- const args = [];
1721
- const updatableFields = { ...fieldsToUpdate };
1722
- if (updatableFields.content) {
1723
- const newContent = {
1724
- ...existingMessage.content,
1725
- ...updatableFields.content,
1726
- // Deep merge metadata if it exists on both
1727
- ...existingMessage.content?.metadata && updatableFields.content.metadata ? {
1728
- metadata: {
1729
- ...existingMessage.content.metadata,
1730
- ...updatableFields.content.metadata
1731
- }
1732
- } : {}
1733
- };
1734
- setClauses.push(`${utils.parseSqlIdentifier("content", "column name")} = ?`);
1735
- args.push(JSON.stringify(newContent));
1736
- delete updatableFields.content;
1737
- }
1738
- for (const key in updatableFields) {
1739
- if (Object.prototype.hasOwnProperty.call(updatableFields, key)) {
1740
- const dbKey = columnMapping[key] || key;
1741
- setClauses.push(`${utils.parseSqlIdentifier(dbKey, "column name")} = ?`);
1742
- let value = updatableFields[key];
1743
- if (typeof value === "object" && value !== null) {
1744
- value = JSON.stringify(value);
1745
- }
1746
- args.push(value);
1692
+ /**
1693
+ * Migrates the spans table schema from OLD_SPAN_SCHEMA to current SPAN_SCHEMA.
1694
+ * This adds new columns that don't exist in old schema.
1695
+ */
1696
+ async migrateSpansTable() {
1697
+ const schema = storage.TABLE_SCHEMAS[storage.TABLE_SPANS];
1698
+ try {
1699
+ for (const [columnName, columnDef] of Object.entries(schema)) {
1700
+ const columnExists = await this.hasColumn(storage.TABLE_SPANS, columnName);
1701
+ if (!columnExists) {
1702
+ const sqlType = this.getSqlType(columnDef.type);
1703
+ const alterSql = `ALTER TABLE "${storage.TABLE_SPANS}" ADD COLUMN "${columnName}" ${sqlType}`;
1704
+ await this.client.execute(alterSql);
1705
+ this.logger.debug(`LibSQLDB: Added column '${columnName}' to ${storage.TABLE_SPANS}`);
1747
1706
  }
1748
1707
  }
1749
- if (setClauses.length === 0) continue;
1750
- args.push(id);
1751
- const sql = `UPDATE ${storage.TABLE_MESSAGES} SET ${setClauses.join(", ")} WHERE id = ?`;
1752
- batchStatements.push({ sql, args });
1753
- }
1754
- if (batchStatements.length === 0) {
1755
- return existingMessages;
1756
- }
1757
- const now = (/* @__PURE__ */ new Date()).toISOString();
1758
- for (const threadId of threadIdsToUpdate) {
1759
- if (threadId) {
1760
- batchStatements.push({
1761
- sql: `UPDATE ${storage.TABLE_THREADS} SET updatedAt = ? WHERE id = ?`,
1762
- args: [now, threadId]
1763
- });
1764
- }
1708
+ this.logger.info(`LibSQLDB: Migration completed for ${storage.TABLE_SPANS}`);
1709
+ } catch (error) {
1710
+ this.logger.warn(`LibSQLDB: Failed to migrate spans table ${storage.TABLE_SPANS}:`, error);
1765
1711
  }
1766
- await this.client.batch(batchStatements, "write");
1767
- const updatedResult = await this.client.execute({ sql: selectSql, args: messageIds });
1768
- return updatedResult.rows.map((row) => this.parseRow(row));
1769
1712
  }
1770
- async deleteMessages(messageIds) {
1771
- if (!messageIds || messageIds.length === 0) {
1772
- return;
1713
+ /**
1714
+ * Gets a default value for a column type (used when adding NOT NULL columns).
1715
+ */
1716
+ getDefaultValue(type) {
1717
+ switch (type) {
1718
+ case "text":
1719
+ case "uuid":
1720
+ return "DEFAULT ''";
1721
+ case "integer":
1722
+ case "bigint":
1723
+ case "float":
1724
+ return "DEFAULT 0";
1725
+ case "boolean":
1726
+ return "DEFAULT 0";
1727
+ case "jsonb":
1728
+ return "DEFAULT '{}'";
1729
+ case "timestamp":
1730
+ return "DEFAULT CURRENT_TIMESTAMP";
1731
+ default:
1732
+ return "DEFAULT ''";
1773
1733
  }
1734
+ }
1735
+ /**
1736
+ * Alters an existing table to add missing columns.
1737
+ * Used for schema migrations when new columns are added.
1738
+ *
1739
+ * @param args - The alter table arguments
1740
+ * @param args.tableName - The name of the table to alter
1741
+ * @param args.schema - The full schema definition for the table
1742
+ * @param args.ifNotExists - Array of column names to add if they don't exist
1743
+ */
1744
+ async alterTable({
1745
+ tableName,
1746
+ schema,
1747
+ ifNotExists
1748
+ }) {
1749
+ const parsedTableName = utils.parseSqlIdentifier(tableName, "table name");
1774
1750
  try {
1775
- const BATCH_SIZE = 100;
1776
- const threadIds = /* @__PURE__ */ new Set();
1777
- const tx = await this.client.transaction("write");
1778
- try {
1779
- for (let i = 0; i < messageIds.length; i += BATCH_SIZE) {
1780
- const batch = messageIds.slice(i, i + BATCH_SIZE);
1781
- const placeholders = batch.map(() => "?").join(",");
1782
- const result = await tx.execute({
1783
- sql: `SELECT DISTINCT thread_id FROM "${storage.TABLE_MESSAGES}" WHERE id IN (${placeholders})`,
1784
- args: batch
1785
- });
1786
- result.rows?.forEach((row) => {
1787
- if (row.thread_id) threadIds.add(row.thread_id);
1788
- });
1789
- await tx.execute({
1790
- sql: `DELETE FROM "${storage.TABLE_MESSAGES}" WHERE id IN (${placeholders})`,
1791
- args: batch
1792
- });
1793
- }
1794
- if (threadIds.size > 0) {
1795
- const now = (/* @__PURE__ */ new Date()).toISOString();
1796
- for (const threadId of threadIds) {
1797
- await tx.execute({
1798
- sql: `UPDATE "${storage.TABLE_THREADS}" SET "updatedAt" = ? WHERE id = ?`,
1799
- args: [now, threadId]
1800
- });
1801
- }
1751
+ const tableInfo = await this.client.execute({
1752
+ sql: `PRAGMA table_info("${parsedTableName}")`
1753
+ });
1754
+ const existingColumns = new Set((tableInfo.rows || []).map((row) => row.name?.toLowerCase()));
1755
+ for (const columnName of ifNotExists) {
1756
+ if (!existingColumns.has(columnName.toLowerCase()) && schema[columnName]) {
1757
+ const columnDef = schema[columnName];
1758
+ const sqlType = this.getSqlType(columnDef.type);
1759
+ const defaultValue = this.getDefaultValue(columnDef.type);
1760
+ const alterSql = `ALTER TABLE ${parsedTableName} ADD COLUMN "${columnName}" ${sqlType} ${defaultValue}`;
1761
+ await this.client.execute(alterSql);
1762
+ this.logger.debug(`LibSQLDB: Added column ${columnName} to table ${tableName}`);
1802
1763
  }
1803
- await tx.commit();
1804
- } catch (error) {
1805
- await tx.rollback();
1806
- throw error;
1807
1764
  }
1808
1765
  } catch (error$1) {
1809
1766
  throw new error.MastraError(
1810
1767
  {
1811
- id: storage.createStorageErrorId("LIBSQL", "DELETE_MESSAGES", "FAILED"),
1768
+ id: storage.createStorageErrorId("LIBSQL", "ALTER_TABLE", "FAILED"),
1812
1769
  domain: error.ErrorDomain.STORAGE,
1813
1770
  category: error.ErrorCategory.THIRD_PARTY,
1814
- details: { messageIds: messageIds.join(", ") }
1771
+ details: { tableName }
1815
1772
  },
1816
1773
  error$1
1817
1774
  );
1818
1775
  }
1819
1776
  }
1820
- async getResourceById({ resourceId }) {
1821
- const result = await this.operations.load({
1822
- tableName: storage.TABLE_RESOURCES,
1823
- keys: { id: resourceId }
1824
- });
1825
- if (!result) {
1826
- return null;
1777
+ /**
1778
+ * Deletes all records from the specified table.
1779
+ * Errors are logged but not thrown.
1780
+ *
1781
+ * @param args - The delete arguments
1782
+ * @param args.tableName - The name of the table to clear
1783
+ */
1784
+ async deleteData({ tableName }) {
1785
+ const parsedTableName = utils.parseSqlIdentifier(tableName, "table name");
1786
+ try {
1787
+ await this.client.execute(`DELETE FROM ${parsedTableName}`);
1788
+ } catch (e) {
1789
+ const mastraError = new error.MastraError(
1790
+ {
1791
+ id: storage.createStorageErrorId("LIBSQL", "CLEAR_TABLE", "FAILED"),
1792
+ domain: error.ErrorDomain.STORAGE,
1793
+ category: error.ErrorCategory.THIRD_PARTY,
1794
+ details: {
1795
+ tableName
1796
+ }
1797
+ },
1798
+ e
1799
+ );
1800
+ this.logger?.trackException?.(mastraError);
1801
+ this.logger?.error?.(mastraError.toString());
1827
1802
  }
1828
- return {
1829
- ...result,
1830
- // Ensure workingMemory is always returned as a string, even if auto-parsed as JSON
1831
- workingMemory: result.workingMemory && typeof result.workingMemory === "object" ? JSON.stringify(result.workingMemory) : result.workingMemory,
1832
- metadata: typeof result.metadata === "string" ? JSON.parse(result.metadata) : result.metadata,
1833
- createdAt: new Date(result.createdAt),
1834
- updatedAt: new Date(result.updatedAt)
1835
- };
1836
1803
  }
1837
- async saveResource({ resource }) {
1838
- await this.operations.insert({
1839
- tableName: storage.TABLE_RESOURCES,
1840
- record: {
1841
- ...resource,
1842
- metadata: JSON.stringify(resource.metadata)
1843
- }
1844
- });
1845
- return resource;
1804
+ };
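An end-to-end sketch of the new DB layer against the agents table it is wired to below. LibSQLDB is internal to this bundle, so the flow illustrates the call shapes rather than a public API; note that update and delete resolve column types via storage.TABLE_SCHEMAS[tableName], so they are assumed here to target a table registered there:

const { createClient } = require("@libsql/client");
const storage = require("@mastra/core/storage");

async function demo() {
  const db = new LibSQLDB({ client: createClient({ url: "file:./demo.db" }) });
  await db.createTable({ tableName: storage.TABLE_AGENTS, schema: storage.AGENTS_SCHEMA });
  const now = new Date();
  await db.insert({
    tableName: storage.TABLE_AGENTS,
    record: { id: "agent-1", name: "Demo", instructions: "Be brief.", model: { id: "gpt-4o-mini" }, createdAt: now, updatedAt: now },
  });
  // update assumes the agents table appears in storage.TABLE_SCHEMAS (used to type the key columns)
  await db.update({
    tableName: storage.TABLE_AGENTS,
    keys: { id: "agent-1" },
    data: { description: "updated", updatedAt: new Date() },
  });
  return db.select({ tableName: storage.TABLE_AGENTS, keys: { id: "agent-1" } });
}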
1805
+
1806
+ // src/storage/domains/agents/index.ts
1807
+ var AgentsLibSQL = class extends storage.AgentsStorage {
1808
+ #db;
1809
+ constructor(config) {
1810
+ super();
1811
+ const client = resolveClient(config);
1812
+ this.#db = new LibSQLDB({ client, maxRetries: config.maxRetries, initialBackoffMs: config.initialBackoffMs });
1846
1813
  }
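AgentsLibSQL now owns its connection: the constructor resolves a client from { client } or { url, authToken } and builds its own LibSQLDB with optional retry settings. A construction sketch (field values illustrative; in practice the class may be created by the surrounding storage adapter rather than by hand):

(async () => {
  const agents = new AgentsLibSQL({ url: "file:./mastra.db", maxRetries: 5 });  // or { client: existingLibsqlClient }
  await agents.init();                                                          // creates the agents table if missing
  await agents.createAgent({
    agent: { id: "agent-1", name: "Demo agent", instructions: "Be brief.", model: { id: "gpt-4o-mini" } },
  });
  const found = await agents.getAgentById({ id: "agent-1" });
  console.log(found?.name);
})();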
1847
- async updateResource({
1848
- resourceId,
1849
- workingMemory,
1850
- metadata
1851
- }) {
1852
- const existingResource = await this.getResourceById({ resourceId });
1853
- if (!existingResource) {
1854
- const newResource = {
1855
- id: resourceId,
1856
- workingMemory,
1857
- metadata: metadata || {},
1858
- createdAt: /* @__PURE__ */ new Date(),
1859
- updatedAt: /* @__PURE__ */ new Date()
1814
+ async init() {
1815
+ await this.#db.createTable({ tableName: storage.TABLE_AGENTS, schema: storage.AGENTS_SCHEMA });
1816
+ }
1817
+ async dangerouslyClearAll() {
1818
+ await this.#db.deleteData({ tableName: storage.TABLE_AGENTS });
1819
+ }
1820
+ parseJson(value, fieldName) {
1821
+ if (!value) return void 0;
1822
+ if (typeof value !== "string") return value;
1823
+ try {
1824
+ return JSON.parse(value);
1825
+ } catch (error$1) {
1826
+ const details = {
1827
+ value: value.length > 100 ? value.substring(0, 100) + "..." : value
1860
1828
  };
1861
- return this.saveResource({ resource: newResource });
1829
+ if (fieldName) {
1830
+ details.field = fieldName;
1831
+ }
1832
+ throw new error.MastraError(
1833
+ {
1834
+ id: storage.createStorageErrorId("LIBSQL", "PARSE_JSON", "INVALID_JSON"),
1835
+ domain: error.ErrorDomain.STORAGE,
1836
+ category: error.ErrorCategory.SYSTEM,
1837
+ text: `Failed to parse JSON${fieldName ? ` for field "${fieldName}"` : ""}: ${error$1 instanceof Error ? error$1.message : "Unknown error"}`,
1838
+ details
1839
+ },
1840
+ error$1
1841
+ );
1862
1842
  }
1863
- const updatedResource = {
1864
- ...existingResource,
1865
- workingMemory: workingMemory !== void 0 ? workingMemory : existingResource.workingMemory,
1866
- metadata: {
1867
- ...existingResource.metadata,
1868
- ...metadata
1869
- },
1870
- updatedAt: /* @__PURE__ */ new Date()
1843
+ }
1844
+ parseRow(row) {
1845
+ return {
1846
+ id: row.id,
1847
+ name: row.name,
1848
+ description: row.description,
1849
+ instructions: row.instructions,
1850
+ model: this.parseJson(row.model, "model"),
1851
+ tools: this.parseJson(row.tools, "tools"),
1852
+ defaultOptions: this.parseJson(row.defaultOptions, "defaultOptions"),
1853
+ workflows: this.parseJson(row.workflows, "workflows"),
1854
+ agents: this.parseJson(row.agents, "agents"),
1855
+ inputProcessors: this.parseJson(row.inputProcessors, "inputProcessors"),
1856
+ outputProcessors: this.parseJson(row.outputProcessors, "outputProcessors"),
1857
+ memory: this.parseJson(row.memory, "memory"),
1858
+ scorers: this.parseJson(row.scorers, "scorers"),
1859
+ metadata: this.parseJson(row.metadata, "metadata"),
1860
+ createdAt: new Date(row.createdAt),
1861
+ updatedAt: new Date(row.updatedAt)
1871
1862
  };
1872
- const updates = [];
1873
- const values = [];
1874
- if (workingMemory !== void 0) {
1875
- updates.push("workingMemory = ?");
1876
- values.push(workingMemory);
1877
- }
1878
- if (metadata) {
1879
- updates.push("metadata = ?");
1880
- values.push(JSON.stringify(updatedResource.metadata));
1881
- }
1882
- updates.push("updatedAt = ?");
1883
- values.push(updatedResource.updatedAt.toISOString());
1884
- values.push(resourceId);
1885
- await this.client.execute({
1886
- sql: `UPDATE ${storage.TABLE_RESOURCES} SET ${updates.join(", ")} WHERE id = ?`,
1887
- args: values
1888
- });
1889
- return updatedResource;
1890
1863
  }
1891
- async getThreadById({ threadId }) {
1864
+ async getAgentById({ id }) {
1892
1865
  try {
1893
- const result = await this.operations.load({
1894
- tableName: storage.TABLE_THREADS,
1895
- keys: { id: threadId }
1866
+ const result = await this.#db.select({
1867
+ tableName: storage.TABLE_AGENTS,
1868
+ keys: { id }
1896
1869
  });
1897
- if (!result) {
1898
- return null;
1899
- }
1900
- return {
1901
- ...result,
1902
- metadata: typeof result.metadata === "string" ? JSON.parse(result.metadata) : result.metadata,
1903
- createdAt: new Date(result.createdAt),
1904
- updatedAt: new Date(result.updatedAt)
1905
- };
1870
+ return result ? this.parseRow(result) : null;
1906
1871
  } catch (error$1) {
1907
1872
  throw new error.MastraError(
1908
1873
  {
1909
- id: storage.createStorageErrorId("LIBSQL", "GET_THREAD_BY_ID", "FAILED"),
1874
+ id: storage.createStorageErrorId("LIBSQL", "GET_AGENT_BY_ID", "FAILED"),
1910
1875
  domain: error.ErrorDomain.STORAGE,
1911
1876
  category: error.ErrorCategory.THIRD_PARTY,
1912
- details: { threadId }
1877
+ details: { agentId: id }
1913
1878
  },
1914
1879
  error$1
1915
1880
  );
1916
1881
  }
1917
1882
  }
1918
- async listThreadsByResourceId(args) {
1919
- const { resourceId, page = 0, perPage: perPageInput, orderBy } = args;
1920
- if (page < 0) {
1883
+ async createAgent({ agent }) {
1884
+ try {
1885
+ const now = /* @__PURE__ */ new Date();
1886
+ await this.#db.insert({
1887
+ tableName: storage.TABLE_AGENTS,
1888
+ record: {
1889
+ id: agent.id,
1890
+ name: agent.name,
1891
+ description: agent.description ?? null,
1892
+ instructions: agent.instructions,
1893
+ model: agent.model,
1894
+ tools: agent.tools ?? null,
1895
+ defaultOptions: agent.defaultOptions ?? null,
1896
+ workflows: agent.workflows ?? null,
1897
+ agents: agent.agents ?? null,
1898
+ inputProcessors: agent.inputProcessors ?? null,
1899
+ outputProcessors: agent.outputProcessors ?? null,
1900
+ memory: agent.memory ?? null,
1901
+ scorers: agent.scorers ?? null,
1902
+ metadata: agent.metadata ?? null,
1903
+ createdAt: now,
1904
+ updatedAt: now
1905
+ }
1906
+ });
1907
+ return {
1908
+ ...agent,
1909
+ createdAt: now,
1910
+ updatedAt: now
1911
+ };
1912
+ } catch (error$1) {
1921
1913
  throw new error.MastraError(
1922
1914
  {
1923
- id: storage.createStorageErrorId("LIBSQL", "LIST_THREADS_BY_RESOURCE_ID", "INVALID_PAGE"),
1915
+ id: storage.createStorageErrorId("LIBSQL", "CREATE_AGENT", "FAILED"),
1924
1916
  domain: error.ErrorDomain.STORAGE,
1925
- category: error.ErrorCategory.USER,
1926
- details: { page }
1917
+ category: error.ErrorCategory.THIRD_PARTY,
1918
+ details: { agentId: agent.id }
1927
1919
  },
1928
- new Error("page must be >= 0")
1920
+ error$1
1929
1921
  );
1930
1922
  }
1931
- const perPage = storage.normalizePerPage(perPageInput, 100);
1932
- const { offset, perPage: perPageForResponse } = storage.calculatePagination(page, perPageInput, perPage);
1933
- const { field, direction } = this.parseOrderBy(orderBy);
1923
+ }
1924
+ async updateAgent({ id, ...updates }) {
1934
1925
  try {
1935
- const baseQuery = `FROM ${storage.TABLE_THREADS} WHERE resourceId = ?`;
1936
- const queryParams = [resourceId];
1937
- const mapRowToStorageThreadType = (row) => ({
1938
- id: row.id,
1939
- resourceId: row.resourceId,
1940
- title: row.title,
1941
- createdAt: new Date(row.createdAt),
1942
- // Convert string to Date
1943
- updatedAt: new Date(row.updatedAt),
1944
- // Convert string to Date
1945
- metadata: typeof row.metadata === "string" ? JSON.parse(row.metadata) : row.metadata
1946
- });
1947
- const countResult = await this.client.execute({
1948
- sql: `SELECT COUNT(*) as count ${baseQuery}`,
1949
- args: queryParams
1950
- });
1951
- const total = Number(countResult.rows?.[0]?.count ?? 0);
1952
- if (total === 0) {
1953
- return {
1954
- threads: [],
1955
- total: 0,
1956
- page,
1957
- perPage: perPageForResponse,
1958
- hasMore: false
1959
- };
1926
+ const existingAgent = await this.getAgentById({ id });
1927
+ if (!existingAgent) {
1928
+ throw new error.MastraError({
1929
+ id: storage.createStorageErrorId("LIBSQL", "UPDATE_AGENT", "NOT_FOUND"),
1930
+ domain: error.ErrorDomain.STORAGE,
1931
+ category: error.ErrorCategory.USER,
1932
+ text: `Agent ${id} not found`,
1933
+ details: { agentId: id }
1934
+ });
1960
1935
  }
1961
- const limitValue = perPageInput === false ? total : perPage;
1962
- const dataResult = await this.client.execute({
1963
- sql: `SELECT * ${baseQuery} ORDER BY "${field}" ${direction} LIMIT ? OFFSET ?`,
1964
- args: [...queryParams, limitValue, offset]
1965
- });
1966
- const threads = (dataResult.rows || []).map(mapRowToStorageThreadType);
1967
- return {
1968
- threads,
1969
- total,
1970
- page,
1971
- perPage: perPageForResponse,
1972
- hasMore: perPageInput === false ? false : offset + perPage < total
1936
+ const data = {
1937
+ updatedAt: /* @__PURE__ */ new Date()
1973
1938
  };
1939
+ if (updates.name !== void 0) data.name = updates.name;
1940
+ if (updates.description !== void 0) data.description = updates.description;
1941
+ if (updates.instructions !== void 0) data.instructions = updates.instructions;
1942
+ if (updates.model !== void 0) data.model = updates.model;
1943
+ if (updates.tools !== void 0) data.tools = updates.tools;
1944
+ if (updates.defaultOptions !== void 0) data.defaultOptions = updates.defaultOptions;
1945
+ if (updates.workflows !== void 0) data.workflows = updates.workflows;
1946
+ if (updates.agents !== void 0) data.agents = updates.agents;
1947
+ if (updates.inputProcessors !== void 0) data.inputProcessors = updates.inputProcessors;
1948
+ if (updates.outputProcessors !== void 0) data.outputProcessors = updates.outputProcessors;
1949
+ if (updates.memory !== void 0) data.memory = updates.memory;
1950
+ if (updates.scorers !== void 0) data.scorers = updates.scorers;
1951
+ if (updates.metadata !== void 0) {
1952
+ data.metadata = { ...existingAgent.metadata, ...updates.metadata };
1953
+ }
1954
+ if (Object.keys(data).length > 1) {
1955
+ await this.#db.update({
1956
+ tableName: storage.TABLE_AGENTS,
1957
+ keys: { id },
1958
+ data
1959
+ });
1960
+ }
1961
+ const updatedAgent = await this.getAgentById({ id });
1962
+ if (!updatedAgent) {
1963
+ throw new error.MastraError({
1964
+ id: storage.createStorageErrorId("LIBSQL", "UPDATE_AGENT", "NOT_FOUND_AFTER_UPDATE"),
1965
+ domain: error.ErrorDomain.STORAGE,
1966
+ category: error.ErrorCategory.SYSTEM,
1967
+ text: `Agent ${id} not found after update`,
1968
+ details: { agentId: id }
1969
+ });
1970
+ }
1971
+ return updatedAgent;
1974
1972
  } catch (error$1) {
1975
- const mastraError = new error.MastraError(
1973
+ if (error$1 instanceof error.MastraError) {
1974
+ throw error$1;
1975
+ }
1976
+ throw new error.MastraError(
1976
1977
  {
1977
- id: storage.createStorageErrorId("LIBSQL", "LIST_THREADS_BY_RESOURCE_ID", "FAILED"),
1978
+ id: storage.createStorageErrorId("LIBSQL", "UPDATE_AGENT", "FAILED"),
1978
1979
  domain: error.ErrorDomain.STORAGE,
1979
1980
  category: error.ErrorCategory.THIRD_PARTY,
1980
- details: { resourceId }
1981
+ details: { agentId: id }
1981
1982
  },
1982
1983
  error$1
1983
1984
  );
1984
- this.logger?.trackException?.(mastraError);
1985
- this.logger?.error?.(mastraError.toString());
1986
- return {
1987
- threads: [],
1988
- total: 0,
1989
- page,
1990
- perPage: perPageForResponse,
1991
- hasMore: false
1992
- };
1993
1985
  }
1994
1986
  }
1995
- async saveThread({ thread }) {
1987
+ async deleteAgent({ id }) {
1996
1988
  try {
1997
- await this.operations.insert({
1998
- tableName: storage.TABLE_THREADS,
1999
- record: {
2000
- ...thread,
2001
- metadata: JSON.stringify(thread.metadata)
2002
- }
1989
+ await this.#db.delete({
1990
+ tableName: storage.TABLE_AGENTS,
1991
+ keys: { id }
2003
1992
  });
2004
- return thread;
2005
1993
  } catch (error$1) {
2006
- const mastraError = new error.MastraError(
1994
+ throw new error.MastraError(
2007
1995
  {
2008
- id: storage.createStorageErrorId("LIBSQL", "SAVE_THREAD", "FAILED"),
1996
+ id: storage.createStorageErrorId("LIBSQL", "DELETE_AGENT", "FAILED"),
2009
1997
  domain: error.ErrorDomain.STORAGE,
2010
1998
  category: error.ErrorCategory.THIRD_PARTY,
2011
- details: { threadId: thread.id }
1999
+ details: { agentId: id }
2012
2000
  },
2013
2001
  error$1
2014
2002
  );
2015
- this.logger?.trackException?.(mastraError);
2016
- this.logger?.error?.(mastraError.toString());
2017
- throw mastraError;
2018
2003
  }
2019
2004
  }
2020
- async updateThread({
2021
- id,
2022
- title,
2023
- metadata
2024
- }) {
2025
- const thread = await this.getThreadById({ threadId: id });
2026
- if (!thread) {
2027
- throw new error.MastraError({
2028
- id: storage.createStorageErrorId("LIBSQL", "UPDATE_THREAD", "NOT_FOUND"),
2029
- domain: error.ErrorDomain.STORAGE,
2030
- category: error.ErrorCategory.USER,
2031
- text: `Thread ${id} not found`,
2032
- details: {
2033
- status: 404,
2034
- threadId: id
2035
- }
2036
- });
2037
- }
2038
- const updatedThread = {
2039
- ...thread,
2040
- title,
2041
- metadata: {
2042
- ...thread.metadata,
2043
- ...metadata
2044
- }
2045
- };
2046
- try {
2047
- await this.client.execute({
2048
- sql: `UPDATE ${storage.TABLE_THREADS} SET title = ?, metadata = ? WHERE id = ?`,
2049
- args: [title, JSON.stringify(updatedThread.metadata), id]
2050
- });
2051
- return updatedThread;
2052
- } catch (error$1) {
2005
+ async listAgents(args) {
2006
+ const { page = 0, perPage: perPageInput, orderBy } = args || {};
2007
+ const { field, direction } = this.parseOrderBy(orderBy);
2008
+ if (page < 0) {
2053
2009
  throw new error.MastraError(
2054
2010
  {
2055
- id: storage.createStorageErrorId("LIBSQL", "UPDATE_THREAD", "FAILED"),
2011
+ id: storage.createStorageErrorId("LIBSQL", "LIST_AGENTS", "INVALID_PAGE"),
2056
2012
  domain: error.ErrorDomain.STORAGE,
2057
- category: error.ErrorCategory.THIRD_PARTY,
2058
- text: `Failed to update thread ${id}`,
2059
- details: { threadId: id }
2013
+ category: error.ErrorCategory.USER,
2014
+ details: { page }
2060
2015
  },
2061
- error$1
2016
+ new Error("page must be >= 0")
2062
2017
  );
2063
2018
  }
2064
- }
2065
- async deleteThread({ threadId }) {
2019
+ const perPage = storage.normalizePerPage(perPageInput, 100);
2020
+ const { offset, perPage: perPageForResponse } = storage.calculatePagination(page, perPageInput, perPage);
2066
2021
  try {
2067
- await this.client.execute({
2068
- sql: `DELETE FROM ${storage.TABLE_MESSAGES} WHERE thread_id = ?`,
2069
- args: [threadId]
2070
- });
2071
- await this.client.execute({
2072
- sql: `DELETE FROM ${storage.TABLE_THREADS} WHERE id = ?`,
2073
- args: [threadId]
2022
+ const total = await this.#db.selectTotalCount({ tableName: storage.TABLE_AGENTS });
2023
+ if (total === 0) {
2024
+ return {
2025
+ agents: [],
2026
+ total: 0,
2027
+ page,
2028
+ perPage: perPageForResponse,
2029
+ hasMore: false
2030
+ };
2031
+ }
2032
+ const limitValue = perPageInput === false ? total : perPage;
2033
+ const rows = await this.#db.selectMany({
2034
+ tableName: storage.TABLE_AGENTS,
2035
+ orderBy: `"${field}" ${direction}`,
2036
+ limit: limitValue,
2037
+ offset
2074
2038
  });
2039
+ const agents = rows.map((row) => this.parseRow(row));
2040
+ return {
2041
+ agents,
2042
+ total,
2043
+ page,
2044
+ perPage: perPageForResponse,
2045
+ hasMore: perPageInput === false ? false : offset + perPage < total
2046
+ };
2075
2047
  } catch (error$1) {
2076
2048
  throw new error.MastraError(
2077
2049
  {
2078
- id: storage.createStorageErrorId("LIBSQL", "DELETE_THREAD", "FAILED"),
2050
+ id: storage.createStorageErrorId("LIBSQL", "LIST_AGENTS", "FAILED"),
2079
2051
  domain: error.ErrorDomain.STORAGE,
2080
- category: error.ErrorCategory.THIRD_PARTY,
2081
- details: { threadId }
2052
+ category: error.ErrorCategory.THIRD_PARTY
2082
2053
  },
2083
2054
  error$1
2084
2055
  );
2085
2056
  }
2086
2057
  }
2087
2058
  };
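
Note: the block above introduces the new AgentsLibSQL domain store. A minimal usage sketch based only on the methods visible in this hunk; the export name and the config shape passed to resolveClient (not shown here) are assumptions, not confirmed by this diff:

const { AgentsLibSQL } = require('@mastra/libsql'); // export name is an assumption

(async () => {
  // Config shape is an assumption; resolveClient() is defined elsewhere in this file.
  const agents = new AgentsLibSQL({ url: 'file:./mastra.db' });
  await agents.init(); // creates the agents table if it does not exist
  await agents.createAgent({
    agent: {
      id: 'agent-1',
      name: 'Support bot',
      instructions: 'Be helpful.',
      model: { provider: 'openai', name: 'gpt-4o-mini' }, // model shape is an assumption; it is stored as JSON
    },
  });
  const { agents: page1, total, hasMore } = await agents.listAgents({ page: 0, perPage: 20 });
  const found = await agents.getAgentById({ id: 'agent-1' }); // resolves to null when missing
  await agents.updateAgent({ id: 'agent-1', metadata: { team: 'cx' } }); // metadata is shallow-merged
  console.log(total, hasMore, page1.length, found?.name);
  await agents.deleteAgent({ id: 'agent-1' });
})();
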
2088
- function createExecuteWriteOperationWithRetry({
2089
- logger,
2090
- maxRetries,
2091
- initialBackoffMs
2092
- }) {
2093
- return async function executeWriteOperationWithRetry(operationFn, operationDescription) {
2094
- let retries = 0;
2095
- while (true) {
2096
- try {
2097
- return await operationFn();
2098
- } catch (error) {
2099
- if (error.message && (error.message.includes("SQLITE_BUSY") || error.message.includes("database is locked")) && retries < maxRetries) {
2100
- retries++;
2101
- const backoffTime = initialBackoffMs * Math.pow(2, retries - 1);
2102
- logger.warn(
2103
- `LibSQLStore: Encountered SQLITE_BUSY during ${operationDescription}. Retrying (${retries}/${maxRetries}) in ${backoffTime}ms...`
2104
- );
2105
- await new Promise((resolve) => setTimeout(resolve, backoffTime));
2106
- } else {
2107
- logger.error(`LibSQLStore: Error during ${operationDescription} after ${retries} retries: ${error}`);
2108
- throw error;
2109
- }
2110
- }
2111
- }
2112
- };
2113
- }
2114
- function prepareStatement({ tableName, record }) {
2115
- const parsedTableName = utils.parseSqlIdentifier(tableName, "table name");
2116
- const columns = Object.keys(record).map((col) => utils.parseSqlIdentifier(col, "column name"));
2117
- const values = Object.values(record).map((v) => {
2118
- if (typeof v === `undefined` || v === null) {
2119
- return null;
2120
- }
2121
- if (v instanceof Date) {
2122
- return v.toISOString();
2123
- }
2124
- return typeof v === "object" ? JSON.stringify(v) : v;
2125
- });
2126
- const placeholders = values.map(() => "?").join(", ");
2127
- return {
2128
- sql: `INSERT OR REPLACE INTO ${parsedTableName} (${columns.join(", ")}) VALUES (${placeholders})`,
2129
- args: values
2130
- };
2131
- }
2132
- function prepareUpdateStatement({
2133
- tableName,
2134
- updates,
2135
- keys
2136
- }) {
2137
- const parsedTableName = utils.parseSqlIdentifier(tableName, "table name");
2138
- const schema = storage.TABLE_SCHEMAS[tableName];
2139
- const updateColumns = Object.keys(updates).map((col) => utils.parseSqlIdentifier(col, "column name"));
2140
- const updateValues = Object.values(updates).map(transformToSqlValue);
2141
- const setClause = updateColumns.map((col) => `${col} = ?`).join(", ");
2142
- const whereClause = prepareWhereClause(keys, schema);
2143
- return {
2144
- sql: `UPDATE ${parsedTableName} SET ${setClause}${whereClause.sql}`,
2145
- args: [...updateValues, ...whereClause.args]
2146
- };
2147
- }
2148
- function transformToSqlValue(value) {
2149
- if (typeof value === "undefined" || value === null) {
2150
- return null;
2059
+ var MemoryLibSQL = class extends storage.MemoryStorage {
2060
+ #client;
2061
+ #db;
2062
+ constructor(config) {
2063
+ super();
2064
+ const client = resolveClient(config);
2065
+ this.#client = client;
2066
+ this.#db = new LibSQLDB({ client, maxRetries: config.maxRetries, initialBackoffMs: config.initialBackoffMs });
2067
+ }
2068
+ async init() {
2069
+ await this.#db.createTable({ tableName: storage.TABLE_THREADS, schema: storage.TABLE_SCHEMAS[storage.TABLE_THREADS] });
2070
+ await this.#db.createTable({ tableName: storage.TABLE_MESSAGES, schema: storage.TABLE_SCHEMAS[storage.TABLE_MESSAGES] });
2071
+ await this.#db.createTable({ tableName: storage.TABLE_RESOURCES, schema: storage.TABLE_SCHEMAS[storage.TABLE_RESOURCES] });
2072
+ await this.#db.alterTable({
2073
+ tableName: storage.TABLE_MESSAGES,
2074
+ schema: storage.TABLE_SCHEMAS[storage.TABLE_MESSAGES],
2075
+ ifNotExists: ["resourceId"]
2076
+ });
2151
2077
  }
2152
- if (value instanceof Date) {
2153
- return value.toISOString();
2078
+ async dangerouslyClearAll() {
2079
+ await this.#db.deleteData({ tableName: storage.TABLE_MESSAGES });
2080
+ await this.#db.deleteData({ tableName: storage.TABLE_THREADS });
2081
+ await this.#db.deleteData({ tableName: storage.TABLE_RESOURCES });
2154
2082
  }
2155
- return typeof value === "object" ? JSON.stringify(value) : value;
2156
- }
2157
- function prepareDeleteStatement({ tableName, keys }) {
2158
- const parsedTableName = utils.parseSqlIdentifier(tableName, "table name");
2159
- const whereClause = prepareWhereClause(keys, storage.TABLE_SCHEMAS[tableName]);
2160
- return {
2161
- sql: `DELETE FROM ${parsedTableName}${whereClause.sql}`,
2162
- args: whereClause.args
2163
- };
2164
- }
2165
- function prepareWhereClause(filters, schema) {
2166
- const conditions = [];
2167
- const args = [];
2168
- for (const [columnName, filterValue] of Object.entries(filters)) {
2169
- const column = schema[columnName];
2170
- if (!column) {
2171
- throw new Error(`Unknown column: ${columnName}`);
2083
+ parseRow(row) {
2084
+ let content = row.content;
2085
+ try {
2086
+ content = JSON.parse(row.content);
2087
+ } catch {
2172
2088
  }
2173
- const parsedColumn = utils.parseSqlIdentifier(columnName, "column name");
2174
- const result = buildCondition2(parsedColumn, filterValue);
2175
- conditions.push(result.condition);
2176
- args.push(...result.args);
2177
- }
2178
- return {
2179
- sql: conditions.length > 0 ? ` WHERE ${conditions.join(" AND ")}` : "",
2180
- args
2181
- };
2182
- }
2183
- function buildCondition2(columnName, filterValue) {
2184
- if (filterValue === null) {
2185
- return { condition: `${columnName} IS NULL`, args: [] };
2186
- }
2187
- if (typeof filterValue === "object" && filterValue !== null && ("startAt" in filterValue || "endAt" in filterValue)) {
2188
- return buildDateRangeCondition(columnName, filterValue);
2189
- }
2190
- return {
2191
- condition: `${columnName} = ?`,
2192
- args: [transformToSqlValue(filterValue)]
2193
- };
2194
- }
2195
- function buildDateRangeCondition(columnName, range) {
2196
- const conditions = [];
2197
- const args = [];
2198
- if (range.startAt !== void 0) {
2199
- conditions.push(`${columnName} >= ?`);
2200
- args.push(transformToSqlValue(range.startAt));
2201
- }
2202
- if (range.endAt !== void 0) {
2203
- conditions.push(`${columnName} <= ?`);
2204
- args.push(transformToSqlValue(range.endAt));
2205
- }
2206
- if (conditions.length === 0) {
2207
- throw new Error("Date range must specify at least startAt or endAt");
2208
- }
2209
- return {
2210
- condition: conditions.join(" AND "),
2211
- args
2212
- };
2213
- }
2214
- function buildDateRangeFilter(dateRange, columnName = "createdAt") {
2215
- if (!dateRange?.start && !dateRange?.end) {
2216
- return {};
2217
- }
2218
- const filter = {};
2219
- if (dateRange.start) {
2220
- filter.startAt = new Date(dateRange.start).toISOString();
2089
+ const result = {
2090
+ id: row.id,
2091
+ content,
2092
+ role: row.role,
2093
+ createdAt: new Date(row.createdAt),
2094
+ threadId: row.thread_id,
2095
+ resourceId: row.resourceId
2096
+ };
2097
+ if (row.type && row.type !== `v2`) result.type = row.type;
2098
+ return result;
2221
2099
  }
2222
- if (dateRange.end) {
2223
- filter.endAt = new Date(dateRange.end).toISOString();
2100
+ async _getIncludedMessages({ include }) {
2101
+ if (!include || include.length === 0) return null;
2102
+ const unionQueries = [];
2103
+ const params = [];
2104
+ for (const inc of include) {
2105
+ const { id, withPreviousMessages = 0, withNextMessages = 0 } = inc;
2106
+ unionQueries.push(
2107
+ `
2108
+ SELECT * FROM (
2109
+ WITH target_thread AS (
2110
+ SELECT thread_id FROM "${storage.TABLE_MESSAGES}" WHERE id = ?
2111
+ ),
2112
+ numbered_messages AS (
2113
+ SELECT
2114
+ id, content, role, type, "createdAt", thread_id, "resourceId",
2115
+ ROW_NUMBER() OVER (ORDER BY "createdAt" ASC) as row_num
2116
+ FROM "${storage.TABLE_MESSAGES}"
2117
+ WHERE thread_id = (SELECT thread_id FROM target_thread)
2118
+ ),
2119
+ target_positions AS (
2120
+ SELECT row_num as target_pos
2121
+ FROM numbered_messages
2122
+ WHERE id = ?
2123
+ )
2124
+ SELECT DISTINCT m.*
2125
+ FROM numbered_messages m
2126
+ CROSS JOIN target_positions t
2127
+ WHERE m.row_num BETWEEN (t.target_pos - ?) AND (t.target_pos + ?)
2128
+ )
2129
+ `
2130
+ // Keep ASC for final sorting after fetching context
2131
+ );
2132
+ params.push(id, id, withPreviousMessages, withNextMessages);
2133
+ }
2134
+ const finalQuery = unionQueries.join(" UNION ALL ") + ' ORDER BY "createdAt" ASC';
2135
+ const includedResult = await this.#client.execute({ sql: finalQuery, args: params });
2136
+ const includedRows = includedResult.rows?.map((row) => this.parseRow(row));
2137
+ const seen = /* @__PURE__ */ new Set();
2138
+ const dedupedRows = includedRows.filter((row) => {
2139
+ if (seen.has(row.id)) return false;
2140
+ seen.add(row.id);
2141
+ return true;
2142
+ });
2143
+ return dedupedRows;
2224
2144
  }
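
Note: _getIncludedMessages above builds one UNION ALL block per requested id and uses a ROW_NUMBER() window over the target thread to pull a fixed number of neighbouring messages around each id, deduplicated by id. A minimal sketch of the include shape it consumes when called through listMessages (defined further down); the memory instance and all ids are placeholders:

// Runs inside an async function; assumes an initialized MemoryLibSQL instance named `memory`.
const { messages } = await memory.listMessages({
  threadId: 'thread-123',
  include: [
    // pulls msg-42 plus the 2 messages before it and 1 after it in the same thread
    { id: 'msg-42', withPreviousMessages: 2, withNextMessages: 1 },
  ],
  page: 0,
  perPage: 40,
});
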
2225
- return { [columnName]: filter };
2226
- }
2227
- function transformFromSqlRow({
2228
- tableName,
2229
- sqlRow
2230
- }) {
2231
- const result = {};
2232
- const jsonColumns = new Set(
2233
- Object.keys(storage.TABLE_SCHEMAS[tableName]).filter((key) => storage.TABLE_SCHEMAS[tableName][key].type === "jsonb").map((key) => key)
2234
- );
2235
- const dateColumns = new Set(
2236
- Object.keys(storage.TABLE_SCHEMAS[tableName]).filter((key) => storage.TABLE_SCHEMAS[tableName][key].type === "timestamp").map((key) => key)
2237
- );
2238
- for (const [key, value] of Object.entries(sqlRow)) {
2239
- if (value === null || value === void 0) {
2240
- result[key] = value;
2241
- continue;
2145
+ async listMessagesById({ messageIds }) {
2146
+ if (messageIds.length === 0) return { messages: [] };
2147
+ try {
2148
+ const sql = `
2149
+ SELECT
2150
+ id,
2151
+ content,
2152
+ role,
2153
+ type,
2154
+ "createdAt",
2155
+ thread_id,
2156
+ "resourceId"
2157
+ FROM "${storage.TABLE_MESSAGES}"
2158
+ WHERE id IN (${messageIds.map(() => "?").join(", ")})
2159
+ ORDER BY "createdAt" DESC
2160
+ `;
2161
+ const result = await this.#client.execute({ sql, args: messageIds });
2162
+ if (!result.rows) return { messages: [] };
2163
+ const list = new agent.MessageList().add(result.rows.map(this.parseRow), "memory");
2164
+ return { messages: list.get.all.db() };
2165
+ } catch (error$1) {
2166
+ throw new error.MastraError(
2167
+ {
2168
+ id: storage.createStorageErrorId("LIBSQL", "LIST_MESSAGES_BY_ID", "FAILED"),
2169
+ domain: error.ErrorDomain.STORAGE,
2170
+ category: error.ErrorCategory.THIRD_PARTY,
2171
+ details: { messageIds: JSON.stringify(messageIds) }
2172
+ },
2173
+ error$1
2174
+ );
2242
2175
  }
2243
- if (dateColumns.has(key) && typeof value === "string") {
2244
- result[key] = new Date(value);
2245
- continue;
2176
+ }
2177
+ async listMessages(args) {
2178
+ const { threadId, resourceId, include, filter, perPage: perPageInput, page = 0, orderBy } = args;
2179
+ const threadIds = Array.isArray(threadId) ? threadId : [threadId];
2180
+ if (threadIds.length === 0 || threadIds.some((id) => !id.trim())) {
2181
+ throw new error.MastraError(
2182
+ {
2183
+ id: storage.createStorageErrorId("LIBSQL", "LIST_MESSAGES", "INVALID_THREAD_ID"),
2184
+ domain: error.ErrorDomain.STORAGE,
2185
+ category: error.ErrorCategory.THIRD_PARTY,
2186
+ details: { threadId: Array.isArray(threadId) ? threadId.join(",") : threadId }
2187
+ },
2188
+ new Error("threadId must be a non-empty string or array of non-empty strings")
2189
+ );
2246
2190
  }
2247
- if (jsonColumns.has(key) && typeof value === "string") {
2248
- result[key] = storage.safelyParseJSON(value);
2249
- continue;
2191
+ if (page < 0) {
2192
+ throw new error.MastraError(
2193
+ {
2194
+ id: storage.createStorageErrorId("LIBSQL", "LIST_MESSAGES", "INVALID_PAGE"),
2195
+ domain: error.ErrorDomain.STORAGE,
2196
+ category: error.ErrorCategory.USER,
2197
+ details: { page }
2198
+ },
2199
+ new Error("page must be >= 0")
2200
+ );
2250
2201
  }
2251
- result[key] = value;
2252
- }
2253
- return result;
2254
- }
2255
-
2256
- // src/storage/domains/observability/index.ts
2257
- var ObservabilityLibSQL = class extends storage.ObservabilityStorage {
2258
- operations;
2259
- constructor({ operations }) {
2260
- super();
2261
- this.operations = operations;
2262
- }
2263
- async createSpan(span) {
2202
+ const perPage = storage.normalizePerPage(perPageInput, 40);
2203
+ const { offset, perPage: perPageForResponse } = storage.calculatePagination(page, perPageInput, perPage);
2264
2204
  try {
2265
- const now = (/* @__PURE__ */ new Date()).toISOString();
2266
- const record = {
2267
- ...span,
2268
- createdAt: now,
2269
- updatedAt: now
2205
+ const { field, direction } = this.parseOrderBy(orderBy, "ASC");
2206
+ const orderByStatement = `ORDER BY "${field}" ${direction}`;
2207
+ const threadPlaceholders = threadIds.map(() => "?").join(", ");
2208
+ const conditions = [`thread_id IN (${threadPlaceholders})`];
2209
+ const queryParams = [...threadIds];
2210
+ if (resourceId) {
2211
+ conditions.push(`"resourceId" = ?`);
2212
+ queryParams.push(resourceId);
2213
+ }
2214
+ if (filter?.dateRange?.start) {
2215
+ conditions.push(`"createdAt" >= ?`);
2216
+ queryParams.push(
2217
+ filter.dateRange.start instanceof Date ? filter.dateRange.start.toISOString() : filter.dateRange.start
2218
+ );
2219
+ }
2220
+ if (filter?.dateRange?.end) {
2221
+ conditions.push(`"createdAt" <= ?`);
2222
+ queryParams.push(
2223
+ filter.dateRange.end instanceof Date ? filter.dateRange.end.toISOString() : filter.dateRange.end
2224
+ );
2225
+ }
2226
+ const whereClause = conditions.length > 0 ? `WHERE ${conditions.join(" AND ")}` : "";
2227
+ const countResult = await this.#client.execute({
2228
+ sql: `SELECT COUNT(*) as count FROM ${storage.TABLE_MESSAGES} ${whereClause}`,
2229
+ args: queryParams
2230
+ });
2231
+ const total = Number(countResult.rows?.[0]?.count ?? 0);
2232
+ const limitValue = perPageInput === false ? total : perPage;
2233
+ const dataResult = await this.#client.execute({
2234
+ sql: `SELECT id, content, role, type, "createdAt", "resourceId", "thread_id" FROM ${storage.TABLE_MESSAGES} ${whereClause} ${orderByStatement} LIMIT ? OFFSET ?`,
2235
+ args: [...queryParams, limitValue, offset]
2236
+ });
2237
+ const messages = (dataResult.rows || []).map((row) => this.parseRow(row));
2238
+ if (total === 0 && messages.length === 0 && (!include || include.length === 0)) {
2239
+ return {
2240
+ messages: [],
2241
+ total: 0,
2242
+ page,
2243
+ perPage: perPageForResponse,
2244
+ hasMore: false
2245
+ };
2246
+ }
2247
+ const messageIds = new Set(messages.map((m) => m.id));
2248
+ if (include && include.length > 0) {
2249
+ const includeMessages = await this._getIncludedMessages({ include });
2250
+ if (includeMessages) {
2251
+ for (const includeMsg of includeMessages) {
2252
+ if (!messageIds.has(includeMsg.id)) {
2253
+ messages.push(includeMsg);
2254
+ messageIds.add(includeMsg.id);
2255
+ }
2256
+ }
2257
+ }
2258
+ }
2259
+ const list = new agent.MessageList().add(messages, "memory");
2260
+ let finalMessages = list.get.all.db();
2261
+ finalMessages = finalMessages.sort((a, b) => {
2262
+ const isDateField = field === "createdAt" || field === "updatedAt";
2263
+ const aValue = isDateField ? new Date(a[field]).getTime() : a[field];
2264
+ const bValue = isDateField ? new Date(b[field]).getTime() : b[field];
2265
+ if (typeof aValue === "number" && typeof bValue === "number") {
2266
+ return direction === "ASC" ? aValue - bValue : bValue - aValue;
2267
+ }
2268
+ return direction === "ASC" ? String(aValue).localeCompare(String(bValue)) : String(bValue).localeCompare(String(aValue));
2269
+ });
2270
+ const threadIdSet = new Set(threadIds);
2271
+ const returnedThreadMessageIds = new Set(
2272
+ finalMessages.filter((m) => m.threadId && threadIdSet.has(m.threadId)).map((m) => m.id)
2273
+ );
2274
+ const allThreadMessagesReturned = returnedThreadMessageIds.size >= total;
2275
+ const hasMore = perPageInput !== false && !allThreadMessagesReturned && offset + perPage < total;
2276
+ return {
2277
+ messages: finalMessages,
2278
+ total,
2279
+ page,
2280
+ perPage: perPageForResponse,
2281
+ hasMore
2270
2282
  };
2271
- return this.operations.insert({ tableName: storage.TABLE_SPANS, record });
2272
2283
  } catch (error$1) {
2273
- throw new error.MastraError(
2284
+ const mastraError = new error.MastraError(
2274
2285
  {
2275
- id: storage.createStorageErrorId("LIBSQL", "CREATE_SPAN", "FAILED"),
2286
+ id: storage.createStorageErrorId("LIBSQL", "LIST_MESSAGES", "FAILED"),
2276
2287
  domain: error.ErrorDomain.STORAGE,
2277
- category: error.ErrorCategory.USER,
2288
+ category: error.ErrorCategory.THIRD_PARTY,
2278
2289
  details: {
2279
- spanId: span.spanId,
2280
- traceId: span.traceId,
2281
- spanType: span.spanType,
2282
- spanName: span.name
2290
+ threadId: Array.isArray(threadId) ? threadId.join(",") : threadId,
2291
+ resourceId: resourceId ?? ""
2283
2292
  }
2284
2293
  },
2285
2294
  error$1
2286
2295
  );
2296
+ this.logger?.error?.(mastraError.toString());
2297
+ this.logger?.trackException?.(mastraError);
2298
+ return {
2299
+ messages: [],
2300
+ total: 0,
2301
+ page,
2302
+ perPage: perPageForResponse,
2303
+ hasMore: false
2304
+ };
2287
2305
  }
2288
2306
  }
2289
- async getTrace(traceId) {
2307
+ async saveMessages({ messages }) {
2308
+ if (messages.length === 0) return { messages };
2290
2309
  try {
2291
- const spans = await this.operations.loadMany({
2292
- tableName: storage.TABLE_SPANS,
2293
- whereClause: { sql: " WHERE traceId = ?", args: [traceId] },
2294
- orderBy: "startedAt DESC"
2310
+ const threadId = messages[0]?.threadId;
2311
+ if (!threadId) {
2312
+ throw new Error("Thread ID is required");
2313
+ }
2314
+ const batchStatements = messages.map((message) => {
2315
+ const time = message.createdAt || /* @__PURE__ */ new Date();
2316
+ if (!message.threadId) {
2317
+ throw new Error(
2318
+ `Expected to find a threadId for message, but couldn't find one. An unexpected error has occurred.`
2319
+ );
2320
+ }
2321
+ if (!message.resourceId) {
2322
+ throw new Error(
2323
+ `Expected to find a resourceId for message, but couldn't find one. An unexpected error has occurred.`
2324
+ );
2325
+ }
2326
+ return {
2327
+ sql: `INSERT INTO "${storage.TABLE_MESSAGES}" (id, thread_id, content, role, type, "createdAt", "resourceId")
2328
+ VALUES (?, ?, ?, ?, ?, ?, ?)
2329
+ ON CONFLICT(id) DO UPDATE SET
2330
+ thread_id=excluded.thread_id,
2331
+ content=excluded.content,
2332
+ role=excluded.role,
2333
+ type=excluded.type,
2334
+ "resourceId"=excluded."resourceId"
2335
+ `,
2336
+ args: [
2337
+ message.id,
2338
+ message.threadId,
2339
+ typeof message.content === "object" ? JSON.stringify(message.content) : message.content,
2340
+ message.role,
2341
+ message.type || "v2",
2342
+ time instanceof Date ? time.toISOString() : time,
2343
+ message.resourceId
2344
+ ]
2345
+ };
2295
2346
  });
2296
- if (!spans || spans.length === 0) {
2297
- return null;
2347
+ const now = (/* @__PURE__ */ new Date()).toISOString();
2348
+ batchStatements.push({
2349
+ sql: `UPDATE "${storage.TABLE_THREADS}" SET "updatedAt" = ? WHERE id = ?`,
2350
+ args: [now, threadId]
2351
+ });
2352
+ const BATCH_SIZE = 50;
2353
+ const messageStatements = batchStatements.slice(0, -1);
2354
+ const threadUpdateStatement = batchStatements[batchStatements.length - 1];
2355
+ for (let i = 0; i < messageStatements.length; i += BATCH_SIZE) {
2356
+ const batch = messageStatements.slice(i, i + BATCH_SIZE);
2357
+ if (batch.length > 0) {
2358
+ await this.#client.batch(batch, "write");
2359
+ }
2298
2360
  }
2299
- return {
2300
- traceId,
2301
- spans: spans.map((span) => transformFromSqlRow({ tableName: storage.TABLE_SPANS, sqlRow: span }))
2302
- };
2361
+ if (threadUpdateStatement) {
2362
+ await this.#client.execute(threadUpdateStatement);
2363
+ }
2364
+ const list = new agent.MessageList().add(messages, "memory");
2365
+ return { messages: list.get.all.db() };
2303
2366
  } catch (error$1) {
2304
2367
  throw new error.MastraError(
2305
2368
  {
2306
- id: storage.createStorageErrorId("LIBSQL", "GET_TRACE", "FAILED"),
2369
+ id: storage.createStorageErrorId("LIBSQL", "SAVE_MESSAGES", "FAILED"),
2307
2370
  domain: error.ErrorDomain.STORAGE,
2308
- category: error.ErrorCategory.USER,
2309
- details: {
2310
- traceId
2311
- }
2371
+ category: error.ErrorCategory.THIRD_PARTY
2312
2372
  },
2313
2373
  error$1
2314
2374
  );
2315
2375
  }
2316
2376
  }
2317
- async updateSpan({
2318
- spanId,
2319
- traceId,
2320
- updates
2377
+ async updateMessages({
2378
+ messages
2321
2379
  }) {
2322
- try {
2323
- await this.operations.update({
2324
- tableName: storage.TABLE_SPANS,
2325
- keys: { spanId, traceId },
2326
- data: { ...updates, updatedAt: (/* @__PURE__ */ new Date()).toISOString() }
2327
- });
2328
- } catch (error$1) {
2329
- throw new error.MastraError(
2330
- {
2331
- id: storage.createStorageErrorId("LIBSQL", "UPDATE_SPAN", "FAILED"),
2332
- domain: error.ErrorDomain.STORAGE,
2333
- category: error.ErrorCategory.USER,
2334
- details: {
2335
- spanId,
2336
- traceId
2337
- }
2338
- },
2339
- error$1
2340
- );
2380
+ if (messages.length === 0) {
2381
+ return [];
2341
2382
  }
2342
- }
2343
- async getTracesPaginated({
2344
- filters,
2345
- pagination
2346
- }) {
2347
- const page = pagination?.page ?? 0;
2348
- const perPage = pagination?.perPage ?? 10;
2349
- const { entityId, entityType, ...actualFilters } = filters || {};
2350
- const filtersWithDateRange = {
2351
- ...actualFilters,
2352
- ...buildDateRangeFilter(pagination?.dateRange, "startedAt"),
2353
- parentSpanId: null
2383
+ const messageIds = messages.map((m) => m.id);
2384
+ const placeholders = messageIds.map(() => "?").join(",");
2385
+ const selectSql = `SELECT * FROM ${storage.TABLE_MESSAGES} WHERE id IN (${placeholders})`;
2386
+ const existingResult = await this.#client.execute({ sql: selectSql, args: messageIds });
2387
+ const existingMessages = existingResult.rows.map((row) => this.parseRow(row));
2388
+ if (existingMessages.length === 0) {
2389
+ return [];
2390
+ }
2391
+ const batchStatements = [];
2392
+ const threadIdsToUpdate = /* @__PURE__ */ new Set();
2393
+ const columnMapping = {
2394
+ threadId: "thread_id"
2354
2395
  };
2355
- const whereClause = prepareWhereClause(filtersWithDateRange, storage.SPAN_SCHEMA);
2356
- let actualWhereClause = whereClause.sql || "";
2357
- if (entityId && entityType) {
2358
- const statement = `name = ?`;
2359
- let name = "";
2360
- if (entityType === "workflow") {
2361
- name = `workflow run: '${entityId}'`;
2362
- } else if (entityType === "agent") {
2363
- name = `agent run: '${entityId}'`;
2364
- } else {
2365
- const error$1 = new error.MastraError({
2366
- id: storage.createStorageErrorId("LIBSQL", "GET_TRACES_PAGINATED", "INVALID_ENTITY_TYPE"),
2367
- domain: error.ErrorDomain.STORAGE,
2368
- category: error.ErrorCategory.USER,
2369
- details: {
2370
- entityType
2371
- },
2372
- text: `Cannot filter by entity type: ${entityType}`
2373
- });
2374
- this.logger?.trackException(error$1);
2375
- throw error$1;
2396
+ for (const existingMessage of existingMessages) {
2397
+ const updatePayload = messages.find((m) => m.id === existingMessage.id);
2398
+ if (!updatePayload) continue;
2399
+ const { id, ...fieldsToUpdate } = updatePayload;
2400
+ if (Object.keys(fieldsToUpdate).length === 0) continue;
2401
+ threadIdsToUpdate.add(existingMessage.threadId);
2402
+ if (updatePayload.threadId && updatePayload.threadId !== existingMessage.threadId) {
2403
+ threadIdsToUpdate.add(updatePayload.threadId);
2376
2404
  }
2377
- whereClause.args.push(name);
2378
- if (actualWhereClause) {
2379
- actualWhereClause += ` AND ${statement}`;
2380
- } else {
2381
- actualWhereClause += `WHERE ${statement}`;
2405
+ const setClauses = [];
2406
+ const args = [];
2407
+ const updatableFields = { ...fieldsToUpdate };
2408
+ if (updatableFields.content) {
2409
+ const newContent = {
2410
+ ...existingMessage.content,
2411
+ ...updatableFields.content,
2412
+ // Deep merge metadata if it exists on both
2413
+ ...existingMessage.content?.metadata && updatableFields.content.metadata ? {
2414
+ metadata: {
2415
+ ...existingMessage.content.metadata,
2416
+ ...updatableFields.content.metadata
2417
+ }
2418
+ } : {}
2419
+ };
2420
+ setClauses.push(`${utils.parseSqlIdentifier("content", "column name")} = ?`);
2421
+ args.push(JSON.stringify(newContent));
2422
+ delete updatableFields.content;
2423
+ }
2424
+ for (const key in updatableFields) {
2425
+ if (Object.prototype.hasOwnProperty.call(updatableFields, key)) {
2426
+ const dbKey = columnMapping[key] || key;
2427
+ setClauses.push(`${utils.parseSqlIdentifier(dbKey, "column name")} = ?`);
2428
+ let value = updatableFields[key];
2429
+ if (typeof value === "object" && value !== null) {
2430
+ value = JSON.stringify(value);
2431
+ }
2432
+ args.push(value);
2433
+ }
2434
+ }
2435
+ if (setClauses.length === 0) continue;
2436
+ args.push(id);
2437
+ const sql = `UPDATE ${storage.TABLE_MESSAGES} SET ${setClauses.join(", ")} WHERE id = ?`;
2438
+ batchStatements.push({ sql, args });
2439
+ }
2440
+ if (batchStatements.length === 0) {
2441
+ return existingMessages;
2442
+ }
2443
+ const now = (/* @__PURE__ */ new Date()).toISOString();
2444
+ for (const threadId of threadIdsToUpdate) {
2445
+ if (threadId) {
2446
+ batchStatements.push({
2447
+ sql: `UPDATE ${storage.TABLE_THREADS} SET updatedAt = ? WHERE id = ?`,
2448
+ args: [now, threadId]
2449
+ });
2382
2450
  }
2383
2451
  }
2384
- const orderBy = "startedAt DESC";
2385
- let count = 0;
2452
+ await this.#client.batch(batchStatements, "write");
2453
+ const updatedResult = await this.#client.execute({ sql: selectSql, args: messageIds });
2454
+ return updatedResult.rows.map((row) => this.parseRow(row));
2455
+ }
2456
+ async deleteMessages(messageIds) {
2457
+ if (!messageIds || messageIds.length === 0) {
2458
+ return;
2459
+ }
2386
2460
  try {
2387
- count = await this.operations.loadTotalCount({
2388
- tableName: storage.TABLE_SPANS,
2389
- whereClause: { sql: actualWhereClause, args: whereClause.args }
2390
- });
2461
+ const BATCH_SIZE = 100;
2462
+ const threadIds = /* @__PURE__ */ new Set();
2463
+ const tx = await this.#client.transaction("write");
2464
+ try {
2465
+ for (let i = 0; i < messageIds.length; i += BATCH_SIZE) {
2466
+ const batch = messageIds.slice(i, i + BATCH_SIZE);
2467
+ const placeholders = batch.map(() => "?").join(",");
2468
+ const result = await tx.execute({
2469
+ sql: `SELECT DISTINCT thread_id FROM "${storage.TABLE_MESSAGES}" WHERE id IN (${placeholders})`,
2470
+ args: batch
2471
+ });
2472
+ result.rows?.forEach((row) => {
2473
+ if (row.thread_id) threadIds.add(row.thread_id);
2474
+ });
2475
+ await tx.execute({
2476
+ sql: `DELETE FROM "${storage.TABLE_MESSAGES}" WHERE id IN (${placeholders})`,
2477
+ args: batch
2478
+ });
2479
+ }
2480
+ if (threadIds.size > 0) {
2481
+ const now = (/* @__PURE__ */ new Date()).toISOString();
2482
+ for (const threadId of threadIds) {
2483
+ await tx.execute({
2484
+ sql: `UPDATE "${storage.TABLE_THREADS}" SET "updatedAt" = ? WHERE id = ?`,
2485
+ args: [now, threadId]
2486
+ });
2487
+ }
2488
+ }
2489
+ await tx.commit();
2490
+ } catch (error) {
2491
+ await tx.rollback();
2492
+ throw error;
2493
+ }
2391
2494
  } catch (error$1) {
2392
2495
  throw new error.MastraError(
2393
2496
  {
2394
- id: storage.createStorageErrorId("LIBSQL", "GET_TRACES_PAGINATED", "COUNT_FAILED"),
2497
+ id: storage.createStorageErrorId("LIBSQL", "DELETE_MESSAGES", "FAILED"),
2395
2498
  domain: error.ErrorDomain.STORAGE,
2396
- category: error.ErrorCategory.USER
2499
+ category: error.ErrorCategory.THIRD_PARTY,
2500
+ details: { messageIds: messageIds.join(", ") }
2397
2501
  },
2398
2502
  error$1
2399
2503
  );
2400
2504
  }
2401
- if (count === 0) {
2402
- return {
2403
- pagination: {
2404
- total: 0,
2405
- page,
2406
- perPage,
2407
- hasMore: false
2408
- },
2409
- spans: []
2505
+ }
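
Note: deleteMessages above runs inside a write transaction, deleting ids in batches of 100 and touching the updatedAt of every affected thread; an empty array is a no-op. For example:

// Inside an async function; `memory` as above, ids are placeholders.
await memory.deleteMessages(['msg-1', 'msg-2']);
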
2506
+ async getResourceById({ resourceId }) {
2507
+ const result = await this.#db.select({
2508
+ tableName: storage.TABLE_RESOURCES,
2509
+ keys: { id: resourceId }
2510
+ });
2511
+ if (!result) {
2512
+ return null;
2513
+ }
2514
+ return {
2515
+ ...result,
2516
+ // Ensure workingMemory is always returned as a string, even if auto-parsed as JSON
2517
+ workingMemory: result.workingMemory && typeof result.workingMemory === "object" ? JSON.stringify(result.workingMemory) : result.workingMemory,
2518
+ metadata: typeof result.metadata === "string" ? JSON.parse(result.metadata) : result.metadata,
2519
+ createdAt: new Date(result.createdAt),
2520
+ updatedAt: new Date(result.updatedAt)
2521
+ };
2522
+ }
2523
+ async saveResource({ resource }) {
2524
+ await this.#db.insert({
2525
+ tableName: storage.TABLE_RESOURCES,
2526
+ record: {
2527
+ ...resource,
2528
+ metadata: JSON.stringify(resource.metadata)
2529
+ }
2530
+ });
2531
+ return resource;
2532
+ }
2533
+ async updateResource({
2534
+ resourceId,
2535
+ workingMemory,
2536
+ metadata
2537
+ }) {
2538
+ const existingResource = await this.getResourceById({ resourceId });
2539
+ if (!existingResource) {
2540
+ const newResource = {
2541
+ id: resourceId,
2542
+ workingMemory,
2543
+ metadata: metadata || {},
2544
+ createdAt: /* @__PURE__ */ new Date(),
2545
+ updatedAt: /* @__PURE__ */ new Date()
2410
2546
  };
2547
+ return this.saveResource({ resource: newResource });
2548
+ }
2549
+ const updatedResource = {
2550
+ ...existingResource,
2551
+ workingMemory: workingMemory !== void 0 ? workingMemory : existingResource.workingMemory,
2552
+ metadata: {
2553
+ ...existingResource.metadata,
2554
+ ...metadata
2555
+ },
2556
+ updatedAt: /* @__PURE__ */ new Date()
2557
+ };
2558
+ const updates = [];
2559
+ const values = [];
2560
+ if (workingMemory !== void 0) {
2561
+ updates.push("workingMemory = ?");
2562
+ values.push(workingMemory);
2563
+ }
2564
+ if (metadata) {
2565
+ updates.push("metadata = ?");
2566
+ values.push(JSON.stringify(updatedResource.metadata));
2411
2567
  }
2568
+ updates.push("updatedAt = ?");
2569
+ values.push(updatedResource.updatedAt.toISOString());
2570
+ values.push(resourceId);
2571
+ await this.#client.execute({
2572
+ sql: `UPDATE ${storage.TABLE_RESOURCES} SET ${updates.join(", ")} WHERE id = ?`,
2573
+ args: values
2574
+ });
2575
+ return updatedResource;
2576
+ }
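
Note: updateResource above behaves as an upsert: when the resource row is missing it is created with the supplied working memory and metadata, otherwise workingMemory is replaced only if provided and metadata is shallow-merged into the existing object. A sketch with placeholder values:

// Inside an async function; `memory` as above.
const resource = await memory.updateResource({
  resourceId: 'user-9',
  workingMemory: '# Notes\n- prefers email',
  metadata: { plan: 'pro' }, // merged over any existing metadata
});
console.log(resource.updatedAt);
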
2577
+ async getThreadById({ threadId }) {
2412
2578
  try {
2413
- const spans = await this.operations.loadMany({
2414
- tableName: storage.TABLE_SPANS,
2415
- whereClause: {
2416
- sql: actualWhereClause,
2417
- args: whereClause.args
2418
- },
2419
- orderBy,
2420
- offset: page * perPage,
2421
- limit: perPage
2579
+ const result = await this.#db.select({
2580
+ tableName: storage.TABLE_THREADS,
2581
+ keys: { id: threadId }
2422
2582
  });
2583
+ if (!result) {
2584
+ return null;
2585
+ }
2423
2586
  return {
2424
- pagination: {
2425
- total: count,
2426
- page,
2427
- perPage,
2428
- hasMore: spans.length === perPage
2429
- },
2430
- spans: spans.map((span) => transformFromSqlRow({ tableName: storage.TABLE_SPANS, sqlRow: span }))
2587
+ ...result,
2588
+ metadata: typeof result.metadata === "string" ? JSON.parse(result.metadata) : result.metadata,
2589
+ createdAt: new Date(result.createdAt),
2590
+ updatedAt: new Date(result.updatedAt)
2431
2591
  };
2432
2592
  } catch (error$1) {
2433
2593
  throw new error.MastraError(
2434
2594
  {
2435
- id: storage.createStorageErrorId("LIBSQL", "GET_TRACES_PAGINATED", "FAILED"),
2595
+ id: storage.createStorageErrorId("LIBSQL", "GET_THREAD_BY_ID", "FAILED"),
2436
2596
  domain: error.ErrorDomain.STORAGE,
2437
- category: error.ErrorCategory.USER
2597
+ category: error.ErrorCategory.THIRD_PARTY,
2598
+ details: { threadId }
2438
2599
  },
2439
2600
  error$1
2440
2601
  );
2441
2602
  }
2442
2603
  }
2443
- async batchCreateSpans(args) {
2444
- try {
2445
- const now = (/* @__PURE__ */ new Date()).toISOString();
2446
- return this.operations.batchInsert({
2447
- tableName: storage.TABLE_SPANS,
2448
- records: args.records.map((record) => ({
2449
- ...record,
2450
- createdAt: now,
2451
- updatedAt: now
2452
- }))
2453
- });
2454
- } catch (error$1) {
2604
+ async listThreadsByResourceId(args) {
2605
+ const { resourceId, page = 0, perPage: perPageInput, orderBy } = args;
2606
+ if (page < 0) {
2455
2607
  throw new error.MastraError(
2456
2608
  {
2457
- id: storage.createStorageErrorId("LIBSQL", "BATCH_CREATE_SPANS", "FAILED"),
2609
+ id: storage.createStorageErrorId("LIBSQL", "LIST_THREADS_BY_RESOURCE_ID", "INVALID_PAGE"),
2458
2610
  domain: error.ErrorDomain.STORAGE,
2459
- category: error.ErrorCategory.USER
2611
+ category: error.ErrorCategory.USER,
2612
+ details: { page }
2460
2613
  },
2461
- error$1
2614
+ new Error("page must be >= 0")
2462
2615
  );
2463
2616
  }
2464
- }
2465
- async batchUpdateSpans(args) {
2617
+ const perPage = storage.normalizePerPage(perPageInput, 100);
2618
+ const { offset, perPage: perPageForResponse } = storage.calculatePagination(page, perPageInput, perPage);
2619
+ const { field, direction } = this.parseOrderBy(orderBy);
2466
2620
  try {
2467
- return this.operations.batchUpdate({
2468
- tableName: storage.TABLE_SPANS,
2469
- updates: args.records.map((record) => ({
2470
- keys: { spanId: record.spanId, traceId: record.traceId },
2471
- data: { ...record.updates, updatedAt: (/* @__PURE__ */ new Date()).toISOString() }
2472
- }))
2621
+ const baseQuery = `FROM ${storage.TABLE_THREADS} WHERE resourceId = ?`;
2622
+ const queryParams = [resourceId];
2623
+ const mapRowToStorageThreadType = (row) => ({
2624
+ id: row.id,
2625
+ resourceId: row.resourceId,
2626
+ title: row.title,
2627
+ createdAt: new Date(row.createdAt),
2628
+ // Convert string to Date
2629
+ updatedAt: new Date(row.updatedAt),
2630
+ // Convert string to Date
2631
+ metadata: typeof row.metadata === "string" ? JSON.parse(row.metadata) : row.metadata
2632
+ });
2633
+ const countResult = await this.#client.execute({
2634
+ sql: `SELECT COUNT(*) as count ${baseQuery}`,
2635
+ args: queryParams
2636
+ });
2637
+ const total = Number(countResult.rows?.[0]?.count ?? 0);
2638
+ if (total === 0) {
2639
+ return {
2640
+ threads: [],
2641
+ total: 0,
2642
+ page,
2643
+ perPage: perPageForResponse,
2644
+ hasMore: false
2645
+ };
2646
+ }
2647
+ const limitValue = perPageInput === false ? total : perPage;
2648
+ const dataResult = await this.#client.execute({
2649
+ sql: `SELECT * ${baseQuery} ORDER BY "${field}" ${direction} LIMIT ? OFFSET ?`,
2650
+ args: [...queryParams, limitValue, offset]
2473
2651
  });
2652
+ const threads = (dataResult.rows || []).map(mapRowToStorageThreadType);
2653
+ return {
2654
+ threads,
2655
+ total,
2656
+ page,
2657
+ perPage: perPageForResponse,
2658
+ hasMore: perPageInput === false ? false : offset + perPage < total
2659
+ };
2474
2660
  } catch (error$1) {
2475
- throw new error.MastraError(
2661
+ const mastraError = new error.MastraError(
2476
2662
  {
2477
- id: storage.createStorageErrorId("LIBSQL", "BATCH_UPDATE_SPANS", "FAILED"),
2663
+ id: storage.createStorageErrorId("LIBSQL", "LIST_THREADS_BY_RESOURCE_ID", "FAILED"),
2478
2664
  domain: error.ErrorDomain.STORAGE,
2479
- category: error.ErrorCategory.USER
2665
+ category: error.ErrorCategory.THIRD_PARTY,
2666
+ details: { resourceId }
2480
2667
  },
2481
2668
  error$1
2482
2669
  );
2670
+ this.logger?.trackException?.(mastraError);
2671
+ this.logger?.error?.(mastraError.toString());
2672
+ return {
2673
+ threads: [],
2674
+ total: 0,
2675
+ page,
2676
+ perPage: perPageForResponse,
2677
+ hasMore: false
2678
+ };
2483
2679
  }
2484
2680
  }
2485
- async batchDeleteTraces(args) {
2681
+ async saveThread({ thread }) {
2486
2682
  try {
2487
- const keys = args.traceIds.map((traceId) => ({ traceId }));
2488
- return this.operations.batchDelete({
2489
- tableName: storage.TABLE_SPANS,
2490
- keys
2683
+ await this.#db.insert({
2684
+ tableName: storage.TABLE_THREADS,
2685
+ record: {
2686
+ ...thread,
2687
+ metadata: JSON.stringify(thread.metadata)
2688
+ }
2491
2689
  });
2690
+ return thread;
2492
2691
  } catch (error$1) {
2493
- throw new error.MastraError(
2692
+ const mastraError = new error.MastraError(
2494
2693
  {
2495
- id: storage.createStorageErrorId("LIBSQL", "BATCH_DELETE_TRACES", "FAILED"),
2694
+ id: storage.createStorageErrorId("LIBSQL", "SAVE_THREAD", "FAILED"),
2496
2695
  domain: error.ErrorDomain.STORAGE,
2497
- category: error.ErrorCategory.USER
2696
+ category: error.ErrorCategory.THIRD_PARTY,
2697
+ details: { threadId: thread.id }
2498
2698
  },
2499
2699
  error$1
2500
2700
  );
2701
+ this.logger?.trackException?.(mastraError);
2702
+ this.logger?.error?.(mastraError.toString());
2703
+ throw mastraError;
2501
2704
  }
2502
2705
  }
2503
- };
2504
- var StoreOperationsLibSQL = class extends storage.StoreOperations {
2505
- client;
2506
- /**
2507
- * Maximum number of retries for write operations if an SQLITE_BUSY error occurs.
2508
- * @default 5
2509
- */
2510
- maxRetries;
2511
- /**
2512
- * Initial backoff time in milliseconds for retrying write operations on SQLITE_BUSY.
2513
- * The backoff time will double with each retry (exponential backoff).
2514
- * @default 100
2515
- */
2516
- initialBackoffMs;
2517
- constructor({
2518
- client,
2519
- maxRetries,
2520
- initialBackoffMs
2521
- }) {
2522
- super();
2523
- this.client = client;
2524
- this.maxRetries = maxRetries ?? 5;
2525
- this.initialBackoffMs = initialBackoffMs ?? 100;
2526
- }
2527
- async hasColumn(table, column) {
2528
- const result = await this.client.execute({
2529
- sql: `PRAGMA table_info(${table})`
2530
- });
2531
- return (await result.rows)?.some((row) => row.name === column);
2532
- }
2533
- getCreateTableSQL(tableName, schema) {
2534
- const parsedTableName = utils.parseSqlIdentifier(tableName, "table name");
2535
- const columns = Object.entries(schema).map(([name, col]) => {
2536
- const parsedColumnName = utils.parseSqlIdentifier(name, "column name");
2537
- const type = this.getSqlType(col.type);
2538
- const nullable = col.nullable ? "" : "NOT NULL";
2539
- const primaryKey = col.primaryKey ? "PRIMARY KEY" : "";
2540
- return `${parsedColumnName} ${type} ${nullable} ${primaryKey}`.trim();
2541
- });
2542
- if (tableName === storage.TABLE_WORKFLOW_SNAPSHOT) {
2543
- const stmnt = `CREATE TABLE IF NOT EXISTS ${parsedTableName} (
2544
- ${columns.join(",\n")},
2545
- PRIMARY KEY (workflow_name, run_id)
2546
- )`;
2547
- return stmnt;
2548
- }
2549
- if (tableName === storage.TABLE_SPANS) {
2550
- const stmnt = `CREATE TABLE IF NOT EXISTS ${parsedTableName} (
2551
- ${columns.join(",\n")},
2552
- PRIMARY KEY (traceId, spanId)
2553
- )`;
2554
- return stmnt;
2555
- }
2556
- return `CREATE TABLE IF NOT EXISTS ${parsedTableName} (${columns.join(", ")})`;
2557
- }
2558
- async createTable({
2559
- tableName,
2560
- schema
2706
+ async updateThread({
2707
+ id,
2708
+ title,
2709
+ metadata
2561
2710
  }) {
2711
+ const thread = await this.getThreadById({ threadId: id });
2712
+ if (!thread) {
2713
+ throw new error.MastraError({
2714
+ id: storage.createStorageErrorId("LIBSQL", "UPDATE_THREAD", "NOT_FOUND"),
2715
+ domain: error.ErrorDomain.STORAGE,
2716
+ category: error.ErrorCategory.USER,
2717
+ text: `Thread ${id} not found`,
2718
+ details: {
2719
+ status: 404,
2720
+ threadId: id
2721
+ }
2722
+ });
2723
+ }
2724
+ const updatedThread = {
2725
+ ...thread,
2726
+ title,
2727
+ metadata: {
2728
+ ...thread.metadata,
2729
+ ...metadata
2730
+ }
2731
+ };
2562
2732
  try {
2563
- this.logger.debug(`Creating database table`, { tableName, operation: "schema init" });
2564
- const sql = this.getCreateTableSQL(tableName, schema);
2565
- await this.client.execute(sql);
2733
+ await this.#client.execute({
2734
+ sql: `UPDATE ${storage.TABLE_THREADS} SET title = ?, metadata = ? WHERE id = ?`,
2735
+ args: [title, JSON.stringify(updatedThread.metadata), id]
2736
+ });
2737
+ return updatedThread;
2566
2738
  } catch (error$1) {
2567
2739
  throw new error.MastraError(
2568
2740
  {
2569
- id: storage.createStorageErrorId("LIBSQL", "CREATE_TABLE", "FAILED"),
2741
+ id: storage.createStorageErrorId("LIBSQL", "UPDATE_THREAD", "FAILED"),
2570
2742
  domain: error.ErrorDomain.STORAGE,
2571
2743
  category: error.ErrorCategory.THIRD_PARTY,
2572
- details: {
2573
- tableName
2574
- }
2744
+ text: `Failed to update thread ${id}`,
2745
+ details: { threadId: id }
2575
2746
  },
2576
2747
  error$1
2577
2748
  );
2578
2749
  }
2579
2750
  }
2580
- getSqlType(type) {
2581
- switch (type) {
2582
- case "bigint":
2583
- return "INTEGER";
2584
- // SQLite uses INTEGER for all integer sizes
2585
- case "timestamp":
2586
- return "TEXT";
2587
- // Store timestamps as ISO strings in SQLite
2588
- // jsonb falls through to base class which returns 'JSONB'
2589
- // SQLite's flexible type system treats JSONB as TEXT affinity
2590
- default:
2591
- return super.getSqlType(type);
2592
- }
2593
- }
2594
- async doInsert({
2595
- tableName,
2596
- record
2597
- }) {
2598
- await this.client.execute(
2599
- prepareStatement({
2600
- tableName,
2601
- record
2602
- })
2603
- );
2604
- }
2605
- insert(args) {
2606
- const executeWriteOperationWithRetry = createExecuteWriteOperationWithRetry({
2607
- logger: this.logger,
2608
- maxRetries: this.maxRetries,
2609
- initialBackoffMs: this.initialBackoffMs
2610
- });
2611
- return executeWriteOperationWithRetry(() => this.doInsert(args), `insert into table ${args.tableName}`);
2612
- }
2613
- async load({ tableName, keys }) {
2614
- const parsedTableName = utils.parseSqlIdentifier(tableName, "table name");
2615
- const parsedKeys = Object.keys(keys).map((key) => utils.parseSqlIdentifier(key, "column name"));
2616
- const conditions = parsedKeys.map((key) => `${key} = ?`).join(" AND ");
2617
- const values = Object.values(keys);
2618
- const result = await this.client.execute({
2619
- sql: `SELECT * FROM ${parsedTableName} WHERE ${conditions} ORDER BY createdAt DESC LIMIT 1`,
2620
- args: values
2621
- });
2622
- if (!result.rows || result.rows.length === 0) {
2623
- return null;
2624
- }
2625
- const row = result.rows[0];
2626
- const parsed = Object.fromEntries(
2627
- Object.entries(row || {}).map(([k, v]) => {
2628
- try {
2629
- return [k, typeof v === "string" ? v.startsWith("{") || v.startsWith("[") ? JSON.parse(v) : v : v];
2630
- } catch {
2631
- return [k, v];
2632
- }
2633
- })
2634
- );
2635
- return parsed;
2636
- }
2637
- async loadMany({
2638
- tableName,
2639
- whereClause,
2640
- orderBy,
2641
- offset,
2642
- limit,
2643
- args
2644
- }) {
2645
- const parsedTableName = utils.parseSqlIdentifier(tableName, "table name");
2646
- let statement = `SELECT * FROM ${parsedTableName}`;
2647
- if (whereClause?.sql) {
2648
- statement += `${whereClause.sql}`;
2649
- }
2650
- if (orderBy) {
2651
- statement += ` ORDER BY ${orderBy}`;
2652
- }
2653
- if (limit) {
2654
- statement += ` LIMIT ${limit}`;
2655
- }
2656
- if (offset) {
2657
- statement += ` OFFSET ${offset}`;
2751
+ async deleteThread({ threadId }) {
2752
+ try {
2753
+ await this.#client.execute({
2754
+ sql: `DELETE FROM ${storage.TABLE_MESSAGES} WHERE thread_id = ?`,
2755
+ args: [threadId]
2756
+ });
2757
+ await this.#client.execute({
2758
+ sql: `DELETE FROM ${storage.TABLE_THREADS} WHERE id = ?`,
2759
+ args: [threadId]
2760
+ });
2761
+ } catch (error$1) {
2762
+ throw new error.MastraError(
2763
+ {
2764
+ id: storage.createStorageErrorId("LIBSQL", "DELETE_THREAD", "FAILED"),
2765
+ domain: error.ErrorDomain.STORAGE,
2766
+ category: error.ErrorCategory.THIRD_PARTY,
2767
+ details: { threadId }
2768
+ },
2769
+ error$1
2770
+ );
2658
2771
  }
2659
- const result = await this.client.execute({
2660
- sql: statement,
2661
- args: [...whereClause?.args ?? [], ...args ?? []]
2662
- });
2663
- return result.rows;
2664
2772
  }
2665
- async loadTotalCount({
2666
- tableName,
2667
- whereClause
2668
- }) {
2669
- const parsedTableName = utils.parseSqlIdentifier(tableName, "table name");
2670
- const statement = `SELECT COUNT(*) as count FROM ${parsedTableName} ${whereClause ? `${whereClause.sql}` : ""}`;
2671
- const result = await this.client.execute({
2672
- sql: statement,
2673
- args: whereClause?.args ?? []
2674
- });
2675
- if (!result.rows || result.rows.length === 0) {
2676
- return 0;
2677
- }
2678
- return result.rows[0]?.count ?? 0;
2773
+ };
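
Note: taken together, the MemoryLibSQL class above now covers thread, message and resource persistence behind one domain store. A minimal end-to-end sketch; the export name and config shape are assumed rather than confirmed by this hunk:

const { MemoryLibSQL } = require('@mastra/libsql'); // export name is an assumption

(async () => {
  const memory = new MemoryLibSQL({ url: 'file:./mastra.db' }); // config shape is an assumption
  await memory.init(); // creates the threads, messages and resources tables
  await memory.saveThread({
    thread: {
      id: 'thread-123',
      resourceId: 'user-9',
      title: 'Support chat',
      metadata: {},
      createdAt: new Date(),
      updatedAt: new Date(),
    },
  });
  const { threads } = await memory.listThreadsByResourceId({ resourceId: 'user-9', page: 0, perPage: 20 });
  const { messages, total } = await memory.listMessages({ threadId: 'thread-123', page: 0, perPage: 40 });
  console.log(threads.length, messages.length, total);
  await memory.deleteThread({ threadId: 'thread-123' }); // also deletes the thread's messages
})();
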
2774
+ var ObservabilityLibSQL = class extends storage.ObservabilityStorage {
2775
+ #db;
2776
+ constructor(config) {
2777
+ super();
2778
+ const client = resolveClient(config);
2779
+ this.#db = new LibSQLDB({ client, maxRetries: config.maxRetries, initialBackoffMs: config.initialBackoffMs });
2679
2780
  }
2680
- update(args) {
2681
- const executeWriteOperationWithRetry = createExecuteWriteOperationWithRetry({
2682
- logger: this.logger,
2683
- maxRetries: this.maxRetries,
2684
- initialBackoffMs: this.initialBackoffMs
2685
- });
2686
- return executeWriteOperationWithRetry(() => this.executeUpdate(args), `update table ${args.tableName}`);
2781
+ async init() {
2782
+ await this.#db.createTable({ tableName: storage.TABLE_SPANS, schema: storage.SPAN_SCHEMA });
2687
2783
  }
2688
- async executeUpdate({
2689
- tableName,
2690
- keys,
2691
- data
2692
- }) {
2693
- await this.client.execute(prepareUpdateStatement({ tableName, updates: data, keys }));
2784
+ async dangerouslyClearAll() {
2785
+ await this.#db.deleteData({ tableName: storage.TABLE_SPANS });
2694
2786
  }
2695
- async doBatchInsert({
2696
- tableName,
2697
- records
2698
- }) {
2699
- if (records.length === 0) return;
2700
- const batchStatements = records.map((r) => prepareStatement({ tableName, record: r }));
2701
- await this.client.batch(batchStatements, "write");
2787
+ get tracingStrategy() {
2788
+ return {
2789
+ preferred: "batch-with-updates",
2790
+ supported: ["batch-with-updates", "insert-only"]
2791
+ };
2702
2792
  }
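
A small sketch of consulting the tracingStrategy getter introduced above, assuming an ObservabilityLibSQL instance named observability; the exporter logic around it is hypothetical:

// Illustrative only.
const { preferred, supported } = observability.tracingStrategy;
// preferred is "batch-with-updates"; supported also lists "insert-only".
const mode = supported.includes(preferred) ? preferred : 'insert-only';
console.log('tracing strategy in use:', mode);
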
2703
- batchInsert(args) {
2704
- const executeWriteOperationWithRetry = createExecuteWriteOperationWithRetry({
2705
- logger: this.logger,
2706
- maxRetries: this.maxRetries,
2707
- initialBackoffMs: this.initialBackoffMs
2708
- });
2709
- return executeWriteOperationWithRetry(
2710
- () => this.doBatchInsert(args),
2711
- `batch insert into table ${args.tableName}`
2712
- ).catch((error$1) => {
2793
+ async createSpan(args) {
2794
+ const { span } = args;
2795
+ try {
2796
+ const startedAt = span.startedAt instanceof Date ? span.startedAt.toISOString() : span.startedAt;
2797
+ const endedAt = span.endedAt instanceof Date ? span.endedAt.toISOString() : span.endedAt;
2798
+ const now = (/* @__PURE__ */ new Date()).toISOString();
2799
+ const record = {
2800
+ ...span,
2801
+ startedAt,
2802
+ endedAt,
2803
+ createdAt: now,
2804
+ updatedAt: now
2805
+ };
2806
+ return this.#db.insert({ tableName: storage.TABLE_SPANS, record });
2807
+ } catch (error$1) {
2713
2808
  throw new error.MastraError(
2714
2809
  {
2715
- id: storage.createStorageErrorId("LIBSQL", "BATCH_INSERT", "FAILED"),
2810
+ id: storage.createStorageErrorId("LIBSQL", "CREATE_SPAN", "FAILED"),
2716
2811
  domain: error.ErrorDomain.STORAGE,
2717
- category: error.ErrorCategory.THIRD_PARTY,
2812
+ category: error.ErrorCategory.USER,
2718
2813
  details: {
2719
- tableName: args.tableName
2814
+ spanId: span.spanId,
2815
+ traceId: span.traceId,
2816
+ spanType: span.spanType,
2817
+ name: span.name
2720
2818
  }
2721
2819
  },
2722
2820
  error$1
2723
2821
  );
2724
- });
2822
+ }
2725
2823
  }
2726
- /**
2727
- * Public batch update method with retry logic
2728
- */
2729
- batchUpdate(args) {
2730
- const executeWriteOperationWithRetry = createExecuteWriteOperationWithRetry({
2731
- logger: this.logger,
2732
- maxRetries: this.maxRetries,
2733
- initialBackoffMs: this.initialBackoffMs
2734
- });
2735
- return executeWriteOperationWithRetry(
2736
- () => this.executeBatchUpdate(args),
2737
- `batch update in table ${args.tableName}`
2738
- ).catch((error$1) => {
2824
+ async getSpan(args) {
2825
+ const { traceId, spanId } = args;
2826
+ try {
2827
+ const rows = await this.#db.selectMany({
2828
+ tableName: storage.TABLE_SPANS,
2829
+ whereClause: { sql: " WHERE traceId = ? AND spanId = ?", args: [traceId, spanId] },
2830
+ limit: 1
2831
+ });
2832
+ if (!rows || rows.length === 0) {
2833
+ return null;
2834
+ }
2835
+ return {
2836
+ span: transformFromSqlRow({ tableName: storage.TABLE_SPANS, sqlRow: rows[0] })
2837
+ };
2838
+ } catch (error$1) {
2739
2839
  throw new error.MastraError(
2740
2840
  {
2741
- id: storage.createStorageErrorId("LIBSQL", "BATCH_UPDATE", "FAILED"),
2841
+ id: storage.createStorageErrorId("LIBSQL", "GET_SPAN", "FAILED"),
2742
2842
  domain: error.ErrorDomain.STORAGE,
2743
- category: error.ErrorCategory.THIRD_PARTY,
2744
- details: {
2745
- tableName: args.tableName
2746
- }
2843
+ category: error.ErrorCategory.USER,
2844
+ details: { traceId, spanId }
2747
2845
  },
2748
2846
  error$1
2749
2847
  );
2750
- });
2848
+ }
2751
2849
  }
2752
- /**
2753
- * Updates multiple records in batch. Each record can be updated based on single or composite keys.
2754
- */
2755
- async executeBatchUpdate({
2756
- tableName,
2757
- updates
2758
- }) {
2759
- if (updates.length === 0) return;
2760
- const batchStatements = updates.map(
2761
- ({ keys, data }) => prepareUpdateStatement({
2762
- tableName,
2763
- updates: data,
2764
- keys
2765
- })
2766
- );
2767
- await this.client.batch(batchStatements, "write");
2850
+ async getRootSpan(args) {
2851
+ const { traceId } = args;
2852
+ try {
2853
+ const rows = await this.#db.selectMany({
2854
+ tableName: storage.TABLE_SPANS,
2855
+ whereClause: { sql: " WHERE traceId = ? AND parentSpanId IS NULL", args: [traceId] },
2856
+ limit: 1
2857
+ });
2858
+ if (!rows || rows.length === 0) {
2859
+ return null;
2860
+ }
2861
+ return {
2862
+ span: transformFromSqlRow({ tableName: storage.TABLE_SPANS, sqlRow: rows[0] })
2863
+ };
2864
+ } catch (error$1) {
2865
+ throw new error.MastraError(
2866
+ {
2867
+ id: storage.createStorageErrorId("LIBSQL", "GET_ROOT_SPAN", "FAILED"),
2868
+ domain: error.ErrorDomain.STORAGE,
2869
+ category: error.ErrorCategory.USER,
2870
+ details: { traceId }
2871
+ },
2872
+ error$1
2873
+ );
2874
+ }
2768
2875
  }
2769
- /**
2770
- * Public batch delete method with retry logic
2771
- */
2772
- batchDelete({ tableName, keys }) {
2773
- const executeWriteOperationWithRetry = createExecuteWriteOperationWithRetry({
2774
- logger: this.logger,
2775
- maxRetries: this.maxRetries,
2776
- initialBackoffMs: this.initialBackoffMs
2777
- });
2778
- return executeWriteOperationWithRetry(
2779
- () => this.executeBatchDelete({ tableName, keys }),
2780
- `batch delete from table ${tableName}`
2781
- ).catch((error$1) => {
2876
+ async getTrace(args) {
2877
+ const { traceId } = args;
2878
+ try {
2879
+ const spans = await this.#db.selectMany({
2880
+ tableName: storage.TABLE_SPANS,
2881
+ whereClause: { sql: " WHERE traceId = ?", args: [traceId] },
2882
+ orderBy: "startedAt ASC"
2883
+ });
2884
+ if (!spans || spans.length === 0) {
2885
+ return null;
2886
+ }
2887
+ return {
2888
+ traceId,
2889
+ spans: spans.map((span) => transformFromSqlRow({ tableName: storage.TABLE_SPANS, sqlRow: span }))
2890
+ };
2891
+ } catch (error$1) {
2782
2892
  throw new error.MastraError(
2783
2893
  {
2784
- id: storage.createStorageErrorId("LIBSQL", "BATCH_DELETE", "FAILED"),
2894
+ id: storage.createStorageErrorId("LIBSQL", "GET_TRACE", "FAILED"),
2785
2895
  domain: error.ErrorDomain.STORAGE,
2786
- category: error.ErrorCategory.THIRD_PARTY,
2896
+ category: error.ErrorCategory.USER,
2787
2897
  details: {
2788
- tableName
2898
+ traceId
2789
2899
  }
2790
2900
  },
2791
2901
  error$1
2792
2902
  );
2793
- });
2903
+ }
2794
2904
  }
2795
- /**
2796
- * Deletes multiple records in batch. Each record can be deleted based on single or composite keys.
2797
- */
2798
- async executeBatchDelete({
2799
- tableName,
2800
- keys
2801
- }) {
2802
- if (keys.length === 0) return;
2803
- const batchStatements = keys.map(
2804
- (keyObj) => prepareDeleteStatement({
2805
- tableName,
2806
- keys: keyObj
2807
- })
2808
- );
2809
- await this.client.batch(batchStatements, "write");
2905
+ async updateSpan(args) {
2906
+ const { traceId, spanId, updates } = args;
2907
+ try {
2908
+ const data = { ...updates };
2909
+ if (data.endedAt instanceof Date) {
2910
+ data.endedAt = data.endedAt.toISOString();
2911
+ }
2912
+ if (data.startedAt instanceof Date) {
2913
+ data.startedAt = data.startedAt.toISOString();
2914
+ }
2915
+ data.updatedAt = (/* @__PURE__ */ new Date()).toISOString();
2916
+ await this.#db.update({
2917
+ tableName: storage.TABLE_SPANS,
2918
+ keys: { spanId, traceId },
2919
+ data
2920
+ });
2921
+ } catch (error$1) {
2922
+ throw new error.MastraError(
2923
+ {
2924
+ id: storage.createStorageErrorId("LIBSQL", "UPDATE_SPAN", "FAILED"),
2925
+ domain: error.ErrorDomain.STORAGE,
2926
+ category: error.ErrorCategory.USER,
2927
+ details: {
2928
+ spanId,
2929
+ traceId
2930
+ }
2931
+ },
2932
+ error$1
2933
+ );
2934
+ }
2810
2935
  }
2811
- /**
2812
- * Alters table schema to add columns if they don't exist
2813
- * @param tableName Name of the table
2814
- * @param schema Schema of the table
2815
- * @param ifNotExists Array of column names to add if they don't exist
2816
- */
2817
- async alterTable({
2818
- tableName,
2819
- schema,
2820
- ifNotExists
2821
- }) {
2822
- const parsedTableName = utils.parseSqlIdentifier(tableName, "table name");
2936
+ async listTraces(args) {
2937
+ const { filters, pagination, orderBy } = storage.listTracesArgsSchema.parse(args);
2938
+ const { page, perPage } = pagination;
2939
+ const tableName = utils.parseSqlIdentifier(storage.TABLE_SPANS, "table name");
2823
2940
  try {
2824
- const pragmaQuery = `PRAGMA table_info(${parsedTableName})`;
2825
- const result = await this.client.execute(pragmaQuery);
2826
- const existingColumnNames = new Set(result.rows.map((row) => row.name.toLowerCase()));
2827
- for (const columnName of ifNotExists) {
2828
- if (!existingColumnNames.has(columnName.toLowerCase()) && schema[columnName]) {
2829
- const columnDef = schema[columnName];
2830
- const sqlType = this.getSqlType(columnDef.type);
2831
- const nullable = columnDef.nullable === false ? "NOT NULL" : "";
2832
- const defaultValue = columnDef.nullable === false ? this.getDefaultValue(columnDef.type) : "";
2833
- const alterSql = `ALTER TABLE ${parsedTableName} ADD COLUMN "${columnName}" ${sqlType} ${nullable} ${defaultValue}`.trim();
2834
- await this.client.execute(alterSql);
2835
- this.logger?.debug?.(`Added column ${columnName} to table ${parsedTableName}`);
2941
+ const conditions = ["parentSpanId IS NULL"];
2942
+ const queryArgs = [];
2943
+ if (filters) {
2944
+ if (filters.startedAt?.start) {
2945
+ conditions.push(`startedAt >= ?`);
2946
+ queryArgs.push(filters.startedAt.start.toISOString());
2947
+ }
2948
+ if (filters.startedAt?.end) {
2949
+ conditions.push(`startedAt <= ?`);
2950
+ queryArgs.push(filters.startedAt.end.toISOString());
2951
+ }
2952
+ if (filters.endedAt?.start) {
2953
+ conditions.push(`endedAt >= ?`);
2954
+ queryArgs.push(filters.endedAt.start.toISOString());
2955
+ }
2956
+ if (filters.endedAt?.end) {
2957
+ conditions.push(`endedAt <= ?`);
2958
+ queryArgs.push(filters.endedAt.end.toISOString());
2959
+ }
2960
+ if (filters.spanType !== void 0) {
2961
+ conditions.push(`spanType = ?`);
2962
+ queryArgs.push(filters.spanType);
2963
+ }
2964
+ if (filters.entityType !== void 0) {
2965
+ conditions.push(`entityType = ?`);
2966
+ queryArgs.push(filters.entityType);
2967
+ }
2968
+ if (filters.entityId !== void 0) {
2969
+ conditions.push(`entityId = ?`);
2970
+ queryArgs.push(filters.entityId);
2971
+ }
2972
+ if (filters.entityName !== void 0) {
2973
+ conditions.push(`entityName = ?`);
2974
+ queryArgs.push(filters.entityName);
2975
+ }
2976
+ if (filters.userId !== void 0) {
2977
+ conditions.push(`userId = ?`);
2978
+ queryArgs.push(filters.userId);
2979
+ }
2980
+ if (filters.organizationId !== void 0) {
2981
+ conditions.push(`organizationId = ?`);
2982
+ queryArgs.push(filters.organizationId);
2983
+ }
2984
+ if (filters.resourceId !== void 0) {
2985
+ conditions.push(`resourceId = ?`);
2986
+ queryArgs.push(filters.resourceId);
2987
+ }
2988
+ if (filters.runId !== void 0) {
2989
+ conditions.push(`runId = ?`);
2990
+ queryArgs.push(filters.runId);
2991
+ }
2992
+ if (filters.sessionId !== void 0) {
2993
+ conditions.push(`sessionId = ?`);
2994
+ queryArgs.push(filters.sessionId);
2995
+ }
2996
+ if (filters.threadId !== void 0) {
2997
+ conditions.push(`threadId = ?`);
2998
+ queryArgs.push(filters.threadId);
2999
+ }
3000
+ if (filters.requestId !== void 0) {
3001
+ conditions.push(`requestId = ?`);
3002
+ queryArgs.push(filters.requestId);
3003
+ }
3004
+ if (filters.environment !== void 0) {
3005
+ conditions.push(`environment = ?`);
3006
+ queryArgs.push(filters.environment);
3007
+ }
3008
+ if (filters.source !== void 0) {
3009
+ conditions.push(`source = ?`);
3010
+ queryArgs.push(filters.source);
3011
+ }
3012
+ if (filters.serviceName !== void 0) {
3013
+ conditions.push(`serviceName = ?`);
3014
+ queryArgs.push(filters.serviceName);
3015
+ }
3016
+ if (filters.scope != null) {
3017
+ for (const [key, value] of Object.entries(filters.scope)) {
3018
+ if (!/^[a-zA-Z_][a-zA-Z0-9_]*$/.test(key)) {
3019
+ throw new error.MastraError({
3020
+ id: storage.createStorageErrorId("LIBSQL", "LIST_TRACES", "INVALID_FILTER_KEY"),
3021
+ domain: error.ErrorDomain.STORAGE,
3022
+ category: error.ErrorCategory.USER,
3023
+ details: { key }
3024
+ });
3025
+ }
3026
+ conditions.push(`json_extract(scope, '$.${key}') = ?`);
3027
+ queryArgs.push(typeof value === "string" ? value : JSON.stringify(value));
3028
+ }
3029
+ }
3030
+ if (filters.metadata != null) {
3031
+ for (const [key, value] of Object.entries(filters.metadata)) {
3032
+ if (!/^[a-zA-Z_][a-zA-Z0-9_]*$/.test(key)) {
3033
+ throw new error.MastraError({
3034
+ id: storage.createStorageErrorId("LIBSQL", "LIST_TRACES", "INVALID_FILTER_KEY"),
3035
+ domain: error.ErrorDomain.STORAGE,
3036
+ category: error.ErrorCategory.USER,
3037
+ details: { key }
3038
+ });
3039
+ }
3040
+ conditions.push(`json_extract(metadata, '$.${key}') = ?`);
3041
+ queryArgs.push(typeof value === "string" ? value : JSON.stringify(value));
3042
+ }
3043
+ }
3044
+ if (filters.tags != null && filters.tags.length > 0) {
3045
+ for (const tag of filters.tags) {
3046
+ conditions.push(`EXISTS (SELECT 1 FROM json_each(${tableName}.tags) WHERE value = ?)`);
3047
+ queryArgs.push(tag);
3048
+ }
3049
+ }
3050
+ if (filters.status !== void 0) {
3051
+ switch (filters.status) {
3052
+ case storage.TraceStatus.ERROR:
3053
+ conditions.push(`error IS NOT NULL`);
3054
+ break;
3055
+ case storage.TraceStatus.RUNNING:
3056
+ conditions.push(`endedAt IS NULL AND error IS NULL`);
3057
+ break;
3058
+ case storage.TraceStatus.SUCCESS:
3059
+ conditions.push(`endedAt IS NOT NULL AND error IS NULL`);
3060
+ break;
3061
+ }
3062
+ }
3063
+ if (filters.hasChildError !== void 0) {
3064
+ if (filters.hasChildError) {
3065
+ conditions.push(`EXISTS (
3066
+ SELECT 1 FROM ${tableName} c
3067
+ WHERE c.traceId = ${tableName}.traceId AND c.error IS NOT NULL
3068
+ )`);
3069
+ } else {
3070
+ conditions.push(`NOT EXISTS (
3071
+ SELECT 1 FROM ${tableName} c
3072
+ WHERE c.traceId = ${tableName}.traceId AND c.error IS NOT NULL
3073
+ )`);
3074
+ }
2836
3075
  }
2837
3076
  }
3077
+ const whereClause = conditions.length > 0 ? `WHERE ${conditions.join(" AND ")}` : "";
3078
+ const sortField = orderBy.field;
3079
+ const sortDirection = orderBy.direction;
3080
+ let orderByClause;
3081
+ if (sortField === "endedAt") {
3082
+ orderByClause = sortDirection === "DESC" ? `CASE WHEN ${sortField} IS NULL THEN 0 ELSE 1 END, ${sortField} DESC` : `CASE WHEN ${sortField} IS NULL THEN 1 ELSE 0 END, ${sortField} ASC`;
3083
+ } else {
3084
+ orderByClause = `${sortField} ${sortDirection}`;
3085
+ }
3086
+ const count = await this.#db.selectTotalCount({
3087
+ tableName: storage.TABLE_SPANS,
3088
+ whereClause: { sql: whereClause, args: queryArgs }
3089
+ });
3090
+ if (count === 0) {
3091
+ return {
3092
+ pagination: {
3093
+ total: 0,
3094
+ page,
3095
+ perPage,
3096
+ hasMore: false
3097
+ },
3098
+ spans: []
3099
+ };
3100
+ }
3101
+ const spans = await this.#db.selectMany({
3102
+ tableName: storage.TABLE_SPANS,
3103
+ whereClause: { sql: whereClause, args: queryArgs },
3104
+ orderBy: orderByClause,
3105
+ offset: page * perPage,
3106
+ limit: perPage
3107
+ });
3108
+ return {
3109
+ pagination: {
3110
+ total: count,
3111
+ page,
3112
+ perPage,
3113
+ hasMore: (page + 1) * perPage < count
3114
+ },
3115
+ spans: spans.map((span) => transformFromSqlRow({ tableName: storage.TABLE_SPANS, sqlRow: span }))
3116
+ };
3117
+ } catch (error$1) {
3118
+ throw new error.MastraError(
3119
+ {
3120
+ id: storage.createStorageErrorId("LIBSQL", "LIST_TRACES", "FAILED"),
3121
+ domain: error.ErrorDomain.STORAGE,
3122
+ category: error.ErrorCategory.USER
3123
+ },
3124
+ error$1
3125
+ );
3126
+ }
3127
+ }
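
A minimal sketch of calling the listTraces method implemented above, assuming an initialized ObservabilityLibSQL instance named observability; the filter values and the orderBy field are placeholders chosen for illustration, and the accepted values are ultimately defined by listTracesArgsSchema in @mastra/core:

// Illustrative only: root spans from the last 24 hours that have a failing child span, newest first.
const result = await observability.listTraces({
  filters: {
    startedAt: { start: new Date(Date.now() - 24 * 60 * 60 * 1000) },
    hasChildError: true
  },
  pagination: { page: 0, perPage: 20 },
  orderBy: { field: 'startedAt', direction: 'DESC' }
});
console.log(result.pagination.total, result.pagination.hasMore, result.spans.length);
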
3128
+ async batchCreateSpans(args) {
3129
+ try {
3130
+ const now = (/* @__PURE__ */ new Date()).toISOString();
3131
+ const records = args.records.map((record) => {
3132
+ const startedAt = record.startedAt instanceof Date ? record.startedAt.toISOString() : record.startedAt;
3133
+ const endedAt = record.endedAt instanceof Date ? record.endedAt.toISOString() : record.endedAt;
3134
+ return {
3135
+ ...record,
3136
+ startedAt,
3137
+ endedAt,
3138
+ createdAt: now,
3139
+ updatedAt: now
3140
+ };
3141
+ });
3142
+ return this.#db.batchInsert({
3143
+ tableName: storage.TABLE_SPANS,
3144
+ records
3145
+ });
2838
3146
  } catch (error$1) {
2839
3147
  throw new error.MastraError(
2840
3148
  {
2841
- id: storage.createStorageErrorId("LIBSQL", "ALTER_TABLE", "FAILED"),
3149
+ id: storage.createStorageErrorId("LIBSQL", "BATCH_CREATE_SPANS", "FAILED"),
2842
3150
  domain: error.ErrorDomain.STORAGE,
2843
- category: error.ErrorCategory.THIRD_PARTY,
2844
- details: {
2845
- tableName
2846
- }
3151
+ category: error.ErrorCategory.USER
2847
3152
  },
2848
3153
  error$1
2849
3154
  );
2850
3155
  }
2851
3156
  }
2852
- async clearTable({ tableName }) {
2853
- const parsedTableName = utils.parseSqlIdentifier(tableName, "table name");
3157
+ async batchUpdateSpans(args) {
3158
+ const now = (/* @__PURE__ */ new Date()).toISOString();
2854
3159
  try {
2855
- await this.client.execute(`DELETE FROM ${parsedTableName}`);
2856
- } catch (e) {
2857
- const mastraError = new error.MastraError(
3160
+ return this.#db.batchUpdate({
3161
+ tableName: storage.TABLE_SPANS,
3162
+ updates: args.records.map((record) => {
3163
+ const data = { ...record.updates };
3164
+ if (data.endedAt instanceof Date) {
3165
+ data.endedAt = data.endedAt.toISOString();
3166
+ }
3167
+ if (data.startedAt instanceof Date) {
3168
+ data.startedAt = data.startedAt.toISOString();
3169
+ }
3170
+ data.updatedAt = now;
3171
+ return {
3172
+ keys: { spanId: record.spanId, traceId: record.traceId },
3173
+ data
3174
+ };
3175
+ })
3176
+ });
3177
+ } catch (error$1) {
3178
+ throw new error.MastraError(
2858
3179
  {
2859
- id: storage.createStorageErrorId("LIBSQL", "CLEAR_TABLE", "FAILED"),
3180
+ id: storage.createStorageErrorId("LIBSQL", "BATCH_UPDATE_SPANS", "FAILED"),
2860
3181
  domain: error.ErrorDomain.STORAGE,
2861
- category: error.ErrorCategory.THIRD_PARTY,
2862
- details: {
2863
- tableName
2864
- }
3182
+ category: error.ErrorCategory.USER
2865
3183
  },
2866
- e
3184
+ error$1
2867
3185
  );
2868
- this.logger?.trackException?.(mastraError);
2869
- this.logger?.error?.(mastraError.toString());
2870
3186
  }
2871
3187
  }
2872
- async dropTable({ tableName }) {
2873
- const parsedTableName = utils.parseSqlIdentifier(tableName, "table name");
3188
+ async batchDeleteTraces(args) {
2874
3189
  try {
2875
- await this.client.execute(`DROP TABLE IF EXISTS ${parsedTableName}`);
2876
- } catch (e) {
3190
+ const keys = args.traceIds.map((traceId) => ({ traceId }));
3191
+ return this.#db.batchDelete({
3192
+ tableName: storage.TABLE_SPANS,
3193
+ keys
3194
+ });
3195
+ } catch (error$1) {
2877
3196
  throw new error.MastraError(
2878
3197
  {
2879
- id: storage.createStorageErrorId("LIBSQL", "DROP_TABLE", "FAILED"),
3198
+ id: storage.createStorageErrorId("LIBSQL", "BATCH_DELETE_TRACES", "FAILED"),
2880
3199
  domain: error.ErrorDomain.STORAGE,
2881
- category: error.ErrorCategory.THIRD_PARTY,
2882
- details: {
2883
- tableName
2884
- }
3200
+ category: error.ErrorCategory.USER
2885
3201
  },
2886
- e
3202
+ error$1
2887
3203
  );
2888
3204
  }
2889
3205
  }
2890
3206
  };
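
A short sketch of the span write/read cycle exposed by the ObservabilityLibSQL class above, assuming an instance named observability whose init() has already been awaited; the field values are placeholders, and real spans may carry additional columns defined by SPAN_SCHEMA:

// Illustrative only.
await observability.createSpan({
  span: {
    traceId: 'trace-1',          // placeholder
    spanId: 'span-1',            // placeholder
    parentSpanId: null,
    name: 'example-root-span',   // placeholder
    spanType: 'example',         // placeholder; valid span types come from @mastra/core
    startedAt: new Date()
  }
});
await observability.updateSpan({
  traceId: 'trace-1',
  spanId: 'span-1',
  updates: { endedAt: new Date() }
});
const trace = await observability.getTrace({ traceId: 'trace-1' });
// trace is null when nothing matches, otherwise { traceId, spans: [...] } ordered by startedAt.
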
2891
3207
  var ScoresLibSQL = class extends storage.ScoresStorage {
2892
- operations;
2893
- client;
2894
- constructor({ client, operations }) {
3208
+ #db;
3209
+ #client;
3210
+ constructor(config) {
2895
3211
  super();
2896
- this.operations = operations;
2897
- this.client = client;
3212
+ const client = resolveClient(config);
3213
+ this.#client = client;
3214
+ this.#db = new LibSQLDB({ client, maxRetries: config.maxRetries, initialBackoffMs: config.initialBackoffMs });
3215
+ }
3216
+ async init() {
3217
+ await this.#db.createTable({ tableName: storage.TABLE_SCORERS, schema: storage.SCORERS_SCHEMA });
3218
+ await this.#db.alterTable({
3219
+ tableName: storage.TABLE_SCORERS,
3220
+ schema: storage.SCORERS_SCHEMA,
3221
+ ifNotExists: ["spanId", "requestContext"]
3222
+ });
3223
+ }
3224
+ async dangerouslyClearAll() {
3225
+ await this.#db.deleteData({ tableName: storage.TABLE_SCORERS });
2898
3226
  }
2899
3227
  async listScoresByRunId({
2900
3228
  runId,
@@ -2902,7 +3230,7 @@ var ScoresLibSQL = class extends storage.ScoresStorage {
2902
3230
  }) {
2903
3231
  try {
2904
3232
  const { page, perPage: perPageInput } = pagination;
2905
- const countResult = await this.client.execute({
3233
+ const countResult = await this.#client.execute({
2906
3234
  sql: `SELECT COUNT(*) as count FROM ${storage.TABLE_SCORERS} WHERE runId = ?`,
2907
3235
  args: [runId]
2908
3236
  });
@@ -2922,7 +3250,7 @@ var ScoresLibSQL = class extends storage.ScoresStorage {
2922
3250
  const { offset: start, perPage: perPageForResponse } = storage.calculatePagination(page, perPageInput, perPage);
2923
3251
  const limitValue = perPageInput === false ? total : perPage;
2924
3252
  const end = perPageInput === false ? total : start + perPage;
2925
- const result = await this.client.execute({
3253
+ const result = await this.#client.execute({
2926
3254
  sql: `SELECT * FROM ${storage.TABLE_SCORERS} WHERE runId = ? ORDER BY createdAt DESC LIMIT ? OFFSET ?`,
2927
3255
  args: [runId, limitValue, start]
2928
3256
  });
@@ -2975,7 +3303,7 @@ var ScoresLibSQL = class extends storage.ScoresStorage {
2975
3303
  queryParams.push(source);
2976
3304
  }
2977
3305
  const whereClause = conditions.length > 0 ? `WHERE ${conditions.join(" AND ")}` : "";
2978
- const countResult = await this.client.execute({
3306
+ const countResult = await this.#client.execute({
2979
3307
  sql: `SELECT COUNT(*) as count FROM ${storage.TABLE_SCORERS} ${whereClause}`,
2980
3308
  args: queryParams
2981
3309
  });
@@ -2995,7 +3323,7 @@ var ScoresLibSQL = class extends storage.ScoresStorage {
2995
3323
  const { offset: start, perPage: perPageForResponse } = storage.calculatePagination(page, perPageInput, perPage);
2996
3324
  const limitValue = perPageInput === false ? total : perPage;
2997
3325
  const end = perPageInput === false ? total : start + perPage;
2998
- const result = await this.client.execute({
3326
+ const result = await this.#client.execute({
2999
3327
  sql: `SELECT * FROM ${storage.TABLE_SCORERS} ${whereClause} ORDER BY createdAt DESC LIMIT ? OFFSET ?`,
3000
3328
  args: [...queryParams, limitValue, start]
3001
3329
  });
@@ -3030,7 +3358,7 @@ var ScoresLibSQL = class extends storage.ScoresStorage {
3030
3358
  });
3031
3359
  }
3032
3360
  async getScoreById({ id }) {
3033
- const result = await this.client.execute({
3361
+ const result = await this.#client.execute({
3034
3362
  sql: `SELECT * FROM ${storage.TABLE_SCORERS} WHERE id = ?`,
3035
3363
  args: [id]
3036
3364
  });
@@ -3047,7 +3375,7 @@ var ScoresLibSQL = class extends storage.ScoresStorage {
3047
3375
  domain: error.ErrorDomain.STORAGE,
3048
3376
  category: error.ErrorCategory.USER,
3049
3377
  details: {
3050
- scorer: score.scorer?.id ?? "unknown",
3378
+ scorer: typeof score.scorer?.id === "string" ? score.scorer.id : String(score.scorer?.id ?? "unknown"),
3051
3379
  entityId: score.entityId ?? "unknown",
3052
3380
  entityType: score.entityType ?? "unknown",
3053
3381
  traceId: score.traceId ?? "",
@@ -3060,7 +3388,7 @@ var ScoresLibSQL = class extends storage.ScoresStorage {
3060
3388
  try {
3061
3389
  const id = crypto.randomUUID();
3062
3390
  const now = /* @__PURE__ */ new Date();
3063
- await this.operations.insert({
3391
+ await this.#db.insert({
3064
3392
  tableName: storage.TABLE_SCORERS,
3065
3393
  record: {
3066
3394
  ...parsedScore,
@@ -3088,7 +3416,7 @@ var ScoresLibSQL = class extends storage.ScoresStorage {
3088
3416
  }) {
3089
3417
  try {
3090
3418
  const { page, perPage: perPageInput } = pagination;
3091
- const countResult = await this.client.execute({
3419
+ const countResult = await this.#client.execute({
3092
3420
  sql: `SELECT COUNT(*) as count FROM ${storage.TABLE_SCORERS} WHERE entityId = ? AND entityType = ?`,
3093
3421
  args: [entityId, entityType]
3094
3422
  });
@@ -3108,7 +3436,7 @@ var ScoresLibSQL = class extends storage.ScoresStorage {
3108
3436
  const { offset: start, perPage: perPageForResponse } = storage.calculatePagination(page, perPageInput, perPage);
3109
3437
  const limitValue = perPageInput === false ? total : perPage;
3110
3438
  const end = perPageInput === false ? total : start + perPage;
3111
- const result = await this.client.execute({
3439
+ const result = await this.#client.execute({
3112
3440
  sql: `SELECT * FROM ${storage.TABLE_SCORERS} WHERE entityId = ? AND entityType = ? ORDER BY createdAt DESC LIMIT ? OFFSET ?`,
3113
3441
  args: [entityId, entityType, limitValue, start]
3114
3442
  });
@@ -3142,14 +3470,14 @@ var ScoresLibSQL = class extends storage.ScoresStorage {
3142
3470
  const { page, perPage: perPageInput } = pagination;
3143
3471
  const perPage = storage.normalizePerPage(perPageInput, 100);
3144
3472
  const { offset: start, perPage: perPageForResponse } = storage.calculatePagination(page, perPageInput, perPage);
3145
- const countSQLResult = await this.client.execute({
3473
+ const countSQLResult = await this.#client.execute({
3146
3474
  sql: `SELECT COUNT(*) as count FROM ${storage.TABLE_SCORERS} WHERE traceId = ? AND spanId = ?`,
3147
3475
  args: [traceId, spanId]
3148
3476
  });
3149
3477
  const total = Number(countSQLResult.rows?.[0]?.count ?? 0);
3150
3478
  const limitValue = perPageInput === false ? total : perPage;
3151
3479
  const end = perPageInput === false ? total : start + perPage;
3152
- const result = await this.client.execute({
3480
+ const result = await this.#client.execute({
3153
3481
  sql: `SELECT * FROM ${storage.TABLE_SCORERS} WHERE traceId = ? AND spanId = ? ORDER BY createdAt DESC LIMIT ? OFFSET ?`,
3154
3482
  args: [traceId, spanId, limitValue, start]
3155
3483
  });
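
A brief sketch of calling listScoresBySpan as implemented above, assuming a ScoresLibSQL instance named scores that has been initialized; the identifiers are placeholders, and the exact shape of the returned page object is defined elsewhere in this file:

// Illustrative only.
const scorePage = await scores.listScoresBySpan({
  traceId: 'trace-1',
  spanId: 'span-1',
  pagination: { page: 0, perPage: 25 }
});
// Passing perPage: false appears to disable paging and return every matching row.
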
@@ -3194,37 +3522,49 @@ function parseWorkflowRun(row) {
3194
3522
  };
3195
3523
  }
3196
3524
  var WorkflowsLibSQL = class extends storage.WorkflowsStorage {
3197
- operations;
3198
- client;
3199
- maxRetries;
3200
- initialBackoffMs;
3201
- constructor({
3202
- operations,
3203
- client,
3204
- maxRetries = 5,
3205
- initialBackoffMs = 500
3206
- }) {
3525
+ #db;
3526
+ #client;
3527
+ executeWithRetry;
3528
+ constructor(config) {
3207
3529
  super();
3208
- this.operations = operations;
3209
- this.client = client;
3210
- this.maxRetries = maxRetries;
3211
- this.initialBackoffMs = initialBackoffMs;
3530
+ const client = resolveClient(config);
3531
+ const maxRetries = config.maxRetries ?? 5;
3532
+ const initialBackoffMs = config.initialBackoffMs ?? 500;
3533
+ this.#client = client;
3534
+ this.#db = new LibSQLDB({ client, maxRetries, initialBackoffMs });
3535
+ this.executeWithRetry = createExecuteWriteOperationWithRetry({
3536
+ logger: this.logger,
3537
+ maxRetries,
3538
+ initialBackoffMs
3539
+ });
3212
3540
  this.setupPragmaSettings().catch(
3213
3541
  (err) => this.logger.warn("LibSQL Workflows: Failed to setup PRAGMA settings.", err)
3214
3542
  );
3215
3543
  }
3544
+ async init() {
3545
+ const schema = storage.TABLE_SCHEMAS[storage.TABLE_WORKFLOW_SNAPSHOT];
3546
+ await this.#db.createTable({ tableName: storage.TABLE_WORKFLOW_SNAPSHOT, schema });
3547
+ await this.#db.alterTable({
3548
+ tableName: storage.TABLE_WORKFLOW_SNAPSHOT,
3549
+ schema,
3550
+ ifNotExists: ["resourceId"]
3551
+ });
3552
+ }
3553
+ async dangerouslyClearAll() {
3554
+ await this.#db.deleteData({ tableName: storage.TABLE_WORKFLOW_SNAPSHOT });
3555
+ }
3216
3556
  async setupPragmaSettings() {
3217
3557
  try {
3218
- await this.client.execute("PRAGMA busy_timeout = 10000;");
3558
+ await this.#client.execute("PRAGMA busy_timeout = 10000;");
3219
3559
  this.logger.debug("LibSQL Workflows: PRAGMA busy_timeout=10000 set.");
3220
3560
  try {
3221
- await this.client.execute("PRAGMA journal_mode = WAL;");
3561
+ await this.#client.execute("PRAGMA journal_mode = WAL;");
3222
3562
  this.logger.debug("LibSQL Workflows: PRAGMA journal_mode=WAL set.");
3223
3563
  } catch {
3224
3564
  this.logger.debug("LibSQL Workflows: WAL mode not supported, using default journal mode.");
3225
3565
  }
3226
3566
  try {
3227
- await this.client.execute("PRAGMA synchronous = NORMAL;");
3567
+ await this.#client.execute("PRAGMA synchronous = NORMAL;");
3228
3568
  this.logger.debug("LibSQL Workflows: PRAGMA synchronous=NORMAL set.");
3229
3569
  } catch {
3230
3570
  this.logger.debug("LibSQL Workflows: Failed to set synchronous mode.");
@@ -3233,44 +3573,6 @@ var WorkflowsLibSQL = class extends storage.WorkflowsStorage {
3233
3573
  this.logger.warn("LibSQL Workflows: Failed to set PRAGMA settings.", err);
3234
3574
  }
3235
3575
  }
3236
- async executeWithRetry(operation) {
3237
- let attempts = 0;
3238
- let backoff = this.initialBackoffMs;
3239
- while (attempts < this.maxRetries) {
3240
- try {
3241
- return await operation();
3242
- } catch (error) {
3243
- this.logger.debug("LibSQL Workflows: Error caught in retry loop", {
3244
- errorType: error.constructor.name,
3245
- errorCode: error.code,
3246
- errorMessage: error.message,
3247
- attempts,
3248
- maxRetries: this.maxRetries
3249
- });
3250
- const isLockError = error.code === "SQLITE_BUSY" || error.code === "SQLITE_LOCKED" || error.message?.toLowerCase().includes("database is locked") || error.message?.toLowerCase().includes("database table is locked") || error.message?.toLowerCase().includes("table is locked") || error.constructor.name === "SqliteError" && error.message?.toLowerCase().includes("locked");
3251
- if (isLockError) {
3252
- attempts++;
3253
- if (attempts >= this.maxRetries) {
3254
- this.logger.error(
3255
- `LibSQL Workflows: Operation failed after ${this.maxRetries} attempts due to database lock: ${error.message}`,
3256
- { error, attempts, maxRetries: this.maxRetries }
3257
- );
3258
- throw error;
3259
- }
3260
- this.logger.warn(
3261
- `LibSQL Workflows: Attempt ${attempts} failed due to database lock. Retrying in ${backoff}ms...`,
3262
- { errorMessage: error.message, attempts, backoff, maxRetries: this.maxRetries }
3263
- );
3264
- await new Promise((resolve) => setTimeout(resolve, backoff));
3265
- backoff *= 2;
3266
- } else {
3267
- this.logger.error("LibSQL Workflows: Non-lock error occurred, not retrying", { error });
3268
- throw error;
3269
- }
3270
- }
3271
- }
3272
- throw new Error("LibSQL Workflows: Max retries reached, but no error was re-thrown from the loop.");
3273
- }
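
A minimal sketch of how the retry wrapper assigned in the constructor above is used by the workflow methods that follow; the operation body is a placeholder, and the second argument is the operation label that the surrounding calls (for example "updateWorkflowResults") pass through:

// Illustrative only; runs inside the class, so this.executeWithRetry is available.
await this.executeWithRetry(async () => {
  // placeholder: perform the actual snapshot write here
}, 'example operation label');
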
3274
3576
  async updateWorkflowResults({
3275
3577
  workflowName,
3276
3578
  runId,
@@ -3279,7 +3581,7 @@ var WorkflowsLibSQL = class extends storage.WorkflowsStorage {
3279
3581
  requestContext
3280
3582
  }) {
3281
3583
  return this.executeWithRetry(async () => {
3282
- const tx = await this.client.transaction("write");
3584
+ const tx = await this.#client.transaction("write");
3283
3585
  try {
3284
3586
  const existingSnapshotResult = await tx.execute({
3285
3587
  sql: `SELECT snapshot FROM ${storage.TABLE_WORKFLOW_SNAPSHOT} WHERE workflow_name = ? AND run_id = ?`,
@@ -3319,7 +3621,7 @@ var WorkflowsLibSQL = class extends storage.WorkflowsStorage {
3319
3621
  }
3320
3622
  throw error;
3321
3623
  }
3322
- });
3624
+ }, "updateWorkflowResults");
3323
3625
  }
3324
3626
  async updateWorkflowState({
3325
3627
  workflowName,
@@ -3327,7 +3629,7 @@ var WorkflowsLibSQL = class extends storage.WorkflowsStorage {
3327
3629
  opts
3328
3630
  }) {
3329
3631
  return this.executeWithRetry(async () => {
3330
- const tx = await this.client.transaction("write");
3632
+ const tx = await this.#client.transaction("write");
3331
3633
  try {
3332
3634
  const existingSnapshotResult = await tx.execute({
3333
3635
  sql: `SELECT snapshot FROM ${storage.TABLE_WORKFLOW_SNAPSHOT} WHERE workflow_name = ? AND run_id = ?`,
@@ -3356,24 +3658,27 @@ var WorkflowsLibSQL = class extends storage.WorkflowsStorage {
3356
3658
  }
3357
3659
  throw error;
3358
3660
  }
3359
- });
3661
+ }, "updateWorkflowState");
3360
3662
  }
3361
3663
  async persistWorkflowSnapshot({
3362
3664
  workflowName,
3363
3665
  runId,
3364
3666
  resourceId,
3365
- snapshot
3667
+ snapshot,
3668
+ createdAt,
3669
+ updatedAt
3366
3670
  }) {
3671
+ const now = /* @__PURE__ */ new Date();
3367
3672
  const data = {
3368
3673
  workflow_name: workflowName,
3369
3674
  run_id: runId,
3370
3675
  resourceId,
3371
3676
  snapshot,
3372
- createdAt: /* @__PURE__ */ new Date(),
3373
- updatedAt: /* @__PURE__ */ new Date()
3677
+ createdAt: createdAt ?? now,
3678
+ updatedAt: updatedAt ?? now
3374
3679
  };
3375
3680
  this.logger.debug("Persisting workflow snapshot", { workflowName, runId, data });
3376
- await this.operations.insert({
3681
+ await this.#db.insert({
3377
3682
  tableName: storage.TABLE_WORKFLOW_SNAPSHOT,
3378
3683
  record: data
3379
3684
  });
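
A small sketch of the extended persistWorkflowSnapshot call shown above, assuming a WorkflowsLibSQL instance named workflows; the snapshot object and identifiers are placeholders, and both timestamps are optional (the current time is used when they are omitted):

// Illustrative only.
await workflows.persistWorkflowSnapshot({
  workflowName: 'example-workflow',   // placeholder
  runId: 'run-123',                   // placeholder
  resourceId: 'resource-1',           // placeholder
  snapshot: {},                       // placeholder; the real snapshot shape comes from @mastra/core
  createdAt: new Date('2024-01-01T00:00:00.000Z'), // optional
  updatedAt: new Date()                            // optional
});
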
@@ -3383,7 +3688,7 @@ var WorkflowsLibSQL = class extends storage.WorkflowsStorage {
3383
3688
  runId
3384
3689
  }) {
3385
3690
  this.logger.debug("Loading workflow snapshot", { workflowName, runId });
3386
- const d = await this.operations.load({
3691
+ const d = await this.#db.select({
3387
3692
  tableName: storage.TABLE_WORKFLOW_SNAPSHOT,
3388
3693
  keys: { workflow_name: workflowName, run_id: runId }
3389
3694
  });
@@ -3405,7 +3710,7 @@ var WorkflowsLibSQL = class extends storage.WorkflowsStorage {
3405
3710
  }
3406
3711
  const whereClause = conditions.length > 0 ? `WHERE ${conditions.join(" AND ")}` : "";
3407
3712
  try {
3408
- const result = await this.client.execute({
3713
+ const result = await this.#client.execute({
3409
3714
  sql: `SELECT * FROM ${storage.TABLE_WORKFLOW_SNAPSHOT} ${whereClause} ORDER BY createdAt DESC LIMIT 1`,
3410
3715
  args
3411
3716
  });
@@ -3425,22 +3730,24 @@ var WorkflowsLibSQL = class extends storage.WorkflowsStorage {
3425
3730
  }
3426
3731
  }
3427
3732
  async deleteWorkflowRunById({ runId, workflowName }) {
3428
- try {
3429
- await this.client.execute({
3430
- sql: `DELETE FROM ${storage.TABLE_WORKFLOW_SNAPSHOT} WHERE workflow_name = ? AND run_id = ?`,
3431
- args: [workflowName, runId]
3432
- });
3433
- } catch (error$1) {
3434
- throw new error.MastraError(
3435
- {
3436
- id: storage.createStorageErrorId("LIBSQL", "DELETE_WORKFLOW_RUN_BY_ID", "FAILED"),
3437
- domain: error.ErrorDomain.STORAGE,
3438
- category: error.ErrorCategory.THIRD_PARTY,
3439
- details: { runId, workflowName }
3440
- },
3441
- error$1
3442
- );
3443
- }
3733
+ return this.executeWithRetry(async () => {
3734
+ try {
3735
+ await this.#client.execute({
3736
+ sql: `DELETE FROM ${storage.TABLE_WORKFLOW_SNAPSHOT} WHERE workflow_name = ? AND run_id = ?`,
3737
+ args: [workflowName, runId]
3738
+ });
3739
+ } catch (error$1) {
3740
+ throw new error.MastraError(
3741
+ {
3742
+ id: storage.createStorageErrorId("LIBSQL", "DELETE_WORKFLOW_RUN_BY_ID", "FAILED"),
3743
+ domain: error.ErrorDomain.STORAGE,
3744
+ category: error.ErrorCategory.THIRD_PARTY,
3745
+ details: { runId, workflowName }
3746
+ },
3747
+ error$1
3748
+ );
3749
+ }
3750
+ }, "deleteWorkflowRunById");
3444
3751
  }
3445
3752
  async listWorkflowRuns({
3446
3753
  workflowName,
@@ -3471,7 +3778,7 @@ var WorkflowsLibSQL = class extends storage.WorkflowsStorage {
3471
3778
  args.push(toDate.toISOString());
3472
3779
  }
3473
3780
  if (resourceId) {
3474
- const hasResourceId = await this.operations.hasColumn(storage.TABLE_WORKFLOW_SNAPSHOT, "resourceId");
3781
+ const hasResourceId = await this.#db.hasColumn(storage.TABLE_WORKFLOW_SNAPSHOT, "resourceId");
3475
3782
  if (hasResourceId) {
3476
3783
  conditions.push("resourceId = ?");
3477
3784
  args.push(resourceId);
@@ -3483,7 +3790,7 @@ var WorkflowsLibSQL = class extends storage.WorkflowsStorage {
3483
3790
  let total = 0;
3484
3791
  const usePagination = typeof perPage === "number" && typeof page === "number";
3485
3792
  if (usePagination) {
3486
- const countResult = await this.client.execute({
3793
+ const countResult = await this.#client.execute({
3487
3794
  sql: `SELECT COUNT(*) as count FROM ${storage.TABLE_WORKFLOW_SNAPSHOT} ${whereClause}`,
3488
3795
  args
3489
3796
  });
@@ -3491,7 +3798,7 @@ var WorkflowsLibSQL = class extends storage.WorkflowsStorage {
3491
3798
  }
3492
3799
  const normalizedPerPage = usePagination ? storage.normalizePerPage(perPage, Number.MAX_SAFE_INTEGER) : 0;
3493
3800
  const offset = usePagination ? page * normalizedPerPage : 0;
3494
- const result = await this.client.execute({
3801
+ const result = await this.#client.execute({
3495
3802
  sql: `SELECT * FROM ${storage.TABLE_WORKFLOW_SNAPSHOT} ${whereClause} ORDER BY createdAt DESC${usePagination ? ` LIMIT ? OFFSET ?` : ""}`,
3496
3803
  args: usePagination ? [...args, normalizedPerPage, offset] : args
3497
3804
  });
@@ -3538,18 +3845,17 @@ var LibSQLStore = class extends storage.MastraStorage {
3538
3845
  } else {
3539
3846
  this.client = config.client;
3540
3847
  }
3541
- const operations = new StoreOperationsLibSQL({
3848
+ const domainConfig = {
3542
3849
  client: this.client,
3543
3850
  maxRetries: this.maxRetries,
3544
3851
  initialBackoffMs: this.initialBackoffMs
3545
- });
3546
- const scores = new ScoresLibSQL({ client: this.client, operations });
3547
- const workflows = new WorkflowsLibSQL({ client: this.client, operations });
3548
- const memory = new MemoryLibSQL({ client: this.client, operations });
3549
- const observability = new ObservabilityLibSQL({ operations });
3550
- const agents = new AgentsLibSQL({ client: this.client, operations });
3852
+ };
3853
+ const scores = new ScoresLibSQL(domainConfig);
3854
+ const workflows = new WorkflowsLibSQL(domainConfig);
3855
+ const memory = new MemoryLibSQL(domainConfig);
3856
+ const observability = new ObservabilityLibSQL(domainConfig);
3857
+ const agents = new AgentsLibSQL(domainConfig);
3551
3858
  this.stores = {
3552
- operations,
3553
3859
  scores,
3554
3860
  workflows,
3555
3861
  memory,
@@ -3564,187 +3870,12 @@ var LibSQLStore = class extends storage.MastraStorage {
3564
3870
  hasColumn: true,
3565
3871
  createTable: true,
3566
3872
  deleteMessages: true,
3567
- observabilityInstance: true,
3873
+ observability: true,
3874
+ indexManagement: false,
3568
3875
  listScoresBySpan: true,
3569
3876
  agents: true
3570
3877
  };
3571
3878
  }
3572
- async createTable({
3573
- tableName,
3574
- schema
3575
- }) {
3576
- await this.stores.operations.createTable({ tableName, schema });
3577
- }
3578
- /**
3579
- * Alters table schema to add columns if they don't exist
3580
- * @param tableName Name of the table
3581
- * @param schema Schema of the table
3582
- * @param ifNotExists Array of column names to add if they don't exist
3583
- */
3584
- async alterTable({
3585
- tableName,
3586
- schema,
3587
- ifNotExists
3588
- }) {
3589
- await this.stores.operations.alterTable({ tableName, schema, ifNotExists });
3590
- }
3591
- async clearTable({ tableName }) {
3592
- await this.stores.operations.clearTable({ tableName });
3593
- }
3594
- async dropTable({ tableName }) {
3595
- await this.stores.operations.dropTable({ tableName });
3596
- }
3597
- insert(args) {
3598
- return this.stores.operations.insert(args);
3599
- }
3600
- batchInsert(args) {
3601
- return this.stores.operations.batchInsert(args);
3602
- }
3603
- async load({ tableName, keys }) {
3604
- return this.stores.operations.load({ tableName, keys });
3605
- }
3606
- async getThreadById({ threadId }) {
3607
- return this.stores.memory.getThreadById({ threadId });
3608
- }
3609
- async saveThread({ thread }) {
3610
- return this.stores.memory.saveThread({ thread });
3611
- }
3612
- async updateThread({
3613
- id,
3614
- title,
3615
- metadata
3616
- }) {
3617
- return this.stores.memory.updateThread({ id, title, metadata });
3618
- }
3619
- async deleteThread({ threadId }) {
3620
- return this.stores.memory.deleteThread({ threadId });
3621
- }
3622
- async listMessagesById({ messageIds }) {
3623
- return this.stores.memory.listMessagesById({ messageIds });
3624
- }
3625
- async saveMessages(args) {
3626
- const result = await this.stores.memory.saveMessages({ messages: args.messages });
3627
- return { messages: result.messages };
3628
- }
3629
- async updateMessages({
3630
- messages
3631
- }) {
3632
- return this.stores.memory.updateMessages({ messages });
3633
- }
3634
- async deleteMessages(messageIds) {
3635
- return this.stores.memory.deleteMessages(messageIds);
3636
- }
3637
- async getScoreById({ id }) {
3638
- return this.stores.scores.getScoreById({ id });
3639
- }
3640
- async saveScore(score) {
3641
- return this.stores.scores.saveScore(score);
3642
- }
3643
- async listScoresByScorerId({
3644
- scorerId,
3645
- entityId,
3646
- entityType,
3647
- source,
3648
- pagination
3649
- }) {
3650
- return this.stores.scores.listScoresByScorerId({ scorerId, entityId, entityType, source, pagination });
3651
- }
3652
- async listScoresByRunId({
3653
- runId,
3654
- pagination
3655
- }) {
3656
- return this.stores.scores.listScoresByRunId({ runId, pagination });
3657
- }
3658
- async listScoresByEntityId({
3659
- entityId,
3660
- entityType,
3661
- pagination
3662
- }) {
3663
- return this.stores.scores.listScoresByEntityId({ entityId, entityType, pagination });
3664
- }
3665
- /**
3666
- * WORKFLOWS
3667
- */
3668
- async updateWorkflowResults({
3669
- workflowName,
3670
- runId,
3671
- stepId,
3672
- result,
3673
- requestContext
3674
- }) {
3675
- return this.stores.workflows.updateWorkflowResults({ workflowName, runId, stepId, result, requestContext });
3676
- }
3677
- async updateWorkflowState({
3678
- workflowName,
3679
- runId,
3680
- opts
3681
- }) {
3682
- return this.stores.workflows.updateWorkflowState({ workflowName, runId, opts });
3683
- }
3684
- async persistWorkflowSnapshot({
3685
- workflowName,
3686
- runId,
3687
- resourceId,
3688
- snapshot
3689
- }) {
3690
- return this.stores.workflows.persistWorkflowSnapshot({ workflowName, runId, resourceId, snapshot });
3691
- }
3692
- async loadWorkflowSnapshot({
3693
- workflowName,
3694
- runId
3695
- }) {
3696
- return this.stores.workflows.loadWorkflowSnapshot({ workflowName, runId });
3697
- }
3698
- async listWorkflowRuns(args = {}) {
3699
- return this.stores.workflows.listWorkflowRuns(args);
3700
- }
3701
- async getWorkflowRunById({
3702
- runId,
3703
- workflowName
3704
- }) {
3705
- return this.stores.workflows.getWorkflowRunById({ runId, workflowName });
3706
- }
3707
- async deleteWorkflowRunById({ runId, workflowName }) {
3708
- return this.stores.workflows.deleteWorkflowRunById({ runId, workflowName });
3709
- }
3710
- async getResourceById({ resourceId }) {
3711
- return this.stores.memory.getResourceById({ resourceId });
3712
- }
3713
- async saveResource({ resource }) {
3714
- return this.stores.memory.saveResource({ resource });
3715
- }
3716
- async updateResource({
3717
- resourceId,
3718
- workingMemory,
3719
- metadata
3720
- }) {
3721
- return this.stores.memory.updateResource({ resourceId, workingMemory, metadata });
3722
- }
3723
- async createSpan(span) {
3724
- return this.stores.observability.createSpan(span);
3725
- }
3726
- async updateSpan(params) {
3727
- return this.stores.observability.updateSpan(params);
3728
- }
3729
- async getTrace(traceId) {
3730
- return this.stores.observability.getTrace(traceId);
3731
- }
3732
- async getTracesPaginated(args) {
3733
- return this.stores.observability.getTracesPaginated(args);
3734
- }
3735
- async listScoresBySpan({
3736
- traceId,
3737
- spanId,
3738
- pagination
3739
- }) {
3740
- return this.stores.scores.listScoresBySpan({ traceId, spanId, pagination });
3741
- }
3742
- async batchCreateSpans(args) {
3743
- return this.stores.observability.batchCreateSpans(args);
3744
- }
3745
- async batchUpdateSpans(args) {
3746
- return this.stores.observability.batchUpdateSpans(args);
3747
- }
3748
3879
  };
3749
3880
 
3750
3881
  // src/vector/prompt.ts