@mastra/libsql 1.0.0-beta.0 → 1.0.0-beta.10

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.js CHANGED
@@ -1,9 +1,10 @@
1
1
  import { createClient } from '@libsql/client';
2
2
  import { MastraError, ErrorCategory, ErrorDomain } from '@mastra/core/error';
3
+ import { createVectorErrorId, AgentsStorage, AGENTS_SCHEMA, TABLE_AGENTS, createStorageErrorId, normalizePerPage, calculatePagination, MemoryStorage, TABLE_SCHEMAS, TABLE_THREADS, TABLE_MESSAGES, TABLE_RESOURCES, ObservabilityStorage, SPAN_SCHEMA, TABLE_SPANS, listTracesArgsSchema, ScoresStorage, SCORERS_SCHEMA, TABLE_SCORERS, transformScoreRow, WorkflowsStorage, TABLE_WORKFLOW_SNAPSHOT, MastraStorage, TraceStatus, getSqlType, safelyParseJSON } from '@mastra/core/storage';
3
4
  import { parseSqlIdentifier, parseFieldKey } from '@mastra/core/utils';
4
5
  import { MastraVector } from '@mastra/core/vector';
5
6
  import { BaseFilterTranslator } from '@mastra/core/vector/filter';
6
- import { MastraStorage, StoreOperations, TABLE_WORKFLOW_SNAPSHOT, TABLE_SPANS, ScoresStorage, TABLE_SCORERS, normalizePerPage, calculatePagination, safelyParseJSON, WorkflowsStorage, MemoryStorage, TABLE_MESSAGES, TABLE_THREADS, TABLE_RESOURCES, ObservabilityStorage, TABLE_SCHEMAS, SPAN_SCHEMA } from '@mastra/core/storage';
7
+ import { MastraBase } from '@mastra/core/base';
7
8
  import { MessageList } from '@mastra/core/agent';
8
9
  import { saveScorePayloadSchema } from '@mastra/core/evals';
9
10
 
@@ -90,12 +91,20 @@ var createBasicOperator = (symbol) => {
90
91
  };
91
92
  };
92
93
  var createNumericOperator = (symbol) => {
93
- return (key) => {
94
+ return (key, value) => {
94
95
  const jsonPath = getJsonPath(key);
95
- return {
96
- sql: `CAST(json_extract(metadata, ${jsonPath}) AS NUMERIC) ${symbol} ?`,
97
- needsValue: true
98
- };
96
+ const isNumeric = typeof value === "number" || typeof value === "string" && !isNaN(Number(value)) && value.trim() !== "";
97
+ if (isNumeric) {
98
+ return {
99
+ sql: `CAST(json_extract(metadata, ${jsonPath}) AS NUMERIC) ${symbol} ?`,
100
+ needsValue: true
101
+ };
102
+ } else {
103
+ return {
104
+ sql: `CAST(json_extract(metadata, ${jsonPath}) AS TEXT) ${symbol} ?`,
105
+ needsValue: true
106
+ };
107
+ }
99
108
  };
100
109
  };
101
110
  var validateJsonArray = (key) => {
@@ -573,7 +582,7 @@ var LibSQLVector = class extends MastraVector {
573
582
  } catch (error) {
574
583
  throw new MastraError(
575
584
  {
576
- id: "LIBSQL_VECTOR_QUERY_INVALID_ARGS",
585
+ id: createVectorErrorId("LIBSQL", "QUERY", "INVALID_ARGS"),
577
586
  domain: ErrorDomain.STORAGE,
578
587
  category: ErrorCategory.USER
579
588
  },
@@ -615,7 +624,7 @@ var LibSQLVector = class extends MastraVector {
615
624
  } catch (error) {
616
625
  throw new MastraError(
617
626
  {
618
- id: "LIBSQL_VECTOR_QUERY_FAILED",
627
+ id: createVectorErrorId("LIBSQL", "QUERY", "FAILED"),
619
628
  domain: ErrorDomain.STORAGE,
620
629
  category: ErrorCategory.THIRD_PARTY
621
630
  },
@@ -629,7 +638,7 @@ var LibSQLVector = class extends MastraVector {
629
638
  } catch (error) {
630
639
  throw new MastraError(
631
640
  {
632
- id: "LIBSQL_VECTOR_UPSERT_FAILED",
641
+ id: createVectorErrorId("LIBSQL", "UPSERT", "FAILED"),
633
642
  domain: ErrorDomain.STORAGE,
634
643
  category: ErrorCategory.THIRD_PARTY
635
644
  },
@@ -683,7 +692,7 @@ var LibSQLVector = class extends MastraVector {
683
692
  } catch (error) {
684
693
  throw new MastraError(
685
694
  {
686
- id: "LIBSQL_VECTOR_CREATE_INDEX_FAILED",
695
+ id: createVectorErrorId("LIBSQL", "CREATE_INDEX", "FAILED"),
687
696
  domain: ErrorDomain.STORAGE,
688
697
  category: ErrorCategory.THIRD_PARTY,
689
698
  details: { indexName: args.indexName, dimension: args.dimension }
@@ -722,7 +731,7 @@ var LibSQLVector = class extends MastraVector {
722
731
  } catch (error) {
723
732
  throw new MastraError(
724
733
  {
725
- id: "LIBSQL_VECTOR_DELETE_INDEX_FAILED",
734
+ id: createVectorErrorId("LIBSQL", "DELETE_INDEX", "FAILED"),
726
735
  domain: ErrorDomain.STORAGE,
727
736
  category: ErrorCategory.THIRD_PARTY,
728
737
  details: { indexName: args.indexName }
@@ -753,7 +762,7 @@ var LibSQLVector = class extends MastraVector {
753
762
  } catch (error) {
754
763
  throw new MastraError(
755
764
  {
756
- id: "LIBSQL_VECTOR_LIST_INDEXES_FAILED",
765
+ id: createVectorErrorId("LIBSQL", "LIST_INDEXES", "FAILED"),
757
766
  domain: ErrorDomain.STORAGE,
758
767
  category: ErrorCategory.THIRD_PARTY
759
768
  },
@@ -801,7 +810,7 @@ var LibSQLVector = class extends MastraVector {
801
810
  } catch (e) {
802
811
  throw new MastraError(
803
812
  {
804
- id: "LIBSQL_VECTOR_DESCRIBE_INDEX_FAILED",
813
+ id: createVectorErrorId("LIBSQL", "DESCRIBE_INDEX", "FAILED"),
805
814
  domain: ErrorDomain.STORAGE,
806
815
  category: ErrorCategory.THIRD_PARTY,
807
816
  details: { indexName }
@@ -824,8 +833,27 @@ var LibSQLVector = class extends MastraVector {
824
833
  updateVector(args) {
825
834
  return this.executeWriteOperationWithRetry(() => this.doUpdateVector(args));
826
835
  }
827
- async doUpdateVector({ indexName, id, update }) {
836
+ async doUpdateVector(params) {
837
+ const { indexName, update } = params;
828
838
  const parsedIndexName = parseSqlIdentifier(indexName, "index name");
839
+ if ("id" in params && params.id && "filter" in params && params.filter) {
840
+ throw new MastraError({
841
+ id: createVectorErrorId("LIBSQL", "UPDATE_VECTOR", "MUTUALLY_EXCLUSIVE"),
842
+ domain: ErrorDomain.STORAGE,
843
+ category: ErrorCategory.USER,
844
+ details: { indexName },
845
+ text: "id and filter are mutually exclusive - provide only one"
846
+ });
847
+ }
848
+ if (!update.vector && !update.metadata) {
849
+ throw new MastraError({
850
+ id: createVectorErrorId("LIBSQL", "UPDATE_VECTOR", "NO_PAYLOAD"),
851
+ domain: ErrorDomain.STORAGE,
852
+ category: ErrorCategory.USER,
853
+ details: { indexName },
854
+ text: "No updates provided"
855
+ });
856
+ }
829
857
  const updates = [];
830
858
  const args = [];
831
859
  if (update.vector) {
@@ -837,32 +865,81 @@ var LibSQLVector = class extends MastraVector {
837
865
  args.push(JSON.stringify(update.metadata));
838
866
  }
839
867
  if (updates.length === 0) {
868
+ return;
869
+ }
870
+ let whereClause;
871
+ let whereValues;
872
+ if ("id" in params && params.id) {
873
+ whereClause = "vector_id = ?";
874
+ whereValues = [params.id];
875
+ } else if ("filter" in params && params.filter) {
876
+ const filter = params.filter;
877
+ if (!filter || Object.keys(filter).length === 0) {
878
+ throw new MastraError({
879
+ id: createVectorErrorId("LIBSQL", "UPDATE_VECTOR", "EMPTY_FILTER"),
880
+ domain: ErrorDomain.STORAGE,
881
+ category: ErrorCategory.USER,
882
+ details: { indexName },
883
+ text: "Cannot update with empty filter"
884
+ });
885
+ }
886
+ const translatedFilter = this.transformFilter(filter);
887
+ const { sql: filterSql, values: filterValues } = buildFilterQuery(translatedFilter);
888
+ if (!filterSql || filterSql.trim() === "") {
889
+ throw new MastraError({
890
+ id: createVectorErrorId("LIBSQL", "UPDATE_VECTOR", "INVALID_FILTER"),
891
+ domain: ErrorDomain.STORAGE,
892
+ category: ErrorCategory.USER,
893
+ details: { indexName },
894
+ text: "Filter produced empty WHERE clause"
895
+ });
896
+ }
897
+ const normalizedCondition = filterSql.replace(/^\s*WHERE\s+/i, "").trim().toLowerCase();
898
+ const matchAllPatterns = ["true", "1 = 1", "1=1"];
899
+ if (matchAllPatterns.includes(normalizedCondition)) {
900
+ throw new MastraError({
901
+ id: createVectorErrorId("LIBSQL", "UPDATE_VECTOR", "MATCH_ALL_FILTER"),
902
+ domain: ErrorDomain.STORAGE,
903
+ category: ErrorCategory.USER,
904
+ details: { indexName, filterSql: normalizedCondition },
905
+ text: "Filter matches all vectors. Provide a specific filter to update targeted vectors."
906
+ });
907
+ }
908
+ whereClause = filterSql.replace(/^WHERE\s+/i, "");
909
+ whereValues = filterValues;
910
+ } else {
840
911
  throw new MastraError({
841
- id: "LIBSQL_VECTOR_UPDATE_VECTOR_INVALID_ARGS",
912
+ id: createVectorErrorId("LIBSQL", "UPDATE_VECTOR", "NO_TARGET"),
842
913
  domain: ErrorDomain.STORAGE,
843
914
  category: ErrorCategory.USER,
844
- details: { indexName, id },
845
- text: "No updates provided"
915
+ details: { indexName },
916
+ text: "Either id or filter must be provided"
846
917
  });
847
918
  }
848
- args.push(id);
849
919
  const query = `
850
- UPDATE ${parsedIndexName}
851
- SET ${updates.join(", ")}
852
- WHERE vector_id = ?;
853
- `;
920
+ UPDATE ${parsedIndexName}
921
+ SET ${updates.join(", ")}
922
+ WHERE ${whereClause};
923
+ `;
854
924
  try {
855
925
  await this.turso.execute({
856
926
  sql: query,
857
- args
927
+ args: [...args, ...whereValues]
858
928
  });
859
929
  } catch (error) {
930
+ const errorDetails = { indexName };
931
+ if ("id" in params && params.id) {
932
+ errorDetails.id = params.id;
933
+ }
934
+ if ("filter" in params && params.filter) {
935
+ errorDetails.filter = JSON.stringify(params.filter);
936
+ }
860
937
  throw new MastraError(
861
938
  {
862
- id: "LIBSQL_VECTOR_UPDATE_VECTOR_FAILED",
939
+ id: createVectorErrorId("LIBSQL", "UPDATE_VECTOR", "FAILED"),
863
940
  domain: ErrorDomain.STORAGE,
864
941
  category: ErrorCategory.THIRD_PARTY,
865
- details: { indexName, id }
942
+ details: errorDetails
866
943
  },
867
944
  error
868
945
  );
@@ -881,10 +958,13 @@ var LibSQLVector = class extends MastraVector {
881
958
  } catch (error) {
882
959
  throw new MastraError(
883
960
  {
884
- id: "LIBSQL_VECTOR_DELETE_VECTOR_FAILED",
961
+ id: createVectorErrorId("LIBSQL", "DELETE_VECTOR", "FAILED"),
885
962
  domain: ErrorDomain.STORAGE,
886
963
  category: ErrorCategory.THIRD_PARTY,
887
- details: { indexName: args.indexName, id: args.id }
964
+ details: {
965
+ indexName: args.indexName,
966
+ ...args.id && { id: args.id }
967
+ }
888
968
  },
889
969
  error
890
970
  );
@@ -897,13 +977,107 @@ var LibSQLVector = class extends MastraVector {
897
977
  args: [id]
898
978
  });
899
979
  }
980
+ deleteVectors(args) {
981
+ return this.executeWriteOperationWithRetry(() => this.doDeleteVectors(args));
982
+ }
983
+ async doDeleteVectors({ indexName, filter, ids }) {
984
+ const parsedIndexName = parseSqlIdentifier(indexName, "index name");
985
+ if (!filter && !ids) {
986
+ throw new MastraError({
987
+ id: createVectorErrorId("LIBSQL", "DELETE_VECTORS", "NO_TARGET"),
988
+ domain: ErrorDomain.STORAGE,
989
+ category: ErrorCategory.USER,
990
+ details: { indexName },
991
+ text: "Either filter or ids must be provided"
992
+ });
993
+ }
994
+ if (filter && ids) {
995
+ throw new MastraError({
996
+ id: createVectorErrorId("LIBSQL", "DELETE_VECTORS", "MUTUALLY_EXCLUSIVE"),
997
+ domain: ErrorDomain.STORAGE,
998
+ category: ErrorCategory.USER,
999
+ details: { indexName },
1000
+ text: "Cannot provide both filter and ids - they are mutually exclusive"
1001
+ });
1002
+ }
1003
+ let query;
1004
+ let values;
1005
+ if (ids) {
1006
+ if (ids.length === 0) {
1007
+ throw new MastraError({
1008
+ id: createVectorErrorId("LIBSQL", "DELETE_VECTORS", "EMPTY_IDS"),
1009
+ domain: ErrorDomain.STORAGE,
1010
+ category: ErrorCategory.USER,
1011
+ details: { indexName },
1012
+ text: "Cannot delete with empty ids array"
1013
+ });
1014
+ }
1015
+ const placeholders = ids.map(() => "?").join(", ");
1016
+ query = `DELETE FROM ${parsedIndexName} WHERE vector_id IN (${placeholders})`;
1017
+ values = ids;
1018
+ } else {
1019
+ if (!filter || Object.keys(filter).length === 0) {
1020
+ throw new MastraError({
1021
+ id: createVectorErrorId("LIBSQL", "DELETE_VECTORS", "EMPTY_FILTER"),
1022
+ domain: ErrorDomain.STORAGE,
1023
+ category: ErrorCategory.USER,
1024
+ details: { indexName },
1025
+ text: "Cannot delete with empty filter. Use deleteIndex to delete all vectors."
1026
+ });
1027
+ }
1028
+ const translatedFilter = this.transformFilter(filter);
1029
+ const { sql: filterSql, values: filterValues } = buildFilterQuery(translatedFilter);
1030
+ if (!filterSql || filterSql.trim() === "") {
1031
+ throw new MastraError({
1032
+ id: createVectorErrorId("LIBSQL", "DELETE_VECTORS", "INVALID_FILTER"),
1033
+ domain: ErrorDomain.STORAGE,
1034
+ category: ErrorCategory.USER,
1035
+ details: { indexName },
1036
+ text: "Filter produced empty WHERE clause"
1037
+ });
1038
+ }
1039
+ const normalizedCondition = filterSql.replace(/^\s*WHERE\s+/i, "").trim().toLowerCase();
1040
+ const matchAllPatterns = ["true", "1 = 1", "1=1"];
1041
+ if (matchAllPatterns.includes(normalizedCondition)) {
1042
+ throw new MastraError({
1043
+ id: createVectorErrorId("LIBSQL", "DELETE_VECTORS", "MATCH_ALL_FILTER"),
1044
+ domain: ErrorDomain.STORAGE,
1045
+ category: ErrorCategory.USER,
1046
+ details: { indexName, filterSql: normalizedCondition },
1047
+ text: "Filter matches all vectors. Use deleteIndex to delete all vectors from an index."
1048
+ });
1049
+ }
1050
+ query = `DELETE FROM ${parsedIndexName} ${filterSql}`;
1051
+ values = filterValues;
1052
+ }
1053
+ try {
1054
+ await this.turso.execute({
1055
+ sql: query,
1056
+ args: values
1057
+ });
1058
+ } catch (error) {
1059
+ throw new MastraError(
1060
+ {
1061
+ id: createVectorErrorId("LIBSQL", "DELETE_VECTORS", "FAILED"),
1062
+ domain: ErrorDomain.STORAGE,
1063
+ category: ErrorCategory.THIRD_PARTY,
1064
+ details: {
1065
+ indexName,
1066
+ ...filter && { filter: JSON.stringify(filter) },
1067
+ ...ids && { idsCount: ids.length }
1068
+ }
1069
+ },
1070
+ error
1071
+ );
1072
+ }
1073
+ }
900
1074
  truncateIndex(args) {
901
1075
  try {
902
1076
  return this.executeWriteOperationWithRetry(() => this._doTruncateIndex(args));
903
1077
  } catch (error) {
904
1078
  throw new MastraError(
905
1079
  {
906
- id: "LIBSQL_VECTOR_TRUNCATE_INDEX_FAILED",
1080
+ id: createVectorErrorId("LIBSQL", "TRUNCATE_INDEX", "FAILED"),
907
1081
  domain: ErrorDomain.STORAGE,
908
1082
  category: ErrorCategory.THIRD_PARTY,
909
1083
  details: { indexName: args.indexName }
@@ -919,538 +1093,920 @@ var LibSQLVector = class extends MastraVector {
919
1093
  });
920
1094
  }
921
1095
  };
922
- var MemoryLibSQL = class extends MemoryStorage {
923
- client;
924
- operations;
925
- constructor({ client, operations }) {
926
- super();
927
- this.client = client;
928
- this.operations = operations;
929
- }
930
- parseRow(row) {
931
- let content = row.content;
932
- try {
933
- content = JSON.parse(row.content);
934
- } catch {
1096
+ function isLockError(error) {
1097
+ return error.code === "SQLITE_BUSY" || error.code === "SQLITE_LOCKED" || error.message?.toLowerCase().includes("database is locked") || error.message?.toLowerCase().includes("database table is locked") || error.message?.toLowerCase().includes("table is locked") || error.constructor.name === "SqliteError" && error.message?.toLowerCase().includes("locked");
1098
+ }
1099
+ function createExecuteWriteOperationWithRetry({
1100
+ logger,
1101
+ maxRetries,
1102
+ initialBackoffMs
1103
+ }) {
1104
+ return async function executeWriteOperationWithRetry(operationFn, operationDescription) {
1105
+ let attempts = 0;
1106
+ let backoff = initialBackoffMs;
1107
+ while (attempts < maxRetries) {
1108
+ try {
1109
+ return await operationFn();
1110
+ } catch (error) {
1111
+ logger.debug(`LibSQLStore: Error caught in retry loop for ${operationDescription}`, {
1112
+ errorType: error.constructor.name,
1113
+ errorCode: error.code,
1114
+ errorMessage: error.message,
1115
+ attempts,
1116
+ maxRetries
1117
+ });
1118
+ if (isLockError(error)) {
1119
+ attempts++;
1120
+ if (attempts >= maxRetries) {
1121
+ logger.error(
1122
+ `LibSQLStore: Operation failed after ${maxRetries} attempts due to database lock: ${error.message}`,
1123
+ { error, attempts, maxRetries }
1124
+ );
1125
+ throw error;
1126
+ }
1127
+ logger.warn(
1128
+ `LibSQLStore: Attempt ${attempts} failed due to database lock during ${operationDescription}. Retrying in ${backoff}ms...`,
1129
+ { errorMessage: error.message, attempts, backoff, maxRetries }
1130
+ );
1131
+ await new Promise((resolve) => setTimeout(resolve, backoff));
1132
+ backoff *= 2;
1133
+ } else {
1134
+ logger.error(`LibSQLStore: Non-lock error during ${operationDescription}, not retrying`, { error });
1135
+ throw error;
1136
+ }
1137
+ }
935
1138
  }
936
- const result = {
937
- id: row.id,
938
- content,
939
- role: row.role,
940
- createdAt: new Date(row.createdAt),
941
- threadId: row.thread_id,
942
- resourceId: row.resourceId
943
- };
944
- if (row.type && row.type !== `v2`) result.type = row.type;
945
- return result;
946
- }
947
- async _getIncludedMessages({
948
- threadId,
949
- include
950
- }) {
951
- if (!threadId.trim()) throw new Error("threadId must be a non-empty string");
952
- if (!include) return null;
953
- const unionQueries = [];
954
- const params = [];
955
- for (const inc of include) {
956
- const { id, withPreviousMessages = 0, withNextMessages = 0 } = inc;
957
- const searchId = inc.threadId || threadId;
958
- unionQueries.push(
959
- `
960
- SELECT * FROM (
961
- WITH numbered_messages AS (
962
- SELECT
963
- id, content, role, type, "createdAt", thread_id, "resourceId",
964
- ROW_NUMBER() OVER (ORDER BY "createdAt" ASC) as row_num
965
- FROM "${TABLE_MESSAGES}"
966
- WHERE thread_id = ?
967
- ),
968
- target_positions AS (
969
- SELECT row_num as target_pos
970
- FROM numbered_messages
971
- WHERE id = ?
972
- )
973
- SELECT DISTINCT m.*
974
- FROM numbered_messages m
975
- CROSS JOIN target_positions t
976
- WHERE m.row_num BETWEEN (t.target_pos - ?) AND (t.target_pos + ?)
977
- )
978
- `
979
- // Keep ASC for final sorting after fetching context
980
- );
981
- params.push(searchId, id, withPreviousMessages, withNextMessages);
1139
+ throw new Error(`LibSQLStore: Unexpected exit from retry loop for ${operationDescription}`);
1140
+ };
1141
+ }
1142
+ function prepareStatement({ tableName, record }) {
1143
+ const parsedTableName = parseSqlIdentifier(tableName, "table name");
1144
+ const columns = Object.keys(record).map((col) => parseSqlIdentifier(col, "column name"));
1145
+ const values = Object.values(record).map((v) => {
1146
+ if (typeof v === `undefined` || v === null) {
1147
+ return null;
982
1148
  }
983
- const finalQuery = unionQueries.join(" UNION ALL ") + ' ORDER BY "createdAt" ASC';
984
- const includedResult = await this.client.execute({ sql: finalQuery, args: params });
985
- const includedRows = includedResult.rows?.map((row) => this.parseRow(row));
986
- const seen = /* @__PURE__ */ new Set();
987
- const dedupedRows = includedRows.filter((row) => {
988
- if (seen.has(row.id)) return false;
989
- seen.add(row.id);
990
- return true;
991
- });
992
- return dedupedRows;
993
- }
994
- async listMessagesById({ messageIds }) {
995
- if (messageIds.length === 0) return { messages: [] };
996
- try {
997
- const sql = `
998
- SELECT
999
- id,
1000
- content,
1001
- role,
1002
- type,
1003
- "createdAt",
1004
- thread_id,
1005
- "resourceId"
1006
- FROM "${TABLE_MESSAGES}"
1007
- WHERE id IN (${messageIds.map(() => "?").join(", ")})
1008
- ORDER BY "createdAt" DESC
1009
- `;
1010
- const result = await this.client.execute({ sql, args: messageIds });
1011
- if (!result.rows) return { messages: [] };
1012
- const list = new MessageList().add(result.rows.map(this.parseRow), "memory");
1013
- return { messages: list.get.all.db() };
1014
- } catch (error) {
1015
- throw new MastraError(
1016
- {
1017
- id: "LIBSQL_STORE_LIST_MESSAGES_BY_ID_FAILED",
1018
- domain: ErrorDomain.STORAGE,
1019
- category: ErrorCategory.THIRD_PARTY,
1020
- details: { messageIds: JSON.stringify(messageIds) }
1021
- },
1022
- error
1023
- );
1149
+ if (v instanceof Date) {
1150
+ return v.toISOString();
1024
1151
  }
1152
+ return typeof v === "object" ? JSON.stringify(v) : v;
1153
+ });
1154
+ const placeholders = values.map(() => "?").join(", ");
1155
+ return {
1156
+ sql: `INSERT OR REPLACE INTO ${parsedTableName} (${columns.join(", ")}) VALUES (${placeholders})`,
1157
+ args: values
1158
+ };
1159
+ }
1160
+ function prepareUpdateStatement({
1161
+ tableName,
1162
+ updates,
1163
+ keys
1164
+ }) {
1165
+ const parsedTableName = parseSqlIdentifier(tableName, "table name");
1166
+ const schema = TABLE_SCHEMAS[tableName];
1167
+ const updateColumns = Object.keys(updates).map((col) => parseSqlIdentifier(col, "column name"));
1168
+ const updateValues = Object.values(updates).map(transformToSqlValue);
1169
+ const setClause = updateColumns.map((col) => `${col} = ?`).join(", ");
1170
+ const whereClause = prepareWhereClause(keys, schema);
1171
+ return {
1172
+ sql: `UPDATE ${parsedTableName} SET ${setClause}${whereClause.sql}`,
1173
+ args: [...updateValues, ...whereClause.args]
1174
+ };
1175
+ }
1176
+ function transformToSqlValue(value) {
1177
+ if (typeof value === "undefined" || value === null) {
1178
+ return null;
1025
1179
  }
1026
- async listMessages(args) {
1027
- const { threadId, resourceId, include, filter, perPage: perPageInput, page = 0, orderBy } = args;
1028
- if (!threadId.trim()) {
1029
- throw new MastraError(
1030
- {
1031
- id: "STORAGE_LIBSQL_LIST_MESSAGES_INVALID_THREAD_ID",
1032
- domain: ErrorDomain.STORAGE,
1033
- category: ErrorCategory.THIRD_PARTY,
1034
- details: { threadId }
1035
- },
1036
- new Error("threadId must be a non-empty string")
1037
- );
1038
- }
1039
- if (page < 0) {
1040
- throw new MastraError(
1041
- {
1042
- id: "LIBSQL_STORE_LIST_MESSAGES_INVALID_PAGE",
1043
- domain: ErrorDomain.STORAGE,
1044
- category: ErrorCategory.USER,
1045
- details: { page }
1180
+ if (value instanceof Date) {
1181
+ return value.toISOString();
1182
+ }
1183
+ return typeof value === "object" ? JSON.stringify(value) : value;
1184
+ }
1185
+ function prepareDeleteStatement({ tableName, keys }) {
1186
+ const parsedTableName = parseSqlIdentifier(tableName, "table name");
1187
+ const whereClause = prepareWhereClause(keys, TABLE_SCHEMAS[tableName]);
1188
+ return {
1189
+ sql: `DELETE FROM ${parsedTableName}${whereClause.sql}`,
1190
+ args: whereClause.args
1191
+ };
1192
+ }
1193
+ function prepareWhereClause(filters, schema) {
1194
+ const conditions = [];
1195
+ const args = [];
1196
+ for (const [columnName, filterValue] of Object.entries(filters)) {
1197
+ const column = schema[columnName];
1198
+ if (!column) {
1199
+ throw new Error(`Unknown column: ${columnName}`);
1200
+ }
1201
+ const parsedColumn = parseSqlIdentifier(columnName, "column name");
1202
+ const result = buildCondition2(parsedColumn, filterValue);
1203
+ conditions.push(result.condition);
1204
+ args.push(...result.args);
1205
+ }
1206
+ return {
1207
+ sql: conditions.length > 0 ? ` WHERE ${conditions.join(" AND ")}` : "",
1208
+ args
1209
+ };
1210
+ }
1211
+ function buildCondition2(columnName, filterValue) {
1212
+ if (filterValue === null) {
1213
+ return { condition: `${columnName} IS NULL`, args: [] };
1214
+ }
1215
+ if (typeof filterValue === "object" && filterValue !== null && ("startAt" in filterValue || "endAt" in filterValue)) {
1216
+ return buildDateRangeCondition(columnName, filterValue);
1217
+ }
1218
+ return {
1219
+ condition: `${columnName} = ?`,
1220
+ args: [transformToSqlValue(filterValue)]
1221
+ };
1222
+ }
1223
+ function buildDateRangeCondition(columnName, range) {
1224
+ const conditions = [];
1225
+ const args = [];
1226
+ if (range.startAt !== void 0) {
1227
+ conditions.push(`${columnName} >= ?`);
1228
+ args.push(transformToSqlValue(range.startAt));
1229
+ }
1230
+ if (range.endAt !== void 0) {
1231
+ conditions.push(`${columnName} <= ?`);
1232
+ args.push(transformToSqlValue(range.endAt));
1233
+ }
1234
+ if (conditions.length === 0) {
1235
+ throw new Error("Date range must specify at least startAt or endAt");
1236
+ }
1237
+ return {
1238
+ condition: conditions.join(" AND "),
1239
+ args
1240
+ };
1241
+ }
1242
+ function transformFromSqlRow({
1243
+ tableName,
1244
+ sqlRow
1245
+ }) {
1246
+ const result = {};
1247
+ const jsonColumns = new Set(
1248
+ Object.keys(TABLE_SCHEMAS[tableName]).filter((key) => TABLE_SCHEMAS[tableName][key].type === "jsonb").map((key) => key)
1249
+ );
1250
+ const dateColumns = new Set(
1251
+ Object.keys(TABLE_SCHEMAS[tableName]).filter((key) => TABLE_SCHEMAS[tableName][key].type === "timestamp").map((key) => key)
1252
+ );
1253
+ for (const [key, value] of Object.entries(sqlRow)) {
1254
+ if (value === null || value === void 0) {
1255
+ result[key] = value;
1256
+ continue;
1257
+ }
1258
+ if (dateColumns.has(key) && typeof value === "string") {
1259
+ result[key] = new Date(value);
1260
+ continue;
1261
+ }
1262
+ if (jsonColumns.has(key) && typeof value === "string") {
1263
+ result[key] = safelyParseJSON(value);
1264
+ continue;
1265
+ }
1266
+ result[key] = value;
1267
+ }
1268
+ return result;
1269
+ }
1270
+
1271
+ // src/storage/db/index.ts
1272
+ function resolveClient(config) {
1273
+ if ("client" in config) {
1274
+ return config.client;
1275
+ }
1276
+ return createClient({
1277
+ url: config.url,
1278
+ ...config.authToken ? { authToken: config.authToken } : {}
1279
+ });
1280
+ }
1281
+ var LibSQLDB = class extends MastraBase {
1282
+ client;
1283
+ maxRetries;
1284
+ initialBackoffMs;
1285
+ executeWriteOperationWithRetry;
1286
+ constructor({
1287
+ client,
1288
+ maxRetries,
1289
+ initialBackoffMs
1290
+ }) {
1291
+ super({
1292
+ component: "STORAGE",
1293
+ name: "LIBSQL_DB_LAYER"
1294
+ });
1295
+ this.client = client;
1296
+ this.maxRetries = maxRetries ?? 5;
1297
+ this.initialBackoffMs = initialBackoffMs ?? 100;
1298
+ this.executeWriteOperationWithRetry = createExecuteWriteOperationWithRetry({
1299
+ logger: this.logger,
1300
+ maxRetries: this.maxRetries,
1301
+ initialBackoffMs: this.initialBackoffMs
1302
+ });
1303
+ }
1304
+ /**
1305
+ * Checks if a column exists in the specified table.
1306
+ *
1307
+ * @param table - The name of the table to check
1308
+ * @param column - The name of the column to look for
1309
+ * @returns `true` if the column exists in the table, `false` otherwise
1310
+ */
1311
+ async hasColumn(table, column) {
1312
+ const sanitizedTable = parseSqlIdentifier(table, "table name");
1313
+ const result = await this.client.execute({
1314
+ sql: `PRAGMA table_info("${sanitizedTable}")`
1315
+ });
1316
+ return result.rows?.some((row) => row.name === column);
1317
+ }
1318
+ /**
1319
+ * Internal insert implementation without retry logic.
1320
+ */
1321
+ async doInsert({
1322
+ tableName,
1323
+ record
1324
+ }) {
1325
+ await this.client.execute(
1326
+ prepareStatement({
1327
+ tableName,
1328
+ record
1329
+ })
1330
+ );
1331
+ }
1332
+ /**
1333
+ * Inserts or replaces a record in the specified table with automatic retry on lock errors.
1334
+ *
1335
+ * @param args - The insert arguments
1336
+ * @param args.tableName - The name of the table to insert into
1337
+ * @param args.record - The record to insert (key-value pairs)
1338
+ */
1339
+ insert(args) {
1340
+ return this.executeWriteOperationWithRetry(() => this.doInsert(args), `insert into table ${args.tableName}`);
1341
+ }
1342
+ /**
1343
+ * Internal update implementation without retry logic.
1344
+ */
1345
+ async doUpdate({
1346
+ tableName,
1347
+ keys,
1348
+ data
1349
+ }) {
1350
+ await this.client.execute(prepareUpdateStatement({ tableName, updates: data, keys }));
1351
+ }
1352
+ /**
1353
+ * Updates a record in the specified table with automatic retry on lock errors.
1354
+ *
1355
+ * @param args - The update arguments
1356
+ * @param args.tableName - The name of the table to update
1357
+ * @param args.keys - The key(s) identifying the record to update
1358
+ * @param args.data - The fields to update (key-value pairs)
1359
+ */
1360
+ update(args) {
1361
+ return this.executeWriteOperationWithRetry(() => this.doUpdate(args), `update table ${args.tableName}`);
1362
+ }
1363
+ /**
1364
+ * Internal batch insert implementation without retry logic.
1365
+ */
1366
+ async doBatchInsert({
1367
+ tableName,
1368
+ records
1369
+ }) {
1370
+ if (records.length === 0) return;
1371
+ const batchStatements = records.map((r) => prepareStatement({ tableName, record: r }));
1372
+ await this.client.batch(batchStatements, "write");
1373
+ }
1374
+ /**
1375
+ * Inserts multiple records in a single batch transaction with automatic retry on lock errors.
1376
+ *
1377
+ * @param args - The batch insert arguments
1378
+ * @param args.tableName - The name of the table to insert into
1379
+ * @param args.records - Array of records to insert
1380
+ * @throws {MastraError} When the batch insert fails after retries
1381
+ */
1382
+ async batchInsert(args) {
1383
+ return this.executeWriteOperationWithRetry(
1384
+ () => this.doBatchInsert(args),
1385
+ `batch insert into table ${args.tableName}`
1386
+ ).catch((error) => {
1387
+ throw new MastraError(
1388
+ {
1389
+ id: createStorageErrorId("LIBSQL", "BATCH_INSERT", "FAILED"),
1390
+ domain: ErrorDomain.STORAGE,
1391
+ category: ErrorCategory.THIRD_PARTY,
1392
+ details: {
1393
+ tableName: args.tableName
1394
+ }
1046
1395
  },
1047
- new Error("page must be >= 0")
1396
+ error
1048
1397
  );
1049
- }
1050
- const perPage = normalizePerPage(perPageInput, 40);
1051
- const { offset, perPage: perPageForResponse } = calculatePagination(page, perPageInput, perPage);
1052
- try {
1053
- const { field, direction } = this.parseOrderBy(orderBy, "ASC");
1054
- const orderByStatement = `ORDER BY "${field}" ${direction}`;
1055
- const conditions = [`thread_id = ?`];
1056
- const queryParams = [threadId];
1057
- if (resourceId) {
1058
- conditions.push(`"resourceId" = ?`);
1059
- queryParams.push(resourceId);
1060
- }
1061
- if (filter?.dateRange?.start) {
1062
- conditions.push(`"createdAt" >= ?`);
1063
- queryParams.push(
1064
- filter.dateRange.start instanceof Date ? filter.dateRange.start.toISOString() : filter.dateRange.start
1065
- );
1066
- }
1067
- if (filter?.dateRange?.end) {
1068
- conditions.push(`"createdAt" <= ?`);
1069
- queryParams.push(
1070
- filter.dateRange.end instanceof Date ? filter.dateRange.end.toISOString() : filter.dateRange.end
1398
+ });
1399
+ }
1400
+ /**
1401
+ * Internal batch update implementation without retry logic.
1402
+ * Each record can be updated based on single or composite keys.
1403
+ */
1404
+ async doBatchUpdate({
1405
+ tableName,
1406
+ updates
1407
+ }) {
1408
+ if (updates.length === 0) return;
1409
+ const batchStatements = updates.map(
1410
+ ({ keys, data }) => prepareUpdateStatement({
1411
+ tableName,
1412
+ updates: data,
1413
+ keys
1414
+ })
1415
+ );
1416
+ await this.client.batch(batchStatements, "write");
1417
+ }
1418
+ /**
1419
+ * Updates multiple records in a single batch transaction with automatic retry on lock errors.
1420
+ * Each record can be updated based on single or composite keys.
1421
+ *
1422
+ * @param args - The batch update arguments
1423
+ * @param args.tableName - The name of the table to update
1424
+ * @param args.updates - Array of update operations, each containing keys and data
1425
+ * @throws {MastraError} When the batch update fails after retries
1426
+ */
1427
+ async batchUpdate(args) {
1428
+ return this.executeWriteOperationWithRetry(
1429
+ () => this.doBatchUpdate(args),
1430
+ `batch update in table ${args.tableName}`
1431
+ ).catch((error) => {
1432
+ throw new MastraError(
1433
+ {
1434
+ id: createStorageErrorId("LIBSQL", "BATCH_UPDATE", "FAILED"),
1435
+ domain: ErrorDomain.STORAGE,
1436
+ category: ErrorCategory.THIRD_PARTY,
1437
+ details: {
1438
+ tableName: args.tableName
1439
+ }
1440
+ },
1441
+ error
1442
+ );
1443
+ });
1444
+ }
1445
+ /**
1446
+ * Internal batch delete implementation without retry logic.
1447
+ * Each record can be deleted based on single or composite keys.
1448
+ */
1449
+ async doBatchDelete({
1450
+ tableName,
1451
+ keys
1452
+ }) {
1453
+ if (keys.length === 0) return;
1454
+ const batchStatements = keys.map(
1455
+ (keyObj) => prepareDeleteStatement({
1456
+ tableName,
1457
+ keys: keyObj
1458
+ })
1459
+ );
1460
+ await this.client.batch(batchStatements, "write");
1461
+ }
1462
+ /**
1463
+ * Deletes multiple records in a single batch transaction with automatic retry on lock errors.
1464
+ * Each record can be deleted based on single or composite keys.
1465
+ *
1466
+ * @param args - The batch delete arguments
1467
+ * @param args.tableName - The name of the table to delete from
1468
+ * @param args.keys - Array of key objects identifying records to delete
1469
+ * @throws {MastraError} When the batch delete fails after retries
1470
+ */
1471
+ async batchDelete({
1472
+ tableName,
1473
+ keys
1474
+ }) {
1475
+ return this.executeWriteOperationWithRetry(
1476
+ () => this.doBatchDelete({ tableName, keys }),
1477
+ `batch delete from table ${tableName}`
1478
+ ).catch((error) => {
1479
+ throw new MastraError(
1480
+ {
1481
+ id: createStorageErrorId("LIBSQL", "BATCH_DELETE", "FAILED"),
1482
+ domain: ErrorDomain.STORAGE,
1483
+ category: ErrorCategory.THIRD_PARTY,
1484
+ details: {
1485
+ tableName
1486
+ }
1487
+ },
1488
+ error
1489
+ );
1490
+ });
1491
+ }
1492
+ /**
1493
+ * Internal single-record delete implementation without retry logic.
1494
+ */
1495
+ async doDelete({ tableName, keys }) {
1496
+ await this.client.execute(prepareDeleteStatement({ tableName, keys }));
1497
+ }
1498
+ /**
1499
+ * Deletes a single record from the specified table with automatic retry on lock errors.
1500
+ *
1501
+ * @param args - The delete arguments
1502
+ * @param args.tableName - The name of the table to delete from
1503
+ * @param args.keys - The key(s) identifying the record to delete
1504
+ * @throws {MastraError} When the delete fails after retries
1505
+ */
1506
+ async delete(args) {
1507
+ return this.executeWriteOperationWithRetry(() => this.doDelete(args), `delete from table ${args.tableName}`).catch(
1508
+ (error) => {
1509
+ throw new MastraError(
1510
+ {
1511
+ id: createStorageErrorId("LIBSQL", "DELETE", "FAILED"),
1512
+ domain: ErrorDomain.STORAGE,
1513
+ category: ErrorCategory.THIRD_PARTY,
1514
+ details: {
1515
+ tableName: args.tableName
1516
+ }
1517
+ },
1518
+ error
1071
1519
  );
1072
1520
  }
1073
- const whereClause = conditions.length > 0 ? `WHERE ${conditions.join(" AND ")}` : "";
1074
- const countResult = await this.client.execute({
1075
- sql: `SELECT COUNT(*) as count FROM ${TABLE_MESSAGES} ${whereClause}`,
1076
- args: queryParams
1077
- });
1078
- const total = Number(countResult.rows?.[0]?.count ?? 0);
1079
- const limitValue = perPageInput === false ? total : perPage;
1080
- const dataResult = await this.client.execute({
1081
- sql: `SELECT id, content, role, type, "createdAt", "resourceId", "thread_id" FROM ${TABLE_MESSAGES} ${whereClause} ${orderByStatement} LIMIT ? OFFSET ?`,
1082
- args: [...queryParams, limitValue, offset]
1521
+ );
1522
+ }
1523
+ /**
1524
+ * Selects a single record from the specified table by key(s).
1525
+ * Returns the most recently created record if multiple matches exist.
1526
+ * Automatically parses JSON string values back to objects/arrays.
1527
+ *
1528
+ * @typeParam R - The expected return type of the record
1529
+ * @param args - The select arguments
1530
+ * @param args.tableName - The name of the table to select from
1531
+ * @param args.keys - The key(s) identifying the record to select
1532
+ * @returns The matching record or `null` if not found
1533
+ */
1534
+ async select({ tableName, keys }) {
1535
+ const parsedTableName = parseSqlIdentifier(tableName, "table name");
1536
+ const parsedKeys = Object.keys(keys).map((key) => parseSqlIdentifier(key, "column name"));
1537
+ const conditions = parsedKeys.map((key) => `${key} = ?`).join(" AND ");
1538
+ const values = Object.values(keys);
1539
+ const result = await this.client.execute({
1540
+ sql: `SELECT * FROM ${parsedTableName} WHERE ${conditions} ORDER BY createdAt DESC LIMIT 1`,
1541
+ args: values
1542
+ });
1543
+ if (!result.rows || result.rows.length === 0) {
1544
+ return null;
1545
+ }
1546
+ const row = result.rows[0];
1547
+ const parsed = Object.fromEntries(
1548
+ Object.entries(row || {}).map(([k, v]) => {
1549
+ try {
1550
+ return [k, typeof v === "string" ? v.startsWith("{") || v.startsWith("[") ? JSON.parse(v) : v : v];
1551
+ } catch {
1552
+ return [k, v];
1553
+ }
1554
+ })
1555
+ );
1556
+ return parsed;
1557
+ }
1558
+ /**
1559
+ * Selects multiple records from the specified table with optional filtering, ordering, and pagination.
1560
+ *
1561
+ * @typeParam R - The expected return type of each record
1562
+ * @param args - The select arguments
1563
+ * @param args.tableName - The name of the table to select from
1564
+ * @param args.whereClause - Optional WHERE clause with SQL string and arguments
1565
+ * @param args.orderBy - Optional ORDER BY clause (e.g., "createdAt DESC")
1566
+ * @param args.offset - Optional offset for pagination
1567
+ * @param args.limit - Optional limit for pagination
1568
+ * @param args.args - Optional additional query arguments
1569
+ * @returns Array of matching records
1570
+ */
1571
+ async selectMany({
1572
+ tableName,
1573
+ whereClause,
1574
+ orderBy,
1575
+ offset,
1576
+ limit,
1577
+ args
1578
+ }) {
1579
+ const parsedTableName = parseSqlIdentifier(tableName, "table name");
1580
+ let statement = `SELECT * FROM ${parsedTableName}`;
1581
+ if (whereClause?.sql) {
1582
+ statement += ` ${whereClause.sql}`;
1583
+ }
1584
+ if (orderBy) {
1585
+ statement += ` ORDER BY ${orderBy}`;
1586
+ }
1587
+ if (limit) {
1588
+ statement += ` LIMIT ${limit}`;
1589
+ }
1590
+ if (offset) {
1591
+ statement += ` OFFSET ${offset}`;
1592
+ }
1593
+ const result = await this.client.execute({
1594
+ sql: statement,
1595
+ args: [...whereClause?.args ?? [], ...args ?? []]
1596
+ });
1597
+ return result.rows;
1598
+ }
1599
+ /**
1600
+ * Returns the total count of records matching the optional WHERE clause.
1601
+ *
1602
+ * @param args - The count arguments
1603
+ * @param args.tableName - The name of the table to count from
1604
+ * @param args.whereClause - Optional WHERE clause with SQL string and arguments
1605
+ * @returns The total count of matching records
1606
+ */
1607
+ async selectTotalCount({
1608
+ tableName,
1609
+ whereClause
1610
+ }) {
1611
+ const parsedTableName = parseSqlIdentifier(tableName, "table name");
1612
+ const statement = `SELECT COUNT(*) as count FROM ${parsedTableName} ${whereClause ? `${whereClause.sql}` : ""}`;
1613
+ const result = await this.client.execute({
1614
+ sql: statement,
1615
+ args: whereClause?.args ?? []
1616
+ });
1617
+ if (!result.rows || result.rows.length === 0) {
1618
+ return 0;
1619
+ }
1620
+ return result.rows[0]?.count ?? 0;
1621
+ }
1622
+ /**
1623
+ * Maps a storage column type to its SQLite equivalent.
1624
+ */
1625
+ getSqlType(type) {
1626
+ switch (type) {
1627
+ case "bigint":
1628
+ return "INTEGER";
1629
+ // SQLite uses INTEGER for all integer sizes
1630
+ case "timestamp":
1631
+ return "TEXT";
1632
+ // Store timestamps as ISO strings in SQLite
1633
+ case "float":
1634
+ return "REAL";
1635
+ // SQLite's floating point type
1636
+ case "boolean":
1637
+ return "INTEGER";
1638
+ // SQLite uses 0/1 for booleans
1639
+ case "jsonb":
1640
+ return "TEXT";
1641
+ // Store JSON as TEXT in SQLite
1642
+ default:
1643
+ return getSqlType(type);
1644
+ }
1645
+ }
1646
+ /**
1647
+ * Creates a table if it doesn't exist based on the provided schema.
1648
+ *
1649
+ * @param args - The create table arguments
1650
+ * @param args.tableName - The name of the table to create
1651
+ * @param args.schema - The schema definition for the table columns
1652
+ */
1653
+ async createTable({
1654
+ tableName,
1655
+ schema
1656
+ }) {
1657
+ try {
1658
+ const parsedTableName = parseSqlIdentifier(tableName, "table name");
1659
+ const columnDefinitions = Object.entries(schema).map(([colName, colDef]) => {
1660
+ const type = this.getSqlType(colDef.type);
1661
+ const nullable = colDef.nullable === false ? "NOT NULL" : "";
1662
+ const primaryKey = colDef.primaryKey ? "PRIMARY KEY" : "";
1663
+ return `"${colName}" ${type} ${nullable} ${primaryKey}`.trim();
1083
1664
  });
1084
- const messages = (dataResult.rows || []).map((row) => this.parseRow(row));
1085
- if (total === 0 && messages.length === 0 && (!include || include.length === 0)) {
1086
- return {
1087
- messages: [],
1088
- total: 0,
1089
- page,
1090
- perPage: perPageForResponse,
1091
- hasMore: false
1092
- };
1665
+ const tableConstraints = [];
1666
+ if (tableName === TABLE_WORKFLOW_SNAPSHOT) {
1667
+ tableConstraints.push("UNIQUE (workflow_name, run_id)");
1093
1668
  }
1094
- const messageIds = new Set(messages.map((m) => m.id));
1095
- if (include && include.length > 0) {
1096
- const includeMessages = await this._getIncludedMessages({ threadId, include });
1097
- if (includeMessages) {
1098
- for (const includeMsg of includeMessages) {
1099
- if (!messageIds.has(includeMsg.id)) {
1100
- messages.push(includeMsg);
1101
- messageIds.add(includeMsg.id);
1102
- }
1103
- }
1104
- }
1669
+ const allDefinitions = [...columnDefinitions, ...tableConstraints].join(",\n ");
1670
+ const sql = `CREATE TABLE IF NOT EXISTS ${parsedTableName} (
1671
+ ${allDefinitions}
1672
+ )`;
1673
+ await this.client.execute(sql);
1674
+ this.logger.debug(`LibSQLDB: Created table ${tableName}`);
1675
+ if (tableName === TABLE_SPANS) {
1676
+ await this.migrateSpansTable();
1105
1677
  }
1106
- const list = new MessageList().add(messages, "memory");
1107
- let finalMessages = list.get.all.db();
1108
- finalMessages = finalMessages.sort((a, b) => {
1109
- const isDateField = field === "createdAt" || field === "updatedAt";
1110
- const aValue = isDateField ? new Date(a[field]).getTime() : a[field];
1111
- const bValue = isDateField ? new Date(b[field]).getTime() : b[field];
1112
- if (typeof aValue === "number" && typeof bValue === "number") {
1113
- return direction === "ASC" ? aValue - bValue : bValue - aValue;
1114
- }
1115
- return direction === "ASC" ? String(aValue).localeCompare(String(bValue)) : String(bValue).localeCompare(String(aValue));
1116
- });
1117
- const returnedThreadMessageIds = new Set(finalMessages.filter((m) => m.threadId === threadId).map((m) => m.id));
1118
- const allThreadMessagesReturned = returnedThreadMessageIds.size >= total;
1119
- const hasMore = perPageInput !== false && !allThreadMessagesReturned && offset + perPage < total;
1120
- return {
1121
- messages: finalMessages,
1122
- total,
1123
- page,
1124
- perPage: perPageForResponse,
1125
- hasMore
1126
- };
1127
1678
  } catch (error) {
1128
- const mastraError = new MastraError(
1679
+ throw new MastraError(
1129
1680
  {
1130
- id: "LIBSQL_STORE_LIST_MESSAGES_FAILED",
1681
+ id: createStorageErrorId("LIBSQL", "CREATE_TABLE", "FAILED"),
1131
1682
  domain: ErrorDomain.STORAGE,
1132
1683
  category: ErrorCategory.THIRD_PARTY,
1133
- details: {
1134
- threadId,
1135
- resourceId: resourceId ?? ""
1136
- }
1684
+ details: { tableName }
1137
1685
  },
1138
1686
  error
1139
1687
  );
1140
- this.logger?.error?.(mastraError.toString());
1141
- this.logger?.trackException?.(mastraError);
1142
- return {
1143
- messages: [],
1144
- total: 0,
1145
- page,
1146
- perPage: perPageForResponse,
1147
- hasMore: false
1148
- };
1149
1688
  }
1150
1689
  }
1151
- async saveMessages({ messages }) {
1152
- if (messages.length === 0) return { messages };
1690
+ /**
1691
+ * Migrates the spans table schema from OLD_SPAN_SCHEMA to current SPAN_SCHEMA.
1692
+ * This adds new columns that don't exist in old schema.
1693
+ */
1694
+ async migrateSpansTable() {
1695
+ const schema = TABLE_SCHEMAS[TABLE_SPANS];
1153
1696
  try {
1154
- const threadId = messages[0]?.threadId;
1155
- if (!threadId) {
1156
- throw new Error("Thread ID is required");
1157
- }
1158
- const batchStatements = messages.map((message) => {
1159
- const time = message.createdAt || /* @__PURE__ */ new Date();
1160
- if (!message.threadId) {
1161
- throw new Error(
1162
- `Expected to find a threadId for message, but couldn't find one. An unexpected error has occurred.`
1163
- );
1164
- }
1165
- if (!message.resourceId) {
1166
- throw new Error(
1167
- `Expected to find a resourceId for message, but couldn't find one. An unexpected error has occurred.`
1168
- );
1697
+ for (const [columnName, columnDef] of Object.entries(schema)) {
1698
+ const columnExists = await this.hasColumn(TABLE_SPANS, columnName);
1699
+ if (!columnExists) {
1700
+ const sqlType = this.getSqlType(columnDef.type);
1701
+ const alterSql = `ALTER TABLE "${TABLE_SPANS}" ADD COLUMN "${columnName}" ${sqlType}`;
1702
+ await this.client.execute(alterSql);
1703
+ this.logger.debug(`LibSQLDB: Added column '${columnName}' to ${TABLE_SPANS}`);
1169
1704
  }
1170
- return {
1171
- sql: `INSERT INTO "${TABLE_MESSAGES}" (id, thread_id, content, role, type, "createdAt", "resourceId")
1172
- VALUES (?, ?, ?, ?, ?, ?, ?)
1173
- ON CONFLICT(id) DO UPDATE SET
1174
- thread_id=excluded.thread_id,
1175
- content=excluded.content,
1176
- role=excluded.role,
1177
- type=excluded.type,
1178
- "resourceId"=excluded."resourceId"
1179
- `,
1180
- args: [
1181
- message.id,
1182
- message.threadId,
1183
- typeof message.content === "object" ? JSON.stringify(message.content) : message.content,
1184
- message.role,
1185
- message.type || "v2",
1186
- time instanceof Date ? time.toISOString() : time,
1187
- message.resourceId
1188
- ]
1189
- };
1190
- });
1191
- const now = (/* @__PURE__ */ new Date()).toISOString();
1192
- batchStatements.push({
1193
- sql: `UPDATE "${TABLE_THREADS}" SET "updatedAt" = ? WHERE id = ?`,
1194
- args: [now, threadId]
1705
+ }
1706
+ this.logger.info(`LibSQLDB: Migration completed for ${TABLE_SPANS}`);
1707
+ } catch (error) {
1708
+ this.logger.warn(`LibSQLDB: Failed to migrate spans table ${TABLE_SPANS}:`, error);
1709
+ }
1710
+ }
1711
+ /**
1712
+ * Gets a default value for a column type (used when adding NOT NULL columns).
1713
+ */
1714
+ getDefaultValue(type) {
1715
+ switch (type) {
1716
+ case "text":
1717
+ case "uuid":
1718
+ return "DEFAULT ''";
1719
+ case "integer":
1720
+ case "bigint":
1721
+ case "float":
1722
+ return "DEFAULT 0";
1723
+ case "boolean":
1724
+ return "DEFAULT 0";
1725
+ case "jsonb":
1726
+ return "DEFAULT '{}'";
1727
+ case "timestamp":
1728
+ return "DEFAULT CURRENT_TIMESTAMP";
1729
+ default:
1730
+ return "DEFAULT ''";
1731
+ }
1732
+ }
1733
+ /**
1734
+ * Alters an existing table to add missing columns.
1735
+ * Used for schema migrations when new columns are added.
1736
+ *
1737
+ * @param args - The alter table arguments
1738
+ * @param args.tableName - The name of the table to alter
1739
+ * @param args.schema - The full schema definition for the table
1740
+ * @param args.ifNotExists - Array of column names to add if they don't exist
1741
+ */
1742
+ async alterTable({
1743
+ tableName,
1744
+ schema,
1745
+ ifNotExists
1746
+ }) {
1747
+ const parsedTableName = parseSqlIdentifier(tableName, "table name");
1748
+ try {
1749
+ const tableInfo = await this.client.execute({
1750
+ sql: `PRAGMA table_info("${parsedTableName}")`
1195
1751
  });
1196
- const BATCH_SIZE = 50;
1197
- const messageStatements = batchStatements.slice(0, -1);
1198
- const threadUpdateStatement = batchStatements[batchStatements.length - 1];
1199
- for (let i = 0; i < messageStatements.length; i += BATCH_SIZE) {
1200
- const batch = messageStatements.slice(i, i + BATCH_SIZE);
1201
- if (batch.length > 0) {
1202
- await this.client.batch(batch, "write");
1752
+ const existingColumns = new Set((tableInfo.rows || []).map((row) => row.name?.toLowerCase()));
1753
+ for (const columnName of ifNotExists) {
1754
+ if (!existingColumns.has(columnName.toLowerCase()) && schema[columnName]) {
1755
+ const columnDef = schema[columnName];
1756
+ const sqlType = this.getSqlType(columnDef.type);
1757
+ const defaultValue = this.getDefaultValue(columnDef.type);
1758
+ const alterSql = `ALTER TABLE ${parsedTableName} ADD COLUMN "${columnName}" ${sqlType} ${defaultValue}`;
1759
+ await this.client.execute(alterSql);
1760
+ this.logger.debug(`LibSQLDB: Added column ${columnName} to table ${tableName}`);
1203
1761
  }
1204
1762
  }
1205
- if (threadUpdateStatement) {
1206
- await this.client.execute(threadUpdateStatement);
1207
- }
1208
- const list = new MessageList().add(messages, "memory");
1209
- return { messages: list.get.all.db() };
1210
1763
  } catch (error) {
1211
1764
  throw new MastraError(
1212
1765
  {
1213
- id: "LIBSQL_STORE_SAVE_MESSAGES_FAILED",
1766
+ id: createStorageErrorId("LIBSQL", "ALTER_TABLE", "FAILED"),
1214
1767
  domain: ErrorDomain.STORAGE,
1215
- category: ErrorCategory.THIRD_PARTY
1768
+ category: ErrorCategory.THIRD_PARTY,
1769
+ details: { tableName }
1216
1770
  },
1217
1771
  error
1218
1772
  );
1219
1773
  }
1220
1774
  }
1221
- async updateMessages({
1222
- messages
1223
- }) {
1224
- if (messages.length === 0) {
1225
- return [];
1226
- }
1227
- const messageIds = messages.map((m) => m.id);
1228
- const placeholders = messageIds.map(() => "?").join(",");
1229
- const selectSql = `SELECT * FROM ${TABLE_MESSAGES} WHERE id IN (${placeholders})`;
1230
- const existingResult = await this.client.execute({ sql: selectSql, args: messageIds });
1231
- const existingMessages = existingResult.rows.map((row) => this.parseRow(row));
1232
- if (existingMessages.length === 0) {
1233
- return [];
1234
- }
1235
- const batchStatements = [];
1236
- const threadIdsToUpdate = /* @__PURE__ */ new Set();
1237
- const columnMapping = {
1238
- threadId: "thread_id"
1239
- };
1240
- for (const existingMessage of existingMessages) {
1241
- const updatePayload = messages.find((m) => m.id === existingMessage.id);
1242
- if (!updatePayload) continue;
1243
- const { id, ...fieldsToUpdate } = updatePayload;
1244
- if (Object.keys(fieldsToUpdate).length === 0) continue;
1245
- threadIdsToUpdate.add(existingMessage.threadId);
1246
- if (updatePayload.threadId && updatePayload.threadId !== existingMessage.threadId) {
1247
- threadIdsToUpdate.add(updatePayload.threadId);
1248
- }
1249
- const setClauses = [];
1250
- const args = [];
1251
- const updatableFields = { ...fieldsToUpdate };
1252
- if (updatableFields.content) {
1253
- const newContent = {
1254
- ...existingMessage.content,
1255
- ...updatableFields.content,
1256
- // Deep merge metadata if it exists on both
1257
- ...existingMessage.content?.metadata && updatableFields.content.metadata ? {
1258
- metadata: {
1259
- ...existingMessage.content.metadata,
1260
- ...updatableFields.content.metadata
1261
- }
1262
- } : {}
1263
- };
1264
- setClauses.push(`${parseSqlIdentifier("content", "column name")} = ?`);
1265
- args.push(JSON.stringify(newContent));
1266
- delete updatableFields.content;
1267
- }
1268
- for (const key in updatableFields) {
1269
- if (Object.prototype.hasOwnProperty.call(updatableFields, key)) {
1270
- const dbKey = columnMapping[key] || key;
1271
- setClauses.push(`${parseSqlIdentifier(dbKey, "column name")} = ?`);
1272
- let value = updatableFields[key];
1273
- if (typeof value === "object" && value !== null) {
1274
- value = JSON.stringify(value);
1775
+ /**
1776
+ * Deletes all records from the specified table.
1777
+ * Errors are logged but not thrown.
1778
+ *
1779
+ * @param args - The delete arguments
1780
+ * @param args.tableName - The name of the table to clear
1781
+ */
1782
+ async deleteData({ tableName }) {
1783
+ const parsedTableName = parseSqlIdentifier(tableName, "table name");
1784
+ try {
1785
+ await this.client.execute(`DELETE FROM ${parsedTableName}`);
1786
+ } catch (e) {
1787
+ const mastraError = new MastraError(
1788
+ {
1789
+ id: createStorageErrorId("LIBSQL", "CLEAR_TABLE", "FAILED"),
1790
+ domain: ErrorDomain.STORAGE,
1791
+ category: ErrorCategory.THIRD_PARTY,
1792
+ details: {
1793
+ tableName
1275
1794
  }
1276
- args.push(value);
1277
- }
1278
- }
1279
- if (setClauses.length === 0) continue;
1280
- args.push(id);
1281
- const sql = `UPDATE ${TABLE_MESSAGES} SET ${setClauses.join(", ")} WHERE id = ?`;
1282
- batchStatements.push({ sql, args });
1283
- }
1284
- if (batchStatements.length === 0) {
1285
- return existingMessages;
1795
+ },
1796
+ e
1797
+ );
1798
+ this.logger?.trackException?.(mastraError);
1799
+ this.logger?.error?.(mastraError.toString());
1286
1800
  }
1287
- const now = (/* @__PURE__ */ new Date()).toISOString();
1288
- for (const threadId of threadIdsToUpdate) {
1289
- if (threadId) {
1290
- batchStatements.push({
1291
- sql: `UPDATE ${TABLE_THREADS} SET updatedAt = ? WHERE id = ?`,
1292
- args: [now, threadId]
1293
- });
1801
+ }
1802
+ };
1803
+
1804
+ // src/storage/domains/agents/index.ts
1805
+ var AgentsLibSQL = class extends AgentsStorage {
1806
+ #db;
1807
+ constructor(config) {
1808
+ super();
1809
+ const client = resolveClient(config);
1810
+ this.#db = new LibSQLDB({ client, maxRetries: config.maxRetries, initialBackoffMs: config.initialBackoffMs });
1811
+ }
1812
+ async init() {
1813
+ await this.#db.createTable({ tableName: TABLE_AGENTS, schema: AGENTS_SCHEMA });
1814
+ }
1815
+ async dangerouslyClearAll() {
1816
+ await this.#db.deleteData({ tableName: TABLE_AGENTS });
1817
+ }
1818
+ parseJson(value, fieldName) {
1819
+ if (!value) return void 0;
1820
+ if (typeof value !== "string") return value;
1821
+ try {
1822
+ return JSON.parse(value);
1823
+ } catch (error) {
1824
+ const details = {
1825
+ value: value.length > 100 ? value.substring(0, 100) + "..." : value
1826
+ };
1827
+ if (fieldName) {
1828
+ details.field = fieldName;
1294
1829
  }
1830
+ throw new MastraError(
1831
+ {
1832
+ id: createStorageErrorId("LIBSQL", "PARSE_JSON", "INVALID_JSON"),
1833
+ domain: ErrorDomain.STORAGE,
1834
+ category: ErrorCategory.SYSTEM,
1835
+ text: `Failed to parse JSON${fieldName ? ` for field "${fieldName}"` : ""}: ${error instanceof Error ? error.message : "Unknown error"}`,
1836
+ details
1837
+ },
1838
+ error
1839
+ );
1295
1840
  }
1296
- await this.client.batch(batchStatements, "write");
1297
- const updatedResult = await this.client.execute({ sql: selectSql, args: messageIds });
1298
- return updatedResult.rows.map((row) => this.parseRow(row));
1299
1841
  }
1300
- async deleteMessages(messageIds) {
1301
- if (!messageIds || messageIds.length === 0) {
1302
- return;
1303
- }
1842
+ parseRow(row) {
1843
+ return {
1844
+ id: row.id,
1845
+ name: row.name,
1846
+ description: row.description,
1847
+ instructions: row.instructions,
1848
+ model: this.parseJson(row.model, "model"),
1849
+ tools: this.parseJson(row.tools, "tools"),
1850
+ defaultOptions: this.parseJson(row.defaultOptions, "defaultOptions"),
1851
+ workflows: this.parseJson(row.workflows, "workflows"),
1852
+ agents: this.parseJson(row.agents, "agents"),
1853
+ inputProcessors: this.parseJson(row.inputProcessors, "inputProcessors"),
1854
+ outputProcessors: this.parseJson(row.outputProcessors, "outputProcessors"),
1855
+ memory: this.parseJson(row.memory, "memory"),
1856
+ scorers: this.parseJson(row.scorers, "scorers"),
1857
+ metadata: this.parseJson(row.metadata, "metadata"),
1858
+ createdAt: new Date(row.createdAt),
1859
+ updatedAt: new Date(row.updatedAt)
1860
+ };
1861
+ }
1862
+ async getAgentById({ id }) {
1304
1863
  try {
1305
- const BATCH_SIZE = 100;
1306
- const threadIds = /* @__PURE__ */ new Set();
1307
- const tx = await this.client.transaction("write");
1308
- try {
1309
- for (let i = 0; i < messageIds.length; i += BATCH_SIZE) {
1310
- const batch = messageIds.slice(i, i + BATCH_SIZE);
1311
- const placeholders = batch.map(() => "?").join(",");
1312
- const result = await tx.execute({
1313
- sql: `SELECT DISTINCT thread_id FROM "${TABLE_MESSAGES}" WHERE id IN (${placeholders})`,
1314
- args: batch
1315
- });
1316
- result.rows?.forEach((row) => {
1317
- if (row.thread_id) threadIds.add(row.thread_id);
1318
- });
1319
- await tx.execute({
1320
- sql: `DELETE FROM "${TABLE_MESSAGES}" WHERE id IN (${placeholders})`,
1321
- args: batch
1322
- });
1323
- }
1324
- if (threadIds.size > 0) {
1325
- const now = (/* @__PURE__ */ new Date()).toISOString();
1326
- for (const threadId of threadIds) {
1327
- await tx.execute({
1328
- sql: `UPDATE "${TABLE_THREADS}" SET "updatedAt" = ? WHERE id = ?`,
1329
- args: [now, threadId]
1330
- });
1331
- }
1332
- }
1333
- await tx.commit();
1334
- } catch (error) {
1335
- await tx.rollback();
1336
- throw error;
1337
- }
1864
+ const result = await this.#db.select({
1865
+ tableName: TABLE_AGENTS,
1866
+ keys: { id }
1867
+ });
1868
+ return result ? this.parseRow(result) : null;
1338
1869
  } catch (error) {
1339
1870
  throw new MastraError(
1340
1871
  {
1341
- id: "LIBSQL_STORE_DELETE_MESSAGES_FAILED",
1872
+ id: createStorageErrorId("LIBSQL", "GET_AGENT_BY_ID", "FAILED"),
1342
1873
  domain: ErrorDomain.STORAGE,
1343
1874
  category: ErrorCategory.THIRD_PARTY,
1344
- details: { messageIds: messageIds.join(", ") }
1875
+ details: { agentId: id }
1345
1876
  },
1346
1877
  error
1347
1878
  );
1348
1879
  }
1349
1880
  }
1350
- async getResourceById({ resourceId }) {
1351
- const result = await this.operations.load({
1352
- tableName: TABLE_RESOURCES,
1353
- keys: { id: resourceId }
1354
- });
1355
- if (!result) {
1356
- return null;
1881
+ async createAgent({ agent }) {
1882
+ try {
1883
+ const now = /* @__PURE__ */ new Date();
1884
+ await this.#db.insert({
1885
+ tableName: TABLE_AGENTS,
1886
+ record: {
1887
+ id: agent.id,
1888
+ name: agent.name,
1889
+ description: agent.description ?? null,
1890
+ instructions: agent.instructions,
1891
+ model: agent.model,
1892
+ tools: agent.tools ?? null,
1893
+ defaultOptions: agent.defaultOptions ?? null,
1894
+ workflows: agent.workflows ?? null,
1895
+ agents: agent.agents ?? null,
1896
+ inputProcessors: agent.inputProcessors ?? null,
1897
+ outputProcessors: agent.outputProcessors ?? null,
1898
+ memory: agent.memory ?? null,
1899
+ scorers: agent.scorers ?? null,
1900
+ metadata: agent.metadata ?? null,
1901
+ createdAt: now,
1902
+ updatedAt: now
1903
+ }
1904
+ });
1905
+ return {
1906
+ ...agent,
1907
+ createdAt: now,
1908
+ updatedAt: now
1909
+ };
1910
+ } catch (error) {
1911
+ throw new MastraError(
1912
+ {
1913
+ id: createStorageErrorId("LIBSQL", "CREATE_AGENT", "FAILED"),
1914
+ domain: ErrorDomain.STORAGE,
1915
+ category: ErrorCategory.THIRD_PARTY,
1916
+ details: { agentId: agent.id }
1917
+ },
1918
+ error
1919
+ );
1357
1920
  }
1358
- return {
1359
- ...result,
1360
- // Ensure workingMemory is always returned as a string, even if auto-parsed as JSON
1361
- workingMemory: result.workingMemory && typeof result.workingMemory === "object" ? JSON.stringify(result.workingMemory) : result.workingMemory,
1362
- metadata: typeof result.metadata === "string" ? JSON.parse(result.metadata) : result.metadata,
1363
- createdAt: new Date(result.createdAt),
1364
- updatedAt: new Date(result.updatedAt)
1365
- };
1366
1921
  }
1367
- async saveResource({ resource }) {
1368
- await this.operations.insert({
1369
- tableName: TABLE_RESOURCES,
1370
- record: {
1371
- ...resource,
1372
- metadata: JSON.stringify(resource.metadata)
1922
+ async updateAgent({ id, ...updates }) {
1923
+ try {
1924
+ const existingAgent = await this.getAgentById({ id });
1925
+ if (!existingAgent) {
1926
+ throw new MastraError({
1927
+ id: createStorageErrorId("LIBSQL", "UPDATE_AGENT", "NOT_FOUND"),
1928
+ domain: ErrorDomain.STORAGE,
1929
+ category: ErrorCategory.USER,
1930
+ text: `Agent ${id} not found`,
1931
+ details: { agentId: id }
1932
+ });
1373
1933
  }
1374
- });
1375
- return resource;
1376
- }
1377
- async updateResource({
1378
- resourceId,
1379
- workingMemory,
1380
- metadata
1381
- }) {
1382
- const existingResource = await this.getResourceById({ resourceId });
1383
- if (!existingResource) {
1384
- const newResource = {
1385
- id: resourceId,
1386
- workingMemory,
1387
- metadata: metadata || {},
1388
- createdAt: /* @__PURE__ */ new Date(),
1934
+ const data = {
1389
1935
  updatedAt: /* @__PURE__ */ new Date()
1390
1936
  };
1391
- return this.saveResource({ resource: newResource });
1392
- }
1393
- const updatedResource = {
1394
- ...existingResource,
1395
- workingMemory: workingMemory !== void 0 ? workingMemory : existingResource.workingMemory,
1396
- metadata: {
1397
- ...existingResource.metadata,
1398
- ...metadata
1399
- },
1400
- updatedAt: /* @__PURE__ */ new Date()
1401
- };
1402
- const updates = [];
1403
- const values = [];
1404
- if (workingMemory !== void 0) {
1405
- updates.push("workingMemory = ?");
1406
- values.push(workingMemory);
1407
- }
1408
- if (metadata) {
1409
- updates.push("metadata = ?");
1410
- values.push(JSON.stringify(updatedResource.metadata));
1411
- }
1412
- updates.push("updatedAt = ?");
1413
- values.push(updatedResource.updatedAt.toISOString());
1414
- values.push(resourceId);
1415
- await this.client.execute({
1416
- sql: `UPDATE ${TABLE_RESOURCES} SET ${updates.join(", ")} WHERE id = ?`,
1417
- args: values
1418
- });
1419
- return updatedResource;
1937
+ if (updates.name !== void 0) data.name = updates.name;
1938
+ if (updates.description !== void 0) data.description = updates.description;
1939
+ if (updates.instructions !== void 0) data.instructions = updates.instructions;
1940
+ if (updates.model !== void 0) data.model = updates.model;
1941
+ if (updates.tools !== void 0) data.tools = updates.tools;
1942
+ if (updates.defaultOptions !== void 0) data.defaultOptions = updates.defaultOptions;
1943
+ if (updates.workflows !== void 0) data.workflows = updates.workflows;
1944
+ if (updates.agents !== void 0) data.agents = updates.agents;
1945
+ if (updates.inputProcessors !== void 0) data.inputProcessors = updates.inputProcessors;
1946
+ if (updates.outputProcessors !== void 0) data.outputProcessors = updates.outputProcessors;
1947
+ if (updates.memory !== void 0) data.memory = updates.memory;
1948
+ if (updates.scorers !== void 0) data.scorers = updates.scorers;
1949
+ if (updates.metadata !== void 0) {
1950
+ data.metadata = { ...existingAgent.metadata, ...updates.metadata };
1951
+ }
1952
+ if (Object.keys(data).length > 1) {
1953
+ await this.#db.update({
1954
+ tableName: TABLE_AGENTS,
1955
+ keys: { id },
1956
+ data
1957
+ });
1958
+ }
1959
+ const updatedAgent = await this.getAgentById({ id });
1960
+ if (!updatedAgent) {
1961
+ throw new MastraError({
1962
+ id: createStorageErrorId("LIBSQL", "UPDATE_AGENT", "NOT_FOUND_AFTER_UPDATE"),
1963
+ domain: ErrorDomain.STORAGE,
1964
+ category: ErrorCategory.SYSTEM,
1965
+ text: `Agent ${id} not found after update`,
1966
+ details: { agentId: id }
1967
+ });
1968
+ }
1969
+ return updatedAgent;
1970
+ } catch (error) {
1971
+ if (error instanceof MastraError) {
1972
+ throw error;
1973
+ }
1974
+ throw new MastraError(
1975
+ {
1976
+ id: createStorageErrorId("LIBSQL", "UPDATE_AGENT", "FAILED"),
1977
+ domain: ErrorDomain.STORAGE,
1978
+ category: ErrorCategory.THIRD_PARTY,
1979
+ details: { agentId: id }
1980
+ },
1981
+ error
1982
+ );
1983
+ }
1420
1984
  }
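The update path above writes only the fields that are explicitly passed and shallow-merges `metadata` into the stored object; omitted columns keep their current values and `updatedAt` is always refreshed. A minimal sketch of that behaviour (the `agentsStore` variable is a placeholder for an instance of the agents storage class defined in this file, which is not named in this hunk):

// `agentsStore` is assumed to be an already-initialized instance of the class above.
const updated = await agentsStore.updateAgent({
  id: 'agent-1',
  instructions: 'Answer in French.',
  metadata: { tier: 'pro' }, // merged into the existing metadata, not replaced
});
console.log(updated.updatedAt); // refreshed on every update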
1421
- async getThreadById({ threadId }) {
1985
+ async deleteAgent({ id }) {
1422
1986
  try {
1423
- const result = await this.operations.load({
1424
- tableName: TABLE_THREADS,
1425
- keys: { id: threadId }
1987
+ await this.#db.delete({
1988
+ tableName: TABLE_AGENTS,
1989
+ keys: { id }
1426
1990
  });
1427
- if (!result) {
1428
- return null;
1429
- }
1430
- return {
1431
- ...result,
1432
- metadata: typeof result.metadata === "string" ? JSON.parse(result.metadata) : result.metadata,
1433
- createdAt: new Date(result.createdAt),
1434
- updatedAt: new Date(result.updatedAt)
1435
- };
1436
1991
  } catch (error) {
1437
1992
  throw new MastraError(
1438
1993
  {
1439
- id: "LIBSQL_STORE_GET_THREAD_BY_ID_FAILED",
1994
+ id: createStorageErrorId("LIBSQL", "DELETE_AGENT", "FAILED"),
1440
1995
  domain: ErrorDomain.STORAGE,
1441
1996
  category: ErrorCategory.THIRD_PARTY,
1442
- details: { threadId }
1997
+ details: { agentId: id }
1443
1998
  },
1444
1999
  error
1445
2000
  );
1446
2001
  }
1447
2002
  }
1448
- async listThreadsByResourceId(args) {
1449
- const { resourceId, page = 0, perPage: perPageInput, orderBy } = args;
2003
+ async listAgents(args) {
2004
+ const { page = 0, perPage: perPageInput, orderBy } = args || {};
2005
+ const { field, direction } = this.parseOrderBy(orderBy);
1450
2006
  if (page < 0) {
1451
2007
  throw new MastraError(
1452
2008
  {
1453
- id: "LIBSQL_STORE_LIST_THREADS_BY_RESOURCE_ID_INVALID_PAGE",
2009
+ id: createStorageErrorId("LIBSQL", "LIST_AGENTS", "INVALID_PAGE"),
1454
2010
  domain: ErrorDomain.STORAGE,
1455
2011
  category: ErrorCategory.USER,
1456
2012
  details: { page }
@@ -1460,28 +2016,11 @@ var MemoryLibSQL = class extends MemoryStorage {
1460
2016
  }
1461
2017
  const perPage = normalizePerPage(perPageInput, 100);
1462
2018
  const { offset, perPage: perPageForResponse } = calculatePagination(page, perPageInput, perPage);
1463
- const { field, direction } = this.parseOrderBy(orderBy);
1464
2019
  try {
1465
- const baseQuery = `FROM ${TABLE_THREADS} WHERE resourceId = ?`;
1466
- const queryParams = [resourceId];
1467
- const mapRowToStorageThreadType = (row) => ({
1468
- id: row.id,
1469
- resourceId: row.resourceId,
1470
- title: row.title,
1471
- createdAt: new Date(row.createdAt),
1472
- // Convert string to Date
1473
- updatedAt: new Date(row.updatedAt),
1474
- // Convert string to Date
1475
- metadata: typeof row.metadata === "string" ? JSON.parse(row.metadata) : row.metadata
1476
- });
1477
- const countResult = await this.client.execute({
1478
- sql: `SELECT COUNT(*) as count ${baseQuery}`,
1479
- args: queryParams
1480
- });
1481
- const total = Number(countResult.rows?.[0]?.count ?? 0);
2020
+ const total = await this.#db.selectTotalCount({ tableName: TABLE_AGENTS });
1482
2021
  if (total === 0) {
1483
2022
  return {
1484
- threads: [],
2023
+ agents: [],
1485
2024
  total: 0,
1486
2025
  page,
1487
2026
  perPage: perPageForResponse,
@@ -1489,942 +2028,1199 @@ var MemoryLibSQL = class extends MemoryStorage {
1489
2028
  };
1490
2029
  }
1491
2030
  const limitValue = perPageInput === false ? total : perPage;
1492
- const dataResult = await this.client.execute({
1493
- sql: `SELECT * ${baseQuery} ORDER BY "${field}" ${direction} LIMIT ? OFFSET ?`,
1494
- args: [...queryParams, limitValue, offset]
2031
+ const rows = await this.#db.selectMany({
2032
+ tableName: TABLE_AGENTS,
2033
+ orderBy: `"${field}" ${direction}`,
2034
+ limit: limitValue,
2035
+ offset
1495
2036
  });
1496
- const threads = (dataResult.rows || []).map(mapRowToStorageThreadType);
2037
+ const agents = rows.map((row) => this.parseRow(row));
1497
2038
  return {
1498
- threads,
2039
+ agents,
1499
2040
  total,
1500
2041
  page,
1501
2042
  perPage: perPageForResponse,
1502
2043
  hasMore: perPageInput === false ? false : offset + perPage < total
1503
2044
  };
1504
- } catch (error) {
1505
- const mastraError = new MastraError(
1506
- {
1507
- id: "LIBSQL_STORE_LIST_THREADS_BY_RESOURCE_ID_FAILED",
1508
- domain: ErrorDomain.STORAGE,
1509
- category: ErrorCategory.THIRD_PARTY,
1510
- details: { resourceId }
1511
- },
1512
- error
1513
- );
1514
- this.logger?.trackException?.(mastraError);
1515
- this.logger?.error?.(mastraError.toString());
1516
- return {
1517
- threads: [],
1518
- total: 0,
1519
- page,
1520
- perPage: perPageForResponse,
1521
- hasMore: false
1522
- };
1523
- }
1524
- }
1525
- async saveThread({ thread }) {
1526
- try {
1527
- await this.operations.insert({
1528
- tableName: TABLE_THREADS,
1529
- record: {
1530
- ...thread,
1531
- metadata: JSON.stringify(thread.metadata)
1532
- }
1533
- });
1534
- return thread;
1535
- } catch (error) {
1536
- const mastraError = new MastraError(
1537
- {
1538
- id: "LIBSQL_STORE_SAVE_THREAD_FAILED",
1539
- domain: ErrorDomain.STORAGE,
1540
- category: ErrorCategory.THIRD_PARTY,
1541
- details: { threadId: thread.id }
1542
- },
1543
- error
1544
- );
1545
- this.logger?.trackException?.(mastraError);
1546
- this.logger?.error?.(mastraError.toString());
1547
- throw mastraError;
1548
- }
1549
- }
1550
- async updateThread({
1551
- id,
1552
- title,
1553
- metadata
1554
- }) {
1555
- const thread = await this.getThreadById({ threadId: id });
1556
- if (!thread) {
1557
- throw new MastraError({
1558
- id: "LIBSQL_STORE_UPDATE_THREAD_FAILED_THREAD_NOT_FOUND",
1559
- domain: ErrorDomain.STORAGE,
1560
- category: ErrorCategory.USER,
1561
- text: `Thread ${id} not found`,
1562
- details: {
1563
- status: 404,
1564
- threadId: id
1565
- }
1566
- });
1567
- }
1568
- const updatedThread = {
1569
- ...thread,
1570
- title,
1571
- metadata: {
1572
- ...thread.metadata,
1573
- ...metadata
1574
- }
1575
- };
1576
- try {
1577
- await this.client.execute({
1578
- sql: `UPDATE ${TABLE_THREADS} SET title = ?, metadata = ? WHERE id = ?`,
1579
- args: [title, JSON.stringify(updatedThread.metadata), id]
1580
- });
1581
- return updatedThread;
1582
- } catch (error) {
1583
- throw new MastraError(
1584
- {
1585
- id: "LIBSQL_STORE_UPDATE_THREAD_FAILED",
1586
- domain: ErrorDomain.STORAGE,
1587
- category: ErrorCategory.THIRD_PARTY,
1588
- text: `Failed to update thread ${id}`,
1589
- details: { threadId: id }
1590
- },
1591
- error
1592
- );
1593
- }
1594
- }
1595
- async deleteThread({ threadId }) {
1596
- try {
1597
- await this.client.execute({
1598
- sql: `DELETE FROM ${TABLE_MESSAGES} WHERE thread_id = ?`,
1599
- args: [threadId]
1600
- });
1601
- await this.client.execute({
1602
- sql: `DELETE FROM ${TABLE_THREADS} WHERE id = ?`,
1603
- args: [threadId]
1604
- });
1605
2045
  } catch (error) {
1606
2046
  throw new MastraError(
1607
2047
  {
1608
- id: "LIBSQL_STORE_DELETE_THREAD_FAILED",
2048
+ id: createStorageErrorId("LIBSQL", "LIST_AGENTS", "FAILED"),
1609
2049
  domain: ErrorDomain.STORAGE,
1610
- category: ErrorCategory.THIRD_PARTY,
1611
- details: { threadId }
2050
+ category: ErrorCategory.THIRD_PARTY
1612
2051
  },
1613
2052
  error
1614
2053
  );
1615
2054
  }
1616
2055
  }
1617
2056
  };
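Taken together, the new agents domain adds create/read/update/delete plus paginated listing over TABLE_AGENTS. A usage sketch of the surface shown above; the store variable, its construction, and the `model` payload shape are assumptions, while the method names and argument shapes come from the code in this hunk:

// `agentsStore` stands in for an instance of the agents storage class above,
// with the agents table already created.
await agentsStore.createAgent({
  agent: {
    id: 'agent-1',
    name: 'Support bot',
    instructions: 'Be concise.',
    model: { provider: 'openai', name: 'gpt-4o-mini' }, // illustrative shape; stored as JSON
  },
});
const found = await agentsStore.getAgentById({ id: 'agent-1' }); // resolves to null when missing
const { agents, total, hasMore } = await agentsStore.listAgents({ page: 0, perPage: 20 });
await agentsStore.deleteAgent({ id: 'agent-1' });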
1618
- function createExecuteWriteOperationWithRetry({
1619
- logger,
1620
- maxRetries,
1621
- initialBackoffMs
1622
- }) {
1623
- return async function executeWriteOperationWithRetry(operationFn, operationDescription) {
1624
- let retries = 0;
1625
- while (true) {
1626
- try {
1627
- return await operationFn();
1628
- } catch (error) {
1629
- if (error.message && (error.message.includes("SQLITE_BUSY") || error.message.includes("database is locked")) && retries < maxRetries) {
1630
- retries++;
1631
- const backoffTime = initialBackoffMs * Math.pow(2, retries - 1);
1632
- logger.warn(
1633
- `LibSQLStore: Encountered SQLITE_BUSY during ${operationDescription}. Retrying (${retries}/${maxRetries}) in ${backoffTime}ms...`
1634
- );
1635
- await new Promise((resolve) => setTimeout(resolve, backoffTime));
1636
- } else {
1637
- logger.error(`LibSQLStore: Error during ${operationDescription} after ${retries} retries: ${error}`);
1638
- throw error;
1639
- }
1640
- }
1641
- }
1642
- };
1643
- }
1644
- function prepareStatement({ tableName, record }) {
1645
- const parsedTableName = parseSqlIdentifier(tableName, "table name");
1646
- const columns = Object.keys(record).map((col) => parseSqlIdentifier(col, "column name"));
1647
- const values = Object.values(record).map((v) => {
1648
- if (typeof v === `undefined` || v === null) {
1649
- return null;
1650
- }
1651
- if (v instanceof Date) {
1652
- return v.toISOString();
1653
- }
1654
- return typeof v === "object" ? JSON.stringify(v) : v;
1655
- });
1656
- const placeholders = values.map(() => "?").join(", ");
1657
- return {
1658
- sql: `INSERT OR REPLACE INTO ${parsedTableName} (${columns.join(", ")}) VALUES (${placeholders})`,
1659
- args: values
1660
- };
1661
- }
1662
- function prepareUpdateStatement({
1663
- tableName,
1664
- updates,
1665
- keys
1666
- }) {
1667
- const parsedTableName = parseSqlIdentifier(tableName, "table name");
1668
- const schema = TABLE_SCHEMAS[tableName];
1669
- const updateColumns = Object.keys(updates).map((col) => parseSqlIdentifier(col, "column name"));
1670
- const updateValues = Object.values(updates).map(transformToSqlValue);
1671
- const setClause = updateColumns.map((col) => `${col} = ?`).join(", ");
1672
- const whereClause = prepareWhereClause(keys, schema);
1673
- return {
1674
- sql: `UPDATE ${parsedTableName} SET ${setClause}${whereClause.sql}`,
1675
- args: [...updateValues, ...whereClause.args]
1676
- };
1677
- }
1678
- function transformToSqlValue(value) {
1679
- if (typeof value === "undefined" || value === null) {
1680
- return null;
1681
- }
1682
- if (value instanceof Date) {
1683
- return value.toISOString();
1684
- }
1685
- return typeof value === "object" ? JSON.stringify(value) : value;
1686
- }
1687
- function prepareDeleteStatement({ tableName, keys }) {
1688
- const parsedTableName = parseSqlIdentifier(tableName, "table name");
1689
- const whereClause = prepareWhereClause(keys, TABLE_SCHEMAS[tableName]);
1690
- return {
1691
- sql: `DELETE FROM ${parsedTableName}${whereClause.sql}`,
1692
- args: whereClause.args
1693
- };
1694
- }
1695
- function prepareWhereClause(filters, schema) {
1696
- const conditions = [];
1697
- const args = [];
1698
- for (const [columnName, filterValue] of Object.entries(filters)) {
1699
- const column = schema[columnName];
1700
- if (!column) {
1701
- throw new Error(`Unknown column: ${columnName}`);
1702
- }
1703
- const parsedColumn = parseSqlIdentifier(columnName, "column name");
1704
- const result = buildCondition2(parsedColumn, filterValue);
1705
- conditions.push(result.condition);
1706
- args.push(...result.args);
1707
- }
1708
- return {
1709
- sql: conditions.length > 0 ? ` WHERE ${conditions.join(" AND ")}` : "",
1710
- args
1711
- };
1712
- }
1713
- function buildCondition2(columnName, filterValue) {
1714
- if (filterValue === null) {
1715
- return { condition: `${columnName} IS NULL`, args: [] };
1716
- }
1717
- if (typeof filterValue === "object" && filterValue !== null && ("startAt" in filterValue || "endAt" in filterValue)) {
1718
- return buildDateRangeCondition(columnName, filterValue);
1719
- }
1720
- return {
1721
- condition: `${columnName} = ?`,
1722
- args: [transformToSqlValue(filterValue)]
1723
- };
1724
- }
1725
- function buildDateRangeCondition(columnName, range) {
1726
- const conditions = [];
1727
- const args = [];
1728
- if (range.startAt !== void 0) {
1729
- conditions.push(`${columnName} >= ?`);
1730
- args.push(transformToSqlValue(range.startAt));
1731
- }
1732
- if (range.endAt !== void 0) {
1733
- conditions.push(`${columnName} <= ?`);
1734
- args.push(transformToSqlValue(range.endAt));
1735
- }
1736
- if (conditions.length === 0) {
1737
- throw new Error("Date range must specify at least startAt or endAt");
1738
- }
1739
- return {
1740
- condition: conditions.join(" AND "),
1741
- args
1742
- };
1743
- }
1744
- function buildDateRangeFilter(dateRange, columnName = "createdAt") {
1745
- if (!dateRange?.start && !dateRange?.end) {
1746
- return {};
1747
- }
1748
- const filter = {};
1749
- if (dateRange.start) {
1750
- filter.startAt = new Date(dateRange.start).toISOString();
2057
+ var MemoryLibSQL = class extends MemoryStorage {
2058
+ #client;
2059
+ #db;
2060
+ constructor(config) {
2061
+ super();
2062
+ const client = resolveClient(config);
2063
+ this.#client = client;
2064
+ this.#db = new LibSQLDB({ client, maxRetries: config.maxRetries, initialBackoffMs: config.initialBackoffMs });
2065
+ }
2066
+ async init() {
2067
+ await this.#db.createTable({ tableName: TABLE_THREADS, schema: TABLE_SCHEMAS[TABLE_THREADS] });
2068
+ await this.#db.createTable({ tableName: TABLE_MESSAGES, schema: TABLE_SCHEMAS[TABLE_MESSAGES] });
2069
+ await this.#db.createTable({ tableName: TABLE_RESOURCES, schema: TABLE_SCHEMAS[TABLE_RESOURCES] });
2070
+ await this.#db.alterTable({
2071
+ tableName: TABLE_MESSAGES,
2072
+ schema: TABLE_SCHEMAS[TABLE_MESSAGES],
2073
+ ifNotExists: ["resourceId"]
2074
+ });
1751
2075
  }
1752
- if (dateRange.end) {
1753
- filter.endAt = new Date(dateRange.end).toISOString();
2076
+ async dangerouslyClearAll() {
2077
+ await this.#db.deleteData({ tableName: TABLE_MESSAGES });
2078
+ await this.#db.deleteData({ tableName: TABLE_THREADS });
2079
+ await this.#db.deleteData({ tableName: TABLE_RESOURCES });
1754
2080
  }
1755
- return { [columnName]: filter };
1756
- }
1757
- function transformFromSqlRow({
1758
- tableName,
1759
- sqlRow
1760
- }) {
1761
- const result = {};
1762
- const jsonColumns = new Set(
1763
- Object.keys(TABLE_SCHEMAS[tableName]).filter((key) => TABLE_SCHEMAS[tableName][key].type === "jsonb").map((key) => key)
1764
- );
1765
- const dateColumns = new Set(
1766
- Object.keys(TABLE_SCHEMAS[tableName]).filter((key) => TABLE_SCHEMAS[tableName][key].type === "timestamp").map((key) => key)
1767
- );
1768
- for (const [key, value] of Object.entries(sqlRow)) {
1769
- if (value === null || value === void 0) {
1770
- result[key] = value;
1771
- continue;
1772
- }
1773
- if (dateColumns.has(key) && typeof value === "string") {
1774
- result[key] = new Date(value);
1775
- continue;
1776
- }
1777
- if (jsonColumns.has(key) && typeof value === "string") {
1778
- result[key] = safelyParseJSON(value);
1779
- continue;
2081
+ parseRow(row) {
2082
+ let content = row.content;
2083
+ try {
2084
+ content = JSON.parse(row.content);
2085
+ } catch {
1780
2086
  }
1781
- result[key] = value;
2087
+ const result = {
2088
+ id: row.id,
2089
+ content,
2090
+ role: row.role,
2091
+ createdAt: new Date(row.createdAt),
2092
+ threadId: row.thread_id,
2093
+ resourceId: row.resourceId
2094
+ };
2095
+ if (row.type && row.type !== `v2`) result.type = row.type;
2096
+ return result;
1782
2097
  }
1783
- return result;
1784
- }
1785
-
1786
- // src/storage/domains/observability/index.ts
1787
- var ObservabilityLibSQL = class extends ObservabilityStorage {
1788
- operations;
1789
- constructor({ operations }) {
1790
- super();
1791
- this.operations = operations;
2098
+ async _getIncludedMessages({ include }) {
2099
+ if (!include || include.length === 0) return null;
2100
+ const unionQueries = [];
2101
+ const params = [];
2102
+ for (const inc of include) {
2103
+ const { id, withPreviousMessages = 0, withNextMessages = 0 } = inc;
2104
+ unionQueries.push(
2105
+ `
2106
+ SELECT * FROM (
2107
+ WITH target_thread AS (
2108
+ SELECT thread_id FROM "${TABLE_MESSAGES}" WHERE id = ?
2109
+ ),
2110
+ numbered_messages AS (
2111
+ SELECT
2112
+ id, content, role, type, "createdAt", thread_id, "resourceId",
2113
+ ROW_NUMBER() OVER (ORDER BY "createdAt" ASC) as row_num
2114
+ FROM "${TABLE_MESSAGES}"
2115
+ WHERE thread_id = (SELECT thread_id FROM target_thread)
2116
+ ),
2117
+ target_positions AS (
2118
+ SELECT row_num as target_pos
2119
+ FROM numbered_messages
2120
+ WHERE id = ?
2121
+ )
2122
+ SELECT DISTINCT m.*
2123
+ FROM numbered_messages m
2124
+ CROSS JOIN target_positions t
2125
+ WHERE m.row_num BETWEEN (t.target_pos - ?) AND (t.target_pos + ?)
2126
+ )
2127
+ `
2128
+ // Keep ASC for final sorting after fetching context
2129
+ );
2130
+ params.push(id, id, withPreviousMessages, withNextMessages);
2131
+ }
2132
+ const finalQuery = unionQueries.join(" UNION ALL ") + ' ORDER BY "createdAt" ASC';
2133
+ const includedResult = await this.#client.execute({ sql: finalQuery, args: params });
2134
+ const includedRows = includedResult.rows?.map((row) => this.parseRow(row));
2135
+ const seen = /* @__PURE__ */ new Set();
2136
+ const dedupedRows = includedRows.filter((row) => {
2137
+ if (seen.has(row.id)) return false;
2138
+ seen.add(row.id);
2139
+ return true;
2140
+ });
2141
+ return dedupedRows;
1792
2142
  }
1793
- async createSpan(span) {
2143
+ async listMessagesById({ messageIds }) {
2144
+ if (messageIds.length === 0) return { messages: [] };
1794
2145
  try {
1795
- const now = (/* @__PURE__ */ new Date()).toISOString();
1796
- const record = {
1797
- ...span,
1798
- createdAt: now,
1799
- updatedAt: now
1800
- };
1801
- return this.operations.insert({ tableName: TABLE_SPANS, record });
2146
+ const sql = `
2147
+ SELECT
2148
+ id,
2149
+ content,
2150
+ role,
2151
+ type,
2152
+ "createdAt",
2153
+ thread_id,
2154
+ "resourceId"
2155
+ FROM "${TABLE_MESSAGES}"
2156
+ WHERE id IN (${messageIds.map(() => "?").join(", ")})
2157
+ ORDER BY "createdAt" DESC
2158
+ `;
2159
+ const result = await this.#client.execute({ sql, args: messageIds });
2160
+ if (!result.rows) return { messages: [] };
2161
+ const list = new MessageList().add(result.rows.map(this.parseRow), "memory");
2162
+ return { messages: list.get.all.db() };
1802
2163
  } catch (error) {
1803
2164
  throw new MastraError(
1804
2165
  {
1805
- id: "LIBSQL_STORE_CREATE_SPAN_FAILED",
2166
+ id: createStorageErrorId("LIBSQL", "LIST_MESSAGES_BY_ID", "FAILED"),
2167
+ domain: ErrorDomain.STORAGE,
2168
+ category: ErrorCategory.THIRD_PARTY,
2169
+ details: { messageIds: JSON.stringify(messageIds) }
2170
+ },
2171
+ error
2172
+ );
2173
+ }
2174
+ }
2175
+ async listMessages(args) {
2176
+ const { threadId, resourceId, include, filter, perPage: perPageInput, page = 0, orderBy } = args;
2177
+ const threadIds = Array.isArray(threadId) ? threadId : [threadId];
2178
+ if (threadIds.length === 0 || threadIds.some((id) => !id.trim())) {
2179
+ throw new MastraError(
2180
+ {
2181
+ id: createStorageErrorId("LIBSQL", "LIST_MESSAGES", "INVALID_THREAD_ID"),
2182
+ domain: ErrorDomain.STORAGE,
2183
+ category: ErrorCategory.THIRD_PARTY,
2184
+ details: { threadId: Array.isArray(threadId) ? threadId.join(",") : threadId }
2185
+ },
2186
+ new Error("threadId must be a non-empty string or array of non-empty strings")
2187
+ );
2188
+ }
2189
+ if (page < 0) {
2190
+ throw new MastraError(
2191
+ {
2192
+ id: createStorageErrorId("LIBSQL", "LIST_MESSAGES", "INVALID_PAGE"),
1806
2193
  domain: ErrorDomain.STORAGE,
1807
2194
  category: ErrorCategory.USER,
2195
+ details: { page }
2196
+ },
2197
+ new Error("page must be >= 0")
2198
+ );
2199
+ }
2200
+ const perPage = normalizePerPage(perPageInput, 40);
2201
+ const { offset, perPage: perPageForResponse } = calculatePagination(page, perPageInput, perPage);
2202
+ try {
2203
+ const { field, direction } = this.parseOrderBy(orderBy, "ASC");
2204
+ const orderByStatement = `ORDER BY "${field}" ${direction}`;
2205
+ const threadPlaceholders = threadIds.map(() => "?").join(", ");
2206
+ const conditions = [`thread_id IN (${threadPlaceholders})`];
2207
+ const queryParams = [...threadIds];
2208
+ if (resourceId) {
2209
+ conditions.push(`"resourceId" = ?`);
2210
+ queryParams.push(resourceId);
2211
+ }
2212
+ if (filter?.dateRange?.start) {
2213
+ conditions.push(`"createdAt" >= ?`);
2214
+ queryParams.push(
2215
+ filter.dateRange.start instanceof Date ? filter.dateRange.start.toISOString() : filter.dateRange.start
2216
+ );
2217
+ }
2218
+ if (filter?.dateRange?.end) {
2219
+ conditions.push(`"createdAt" <= ?`);
2220
+ queryParams.push(
2221
+ filter.dateRange.end instanceof Date ? filter.dateRange.end.toISOString() : filter.dateRange.end
2222
+ );
2223
+ }
2224
+ const whereClause = conditions.length > 0 ? `WHERE ${conditions.join(" AND ")}` : "";
2225
+ const countResult = await this.#client.execute({
2226
+ sql: `SELECT COUNT(*) as count FROM ${TABLE_MESSAGES} ${whereClause}`,
2227
+ args: queryParams
2228
+ });
2229
+ const total = Number(countResult.rows?.[0]?.count ?? 0);
2230
+ const limitValue = perPageInput === false ? total : perPage;
2231
+ const dataResult = await this.#client.execute({
2232
+ sql: `SELECT id, content, role, type, "createdAt", "resourceId", "thread_id" FROM ${TABLE_MESSAGES} ${whereClause} ${orderByStatement} LIMIT ? OFFSET ?`,
2233
+ args: [...queryParams, limitValue, offset]
2234
+ });
2235
+ const messages = (dataResult.rows || []).map((row) => this.parseRow(row));
2236
+ if (total === 0 && messages.length === 0 && (!include || include.length === 0)) {
2237
+ return {
2238
+ messages: [],
2239
+ total: 0,
2240
+ page,
2241
+ perPage: perPageForResponse,
2242
+ hasMore: false
2243
+ };
2244
+ }
2245
+ const messageIds = new Set(messages.map((m) => m.id));
2246
+ if (include && include.length > 0) {
2247
+ const includeMessages = await this._getIncludedMessages({ include });
2248
+ if (includeMessages) {
2249
+ for (const includeMsg of includeMessages) {
2250
+ if (!messageIds.has(includeMsg.id)) {
2251
+ messages.push(includeMsg);
2252
+ messageIds.add(includeMsg.id);
2253
+ }
2254
+ }
2255
+ }
2256
+ }
2257
+ const list = new MessageList().add(messages, "memory");
2258
+ let finalMessages = list.get.all.db();
2259
+ finalMessages = finalMessages.sort((a, b) => {
2260
+ const isDateField = field === "createdAt" || field === "updatedAt";
2261
+ const aValue = isDateField ? new Date(a[field]).getTime() : a[field];
2262
+ const bValue = isDateField ? new Date(b[field]).getTime() : b[field];
2263
+ if (typeof aValue === "number" && typeof bValue === "number") {
2264
+ return direction === "ASC" ? aValue - bValue : bValue - aValue;
2265
+ }
2266
+ return direction === "ASC" ? String(aValue).localeCompare(String(bValue)) : String(bValue).localeCompare(String(aValue));
2267
+ });
2268
+ const threadIdSet = new Set(threadIds);
2269
+ const returnedThreadMessageIds = new Set(
2270
+ finalMessages.filter((m) => m.threadId && threadIdSet.has(m.threadId)).map((m) => m.id)
2271
+ );
2272
+ const allThreadMessagesReturned = returnedThreadMessageIds.size >= total;
2273
+ const hasMore = perPageInput !== false && !allThreadMessagesReturned && offset + perPage < total;
2274
+ return {
2275
+ messages: finalMessages,
2276
+ total,
2277
+ page,
2278
+ perPage: perPageForResponse,
2279
+ hasMore
2280
+ };
2281
+ } catch (error) {
2282
+ const mastraError = new MastraError(
2283
+ {
2284
+ id: createStorageErrorId("LIBSQL", "LIST_MESSAGES", "FAILED"),
2285
+ domain: ErrorDomain.STORAGE,
2286
+ category: ErrorCategory.THIRD_PARTY,
1808
2287
  details: {
1809
- spanId: span.spanId,
1810
- traceId: span.traceId,
1811
- spanType: span.spanType,
1812
- spanName: span.name
2288
+ threadId: Array.isArray(threadId) ? threadId.join(",") : threadId,
2289
+ resourceId: resourceId ?? ""
1813
2290
  }
1814
2291
  },
1815
2292
  error
1816
2293
  );
2294
+ this.logger?.error?.(mastraError.toString());
2295
+ this.logger?.trackException?.(mastraError);
2296
+ return {
2297
+ messages: [],
2298
+ total: 0,
2299
+ page,
2300
+ perPage: perPageForResponse,
2301
+ hasMore: false
2302
+ };
1817
2303
  }
1818
2304
  }
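listMessages above paginates with normalizePerPage/calculatePagination, optionally narrows by resourceId and a createdAt date range, and can pull context rows around specific message ids through the windowed query in _getIncludedMessages. A sketch of a call exercising those options (the `memory` variable and the ids are illustrative; the argument names come from the implementation above):

// `memory` is an already-constructed MemoryLibSQL instance (see the constructor at the top of this class).
const { messages, total, hasMore } = await memory.listMessages({
  threadId: 'thread-1', // a single thread id or an array of ids
  resourceId: 'user-42', // optional additional filter
  filter: { dateRange: { start: new Date('2024-01-01') } },
  include: [{ id: 'msg-7', withPreviousMessages: 2, withNextMessages: 2 }],
  page: 0,
  perPage: 40, // pass false to disable the limit and fetch every row
});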
1819
- async getTrace(traceId) {
2305
+ async saveMessages({ messages }) {
2306
+ if (messages.length === 0) return { messages };
1820
2307
  try {
1821
- const spans = await this.operations.loadMany({
1822
- tableName: TABLE_SPANS,
1823
- whereClause: { sql: " WHERE traceId = ?", args: [traceId] },
1824
- orderBy: "startedAt DESC"
2308
+ const threadId = messages[0]?.threadId;
2309
+ if (!threadId) {
2310
+ throw new Error("Thread ID is required");
2311
+ }
2312
+ const batchStatements = messages.map((message) => {
2313
+ const time = message.createdAt || /* @__PURE__ */ new Date();
2314
+ if (!message.threadId) {
2315
+ throw new Error(
2316
+ `Expected to find a threadId for message, but couldn't find one. An unexpected error has occurred.`
2317
+ );
2318
+ }
2319
+ if (!message.resourceId) {
2320
+ throw new Error(
2321
+ `Expected to find a resourceId for message, but couldn't find one. An unexpected error has occurred.`
2322
+ );
2323
+ }
2324
+ return {
2325
+ sql: `INSERT INTO "${TABLE_MESSAGES}" (id, thread_id, content, role, type, "createdAt", "resourceId")
2326
+ VALUES (?, ?, ?, ?, ?, ?, ?)
2327
+ ON CONFLICT(id) DO UPDATE SET
2328
+ thread_id=excluded.thread_id,
2329
+ content=excluded.content,
2330
+ role=excluded.role,
2331
+ type=excluded.type,
2332
+ "resourceId"=excluded."resourceId"
2333
+ `,
2334
+ args: [
2335
+ message.id,
2336
+ message.threadId,
2337
+ typeof message.content === "object" ? JSON.stringify(message.content) : message.content,
2338
+ message.role,
2339
+ message.type || "v2",
2340
+ time instanceof Date ? time.toISOString() : time,
2341
+ message.resourceId
2342
+ ]
2343
+ };
1825
2344
  });
1826
- if (!spans || spans.length === 0) {
1827
- return null;
2345
+ const now = (/* @__PURE__ */ new Date()).toISOString();
2346
+ batchStatements.push({
2347
+ sql: `UPDATE "${TABLE_THREADS}" SET "updatedAt" = ? WHERE id = ?`,
2348
+ args: [now, threadId]
2349
+ });
2350
+ const BATCH_SIZE = 50;
2351
+ const messageStatements = batchStatements.slice(0, -1);
2352
+ const threadUpdateStatement = batchStatements[batchStatements.length - 1];
2353
+ for (let i = 0; i < messageStatements.length; i += BATCH_SIZE) {
2354
+ const batch = messageStatements.slice(i, i + BATCH_SIZE);
2355
+ if (batch.length > 0) {
2356
+ await this.#client.batch(batch, "write");
2357
+ }
1828
2358
  }
1829
- return {
1830
- traceId,
1831
- spans: spans.map((span) => transformFromSqlRow({ tableName: TABLE_SPANS, sqlRow: span }))
1832
- };
2359
+ if (threadUpdateStatement) {
2360
+ await this.#client.execute(threadUpdateStatement);
2361
+ }
2362
+ const list = new MessageList().add(messages, "memory");
2363
+ return { messages: list.get.all.db() };
1833
2364
  } catch (error) {
1834
2365
  throw new MastraError(
1835
2366
  {
1836
- id: "LIBSQL_STORE_GET_TRACE_FAILED",
2367
+ id: createStorageErrorId("LIBSQL", "SAVE_MESSAGES", "FAILED"),
1837
2368
  domain: ErrorDomain.STORAGE,
1838
- category: ErrorCategory.USER,
1839
- details: {
1840
- traceId
1841
- }
2369
+ category: ErrorCategory.THIRD_PARTY
1842
2370
  },
1843
2371
  error
1844
2372
  );
1845
2373
  }
1846
2374
  }
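saveMessages above requires every message to carry a threadId and a resourceId, upserts rows in batches of 50 through ON CONFLICT(id), and then bumps updatedAt on the owning thread. A minimal sketch (ids and the content payload are illustrative; object content is JSON.stringified before storage):

await memory.saveMessages({
  messages: [
    {
      id: 'msg-1',
      threadId: 'thread-1', // required, otherwise saveMessages throws
      resourceId: 'user-42', // also required
      role: 'user',
      content: { text: 'hello' }, // illustrative; the real shape follows Mastra's message format
      createdAt: new Date(),
    },
  ],
});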
1847
- async updateSpan({
1848
- spanId,
1849
- traceId,
1850
- updates
2375
+ async updateMessages({
2376
+ messages
1851
2377
  }) {
1852
- try {
1853
- await this.operations.update({
1854
- tableName: TABLE_SPANS,
1855
- keys: { spanId, traceId },
1856
- data: { ...updates, updatedAt: (/* @__PURE__ */ new Date()).toISOString() }
1857
- });
1858
- } catch (error) {
1859
- throw new MastraError(
1860
- {
1861
- id: "LIBSQL_STORE_UPDATE_SPAN_FAILED",
1862
- domain: ErrorDomain.STORAGE,
1863
- category: ErrorCategory.USER,
1864
- details: {
1865
- spanId,
1866
- traceId
1867
- }
1868
- },
1869
- error
1870
- );
2378
+ if (messages.length === 0) {
2379
+ return [];
1871
2380
  }
1872
- }
1873
- async getTracesPaginated({
1874
- filters,
1875
- pagination
1876
- }) {
1877
- const page = pagination?.page ?? 0;
1878
- const perPage = pagination?.perPage ?? 10;
1879
- const { entityId, entityType, ...actualFilters } = filters || {};
1880
- const filtersWithDateRange = {
1881
- ...actualFilters,
1882
- ...buildDateRangeFilter(pagination?.dateRange, "startedAt"),
1883
- parentSpanId: null
2381
+ const messageIds = messages.map((m) => m.id);
2382
+ const placeholders = messageIds.map(() => "?").join(",");
2383
+ const selectSql = `SELECT * FROM ${TABLE_MESSAGES} WHERE id IN (${placeholders})`;
2384
+ const existingResult = await this.#client.execute({ sql: selectSql, args: messageIds });
2385
+ const existingMessages = existingResult.rows.map((row) => this.parseRow(row));
2386
+ if (existingMessages.length === 0) {
2387
+ return [];
2388
+ }
2389
+ const batchStatements = [];
2390
+ const threadIdsToUpdate = /* @__PURE__ */ new Set();
2391
+ const columnMapping = {
2392
+ threadId: "thread_id"
1884
2393
  };
1885
- const whereClause = prepareWhereClause(filtersWithDateRange, SPAN_SCHEMA);
1886
- let actualWhereClause = whereClause.sql || "";
1887
- if (entityId && entityType) {
1888
- const statement = `name = ?`;
1889
- let name = "";
1890
- if (entityType === "workflow") {
1891
- name = `workflow run: '${entityId}'`;
1892
- } else if (entityType === "agent") {
1893
- name = `agent run: '${entityId}'`;
1894
- } else {
1895
- const error = new MastraError({
1896
- id: "LIBSQL_STORE_GET_TRACES_PAGINATED_FAILED",
1897
- domain: ErrorDomain.STORAGE,
1898
- category: ErrorCategory.USER,
1899
- details: {
1900
- entityType
1901
- },
1902
- text: `Cannot filter by entity type: ${entityType}`
1903
- });
1904
- this.logger?.trackException(error);
1905
- throw error;
2394
+ for (const existingMessage of existingMessages) {
2395
+ const updatePayload = messages.find((m) => m.id === existingMessage.id);
2396
+ if (!updatePayload) continue;
2397
+ const { id, ...fieldsToUpdate } = updatePayload;
2398
+ if (Object.keys(fieldsToUpdate).length === 0) continue;
2399
+ threadIdsToUpdate.add(existingMessage.threadId);
2400
+ if (updatePayload.threadId && updatePayload.threadId !== existingMessage.threadId) {
2401
+ threadIdsToUpdate.add(updatePayload.threadId);
1906
2402
  }
1907
- whereClause.args.push(name);
1908
- if (actualWhereClause) {
1909
- actualWhereClause += ` AND ${statement}`;
1910
- } else {
1911
- actualWhereClause += `WHERE ${statement}`;
2403
+ const setClauses = [];
2404
+ const args = [];
2405
+ const updatableFields = { ...fieldsToUpdate };
2406
+ if (updatableFields.content) {
2407
+ const newContent = {
2408
+ ...existingMessage.content,
2409
+ ...updatableFields.content,
2410
+ // Deep merge metadata if it exists on both
2411
+ ...existingMessage.content?.metadata && updatableFields.content.metadata ? {
2412
+ metadata: {
2413
+ ...existingMessage.content.metadata,
2414
+ ...updatableFields.content.metadata
2415
+ }
2416
+ } : {}
2417
+ };
2418
+ setClauses.push(`${parseSqlIdentifier("content", "column name")} = ?`);
2419
+ args.push(JSON.stringify(newContent));
2420
+ delete updatableFields.content;
2421
+ }
2422
+ for (const key in updatableFields) {
2423
+ if (Object.prototype.hasOwnProperty.call(updatableFields, key)) {
2424
+ const dbKey = columnMapping[key] || key;
2425
+ setClauses.push(`${parseSqlIdentifier(dbKey, "column name")} = ?`);
2426
+ let value = updatableFields[key];
2427
+ if (typeof value === "object" && value !== null) {
2428
+ value = JSON.stringify(value);
2429
+ }
2430
+ args.push(value);
2431
+ }
2432
+ }
2433
+ if (setClauses.length === 0) continue;
2434
+ args.push(id);
2435
+ const sql = `UPDATE ${TABLE_MESSAGES} SET ${setClauses.join(", ")} WHERE id = ?`;
2436
+ batchStatements.push({ sql, args });
2437
+ }
2438
+ if (batchStatements.length === 0) {
2439
+ return existingMessages;
2440
+ }
2441
+ const now = (/* @__PURE__ */ new Date()).toISOString();
2442
+ for (const threadId of threadIdsToUpdate) {
2443
+ if (threadId) {
2444
+ batchStatements.push({
2445
+ sql: `UPDATE ${TABLE_THREADS} SET updatedAt = ? WHERE id = ?`,
2446
+ args: [now, threadId]
2447
+ });
1912
2448
  }
1913
2449
  }
1914
- const orderBy = "startedAt DESC";
1915
- let count = 0;
2450
+ await this.#client.batch(batchStatements, "write");
2451
+ const updatedResult = await this.#client.execute({ sql: selectSql, args: messageIds });
2452
+ return updatedResult.rows.map((row) => this.parseRow(row));
2453
+ }
2454
+ async deleteMessages(messageIds) {
2455
+ if (!messageIds || messageIds.length === 0) {
2456
+ return;
2457
+ }
1916
2458
  try {
1917
- count = await this.operations.loadTotalCount({
1918
- tableName: TABLE_SPANS,
1919
- whereClause: { sql: actualWhereClause, args: whereClause.args }
1920
- });
2459
+ const BATCH_SIZE = 100;
2460
+ const threadIds = /* @__PURE__ */ new Set();
2461
+ const tx = await this.#client.transaction("write");
2462
+ try {
2463
+ for (let i = 0; i < messageIds.length; i += BATCH_SIZE) {
2464
+ const batch = messageIds.slice(i, i + BATCH_SIZE);
2465
+ const placeholders = batch.map(() => "?").join(",");
2466
+ const result = await tx.execute({
2467
+ sql: `SELECT DISTINCT thread_id FROM "${TABLE_MESSAGES}" WHERE id IN (${placeholders})`,
2468
+ args: batch
2469
+ });
2470
+ result.rows?.forEach((row) => {
2471
+ if (row.thread_id) threadIds.add(row.thread_id);
2472
+ });
2473
+ await tx.execute({
2474
+ sql: `DELETE FROM "${TABLE_MESSAGES}" WHERE id IN (${placeholders})`,
2475
+ args: batch
2476
+ });
2477
+ }
2478
+ if (threadIds.size > 0) {
2479
+ const now = (/* @__PURE__ */ new Date()).toISOString();
2480
+ for (const threadId of threadIds) {
2481
+ await tx.execute({
2482
+ sql: `UPDATE "${TABLE_THREADS}" SET "updatedAt" = ? WHERE id = ?`,
2483
+ args: [now, threadId]
2484
+ });
2485
+ }
2486
+ }
2487
+ await tx.commit();
2488
+ } catch (error) {
2489
+ await tx.rollback();
2490
+ throw error;
2491
+ }
1921
2492
  } catch (error) {
1922
2493
  throw new MastraError(
1923
2494
  {
1924
- id: "LIBSQL_STORE_GET_TRACES_PAGINATED_COUNT_FAILED",
2495
+ id: createStorageErrorId("LIBSQL", "DELETE_MESSAGES", "FAILED"),
1925
2496
  domain: ErrorDomain.STORAGE,
1926
- category: ErrorCategory.USER
2497
+ category: ErrorCategory.THIRD_PARTY,
2498
+ details: { messageIds: messageIds.join(", ") }
1927
2499
  },
1928
2500
  error
1929
2501
  );
1930
2502
  }
1931
- if (count === 0) {
1932
- return {
1933
- pagination: {
1934
- total: 0,
1935
- page,
1936
- perPage,
1937
- hasMore: false
1938
- },
1939
- spans: []
2503
+ }
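For completeness: deleteMessages above runs in a single write transaction, deletes ids in chunks of 100, and refreshes updatedAt on every thread that owned one of the removed messages.

// One call may span several threads; each affected thread gets a new updatedAt.
await memory.deleteMessages(['msg-1', 'msg-2', 'msg-3']);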
2504
+ async getResourceById({ resourceId }) {
2505
+ const result = await this.#db.select({
2506
+ tableName: TABLE_RESOURCES,
2507
+ keys: { id: resourceId }
2508
+ });
2509
+ if (!result) {
2510
+ return null;
2511
+ }
2512
+ return {
2513
+ ...result,
2514
+ // Ensure workingMemory is always returned as a string, even if auto-parsed as JSON
2515
+ workingMemory: result.workingMemory && typeof result.workingMemory === "object" ? JSON.stringify(result.workingMemory) : result.workingMemory,
2516
+ metadata: typeof result.metadata === "string" ? JSON.parse(result.metadata) : result.metadata,
2517
+ createdAt: new Date(result.createdAt),
2518
+ updatedAt: new Date(result.updatedAt)
2519
+ };
2520
+ }
2521
+ async saveResource({ resource }) {
2522
+ await this.#db.insert({
2523
+ tableName: TABLE_RESOURCES,
2524
+ record: {
2525
+ ...resource,
2526
+ metadata: JSON.stringify(resource.metadata)
2527
+ }
2528
+ });
2529
+ return resource;
2530
+ }
2531
+ async updateResource({
2532
+ resourceId,
2533
+ workingMemory,
2534
+ metadata
2535
+ }) {
2536
+ const existingResource = await this.getResourceById({ resourceId });
2537
+ if (!existingResource) {
2538
+ const newResource = {
2539
+ id: resourceId,
2540
+ workingMemory,
2541
+ metadata: metadata || {},
2542
+ createdAt: /* @__PURE__ */ new Date(),
2543
+ updatedAt: /* @__PURE__ */ new Date()
1940
2544
  };
2545
+ return this.saveResource({ resource: newResource });
1941
2546
  }
1942
- try {
1943
- const spans = await this.operations.loadMany({
1944
- tableName: TABLE_SPANS,
1945
- whereClause: {
1946
- sql: actualWhereClause,
1947
- args: whereClause.args
1948
- },
1949
- orderBy,
1950
- offset: page * perPage,
1951
- limit: perPage
2547
+ const updatedResource = {
2548
+ ...existingResource,
2549
+ workingMemory: workingMemory !== void 0 ? workingMemory : existingResource.workingMemory,
2550
+ metadata: {
2551
+ ...existingResource.metadata,
2552
+ ...metadata
2553
+ },
2554
+ updatedAt: /* @__PURE__ */ new Date()
2555
+ };
2556
+ const updates = [];
2557
+ const values = [];
2558
+ if (workingMemory !== void 0) {
2559
+ updates.push("workingMemory = ?");
2560
+ values.push(workingMemory);
2561
+ }
2562
+ if (metadata) {
2563
+ updates.push("metadata = ?");
2564
+ values.push(JSON.stringify(updatedResource.metadata));
2565
+ }
2566
+ updates.push("updatedAt = ?");
2567
+ values.push(updatedResource.updatedAt.toISOString());
2568
+ values.push(resourceId);
2569
+ await this.#client.execute({
2570
+ sql: `UPDATE ${TABLE_RESOURCES} SET ${updates.join(", ")} WHERE id = ?`,
2571
+ args: values
2572
+ });
2573
+ return updatedResource;
2574
+ }
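updateResource above behaves as an upsert: a missing resource is created through saveResource, otherwise only the provided fields are written, with metadata shallow-merged and workingMemory kept as a string. Sketch (ids are illustrative):

const resource = await memory.updateResource({
  resourceId: 'user-42',
  workingMemory: '# Notes\n- prefers short answers',
  metadata: { plan: 'pro' }, // merged into the existing metadata
});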
2575
+ async getThreadById({ threadId }) {
2576
+ try {
2577
+ const result = await this.#db.select({
2578
+ tableName: TABLE_THREADS,
2579
+ keys: { id: threadId }
1952
2580
  });
2581
+ if (!result) {
2582
+ return null;
2583
+ }
1953
2584
  return {
1954
- pagination: {
1955
- total: count,
1956
- page,
1957
- perPage,
1958
- hasMore: spans.length === perPage
1959
- },
1960
- spans: spans.map((span) => transformFromSqlRow({ tableName: TABLE_SPANS, sqlRow: span }))
2585
+ ...result,
2586
+ metadata: typeof result.metadata === "string" ? JSON.parse(result.metadata) : result.metadata,
2587
+ createdAt: new Date(result.createdAt),
2588
+ updatedAt: new Date(result.updatedAt)
1961
2589
  };
1962
2590
  } catch (error) {
1963
2591
  throw new MastraError(
1964
2592
  {
1965
- id: "LIBSQL_STORE_GET_TRACES_PAGINATED_FAILED",
2593
+ id: createStorageErrorId("LIBSQL", "GET_THREAD_BY_ID", "FAILED"),
1966
2594
  domain: ErrorDomain.STORAGE,
1967
- category: ErrorCategory.USER
2595
+ category: ErrorCategory.THIRD_PARTY,
2596
+ details: { threadId }
1968
2597
  },
1969
2598
  error
1970
2599
  );
1971
2600
  }
1972
2601
  }
1973
- async batchCreateSpans(args) {
1974
- try {
1975
- const now = (/* @__PURE__ */ new Date()).toISOString();
1976
- return this.operations.batchInsert({
1977
- tableName: TABLE_SPANS,
1978
- records: args.records.map((record) => ({
1979
- ...record,
1980
- createdAt: now,
1981
- updatedAt: now
1982
- }))
1983
- });
1984
- } catch (error) {
2602
+ async listThreadsByResourceId(args) {
2603
+ const { resourceId, page = 0, perPage: perPageInput, orderBy } = args;
2604
+ if (page < 0) {
1985
2605
  throw new MastraError(
1986
2606
  {
1987
- id: "LIBSQL_STORE_BATCH_CREATE_SPANS_FAILED",
2607
+ id: createStorageErrorId("LIBSQL", "LIST_THREADS_BY_RESOURCE_ID", "INVALID_PAGE"),
1988
2608
  domain: ErrorDomain.STORAGE,
1989
- category: ErrorCategory.USER
2609
+ category: ErrorCategory.USER,
2610
+ details: { page }
1990
2611
  },
1991
- error
2612
+ new Error("page must be >= 0")
1992
2613
  );
1993
2614
  }
1994
- }
1995
- async batchUpdateSpans(args) {
2615
+ const perPage = normalizePerPage(perPageInput, 100);
2616
+ const { offset, perPage: perPageForResponse } = calculatePagination(page, perPageInput, perPage);
2617
+ const { field, direction } = this.parseOrderBy(orderBy);
1996
2618
  try {
1997
- return this.operations.batchUpdate({
1998
- tableName: TABLE_SPANS,
1999
- updates: args.records.map((record) => ({
2000
- keys: { spanId: record.spanId, traceId: record.traceId },
2001
- data: { ...record.updates, updatedAt: (/* @__PURE__ */ new Date()).toISOString() }
2002
- }))
2619
+ const baseQuery = `FROM ${TABLE_THREADS} WHERE resourceId = ?`;
2620
+ const queryParams = [resourceId];
2621
+ const mapRowToStorageThreadType = (row) => ({
2622
+ id: row.id,
2623
+ resourceId: row.resourceId,
2624
+ title: row.title,
2625
+ createdAt: new Date(row.createdAt),
2626
+ // Convert string to Date
2627
+ updatedAt: new Date(row.updatedAt),
2628
+ // Convert string to Date
2629
+ metadata: typeof row.metadata === "string" ? JSON.parse(row.metadata) : row.metadata
2630
+ });
2631
+ const countResult = await this.#client.execute({
2632
+ sql: `SELECT COUNT(*) as count ${baseQuery}`,
2633
+ args: queryParams
2634
+ });
2635
+ const total = Number(countResult.rows?.[0]?.count ?? 0);
2636
+ if (total === 0) {
2637
+ return {
2638
+ threads: [],
2639
+ total: 0,
2640
+ page,
2641
+ perPage: perPageForResponse,
2642
+ hasMore: false
2643
+ };
2644
+ }
2645
+ const limitValue = perPageInput === false ? total : perPage;
2646
+ const dataResult = await this.#client.execute({
2647
+ sql: `SELECT * ${baseQuery} ORDER BY "${field}" ${direction} LIMIT ? OFFSET ?`,
2648
+ args: [...queryParams, limitValue, offset]
2003
2649
  });
2650
+ const threads = (dataResult.rows || []).map(mapRowToStorageThreadType);
2651
+ return {
2652
+ threads,
2653
+ total,
2654
+ page,
2655
+ perPage: perPageForResponse,
2656
+ hasMore: perPageInput === false ? false : offset + perPage < total
2657
+ };
2004
2658
  } catch (error) {
2005
- throw new MastraError(
2659
+ const mastraError = new MastraError(
2006
2660
  {
2007
- id: "LIBSQL_STORE_BATCH_UPDATE_SPANS_FAILED",
2661
+ id: createStorageErrorId("LIBSQL", "LIST_THREADS_BY_RESOURCE_ID", "FAILED"),
2008
2662
  domain: ErrorDomain.STORAGE,
2009
- category: ErrorCategory.USER
2663
+ category: ErrorCategory.THIRD_PARTY,
2664
+ details: { resourceId }
2010
2665
  },
2011
2666
  error
2012
2667
  );
2668
+ this.logger?.trackException?.(mastraError);
2669
+ this.logger?.error?.(mastraError.toString());
2670
+ return {
2671
+ threads: [],
2672
+ total: 0,
2673
+ page,
2674
+ perPage: perPageForResponse,
2675
+ hasMore: false
2676
+ };
2013
2677
  }
2014
2678
  }
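listThreadsByResourceId mirrors the other list methods: page/perPage pagination (perPage: false disables the limit), ordering resolved by parseOrderBy on the base class, and a non-throwing failure mode that logs the error and returns an empty page. Sketch:

const { threads, total, page, perPage, hasMore } = await memory.listThreadsByResourceId({
  resourceId: 'user-42',
  page: 0,
  perPage: 25,
});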
2015
- async batchDeleteTraces(args) {
2679
+ async saveThread({ thread }) {
2016
2680
  try {
2017
- const keys = args.traceIds.map((traceId) => ({ traceId }));
2018
- return this.operations.batchDelete({
2019
- tableName: TABLE_SPANS,
2020
- keys
2681
+ await this.#db.insert({
2682
+ tableName: TABLE_THREADS,
2683
+ record: {
2684
+ ...thread,
2685
+ metadata: JSON.stringify(thread.metadata)
2686
+ }
2021
2687
  });
2688
+ return thread;
2022
2689
  } catch (error) {
2023
- throw new MastraError(
2690
+ const mastraError = new MastraError(
2024
2691
  {
2025
- id: "LIBSQL_STORE_BATCH_DELETE_TRACES_FAILED",
2692
+ id: createStorageErrorId("LIBSQL", "SAVE_THREAD", "FAILED"),
2026
2693
  domain: ErrorDomain.STORAGE,
2027
- category: ErrorCategory.USER
2694
+ category: ErrorCategory.THIRD_PARTY,
2695
+ details: { threadId: thread.id }
2028
2696
  },
2029
2697
  error
2030
2698
  );
2699
+ this.logger?.trackException?.(mastraError);
2700
+ this.logger?.error?.(mastraError.toString());
2701
+ throw mastraError;
2031
2702
  }
2032
2703
  }
2033
- };
2034
- var StoreOperationsLibSQL = class extends StoreOperations {
2035
- client;
2036
- /**
2037
- * Maximum number of retries for write operations if an SQLITE_BUSY error occurs.
2038
- * @default 5
2039
- */
2040
- maxRetries;
2041
- /**
2042
- * Initial backoff time in milliseconds for retrying write operations on SQLITE_BUSY.
2043
- * The backoff time will double with each retry (exponential backoff).
2044
- * @default 100
2045
- */
2046
- initialBackoffMs;
2047
- constructor({
2048
- client,
2049
- maxRetries,
2050
- initialBackoffMs
2051
- }) {
2052
- super();
2053
- this.client = client;
2054
- this.maxRetries = maxRetries ?? 5;
2055
- this.initialBackoffMs = initialBackoffMs ?? 100;
2056
- }
2057
- async hasColumn(table, column) {
2058
- const result = await this.client.execute({
2059
- sql: `PRAGMA table_info(${table})`
2060
- });
2061
- return (await result.rows)?.some((row) => row.name === column);
2062
- }
2063
- getCreateTableSQL(tableName, schema) {
2064
- const parsedTableName = parseSqlIdentifier(tableName, "table name");
2065
- const columns = Object.entries(schema).map(([name, col]) => {
2066
- const parsedColumnName = parseSqlIdentifier(name, "column name");
2067
- let type = col.type.toUpperCase();
2068
- if (type === "TEXT") type = "TEXT";
2069
- if (type === "TIMESTAMP") type = "TEXT";
2070
- const nullable = col.nullable ? "" : "NOT NULL";
2071
- const primaryKey = col.primaryKey ? "PRIMARY KEY" : "";
2072
- return `${parsedColumnName} ${type} ${nullable} ${primaryKey}`.trim();
2073
- });
2074
- if (tableName === TABLE_WORKFLOW_SNAPSHOT) {
2075
- const stmnt = `CREATE TABLE IF NOT EXISTS ${parsedTableName} (
2076
- ${columns.join(",\n")},
2077
- PRIMARY KEY (workflow_name, run_id)
2078
- )`;
2079
- return stmnt;
2080
- }
2081
- if (tableName === TABLE_SPANS) {
2082
- const stmnt = `CREATE TABLE IF NOT EXISTS ${parsedTableName} (
2083
- ${columns.join(",\n")},
2084
- PRIMARY KEY (traceId, spanId)
2085
- )`;
2086
- return stmnt;
2087
- }
2088
- return `CREATE TABLE IF NOT EXISTS ${parsedTableName} (${columns.join(", ")})`;
2089
- }
2090
- async createTable({
2091
- tableName,
2092
- schema
2704
+ async updateThread({
2705
+ id,
2706
+ title,
2707
+ metadata
2093
2708
  }) {
2709
+ const thread = await this.getThreadById({ threadId: id });
2710
+ if (!thread) {
2711
+ throw new MastraError({
2712
+ id: createStorageErrorId("LIBSQL", "UPDATE_THREAD", "NOT_FOUND"),
2713
+ domain: ErrorDomain.STORAGE,
2714
+ category: ErrorCategory.USER,
2715
+ text: `Thread ${id} not found`,
2716
+ details: {
2717
+ status: 404,
2718
+ threadId: id
2719
+ }
2720
+ });
2721
+ }
2722
+ const updatedThread = {
2723
+ ...thread,
2724
+ title,
2725
+ metadata: {
2726
+ ...thread.metadata,
2727
+ ...metadata
2728
+ }
2729
+ };
2094
2730
  try {
2095
- this.logger.debug(`Creating database table`, { tableName, operation: "schema init" });
2096
- const sql = this.getCreateTableSQL(tableName, schema);
2097
- await this.client.execute(sql);
2731
+ await this.#client.execute({
2732
+ sql: `UPDATE ${TABLE_THREADS} SET title = ?, metadata = ? WHERE id = ?`,
2733
+ args: [title, JSON.stringify(updatedThread.metadata), id]
2734
+ });
2735
+ return updatedThread;
2098
2736
  } catch (error) {
2099
2737
  throw new MastraError(
2100
2738
  {
2101
- id: "LIBSQL_STORE_CREATE_TABLE_FAILED",
2739
+ id: createStorageErrorId("LIBSQL", "UPDATE_THREAD", "FAILED"),
2102
2740
  domain: ErrorDomain.STORAGE,
2103
2741
  category: ErrorCategory.THIRD_PARTY,
2104
- details: {
2105
- tableName
2106
- }
2742
+ text: `Failed to update thread ${id}`,
2743
+ details: { threadId: id }
2107
2744
  },
2108
2745
  error
2109
2746
  );
2110
2747
  }
2111
2748
  }
2112
- getSqlType(type) {
2113
- switch (type) {
2114
- case "bigint":
2115
- return "INTEGER";
2116
- // SQLite uses INTEGER for all integer sizes
2117
- case "jsonb":
2118
- return "TEXT";
2119
- // Store JSON as TEXT in SQLite
2120
- default:
2121
- return super.getSqlType(type);
2122
- }
2123
- }
2124
- async doInsert({
2125
- tableName,
2126
- record
2127
- }) {
2128
- await this.client.execute(
2129
- prepareStatement({
2130
- tableName,
2131
- record
2132
- })
2133
- );
2134
- }
2135
- insert(args) {
2136
- const executeWriteOperationWithRetry = createExecuteWriteOperationWithRetry({
2137
- logger: this.logger,
2138
- maxRetries: this.maxRetries,
2139
- initialBackoffMs: this.initialBackoffMs
2140
- });
2141
- return executeWriteOperationWithRetry(() => this.doInsert(args), `insert into table ${args.tableName}`);
2142
- }
2143
- async load({ tableName, keys }) {
2144
- const parsedTableName = parseSqlIdentifier(tableName, "table name");
2145
- const parsedKeys = Object.keys(keys).map((key) => parseSqlIdentifier(key, "column name"));
2146
- const conditions = parsedKeys.map((key) => `${key} = ?`).join(" AND ");
2147
- const values = Object.values(keys);
2148
- const result = await this.client.execute({
2149
- sql: `SELECT * FROM ${parsedTableName} WHERE ${conditions} ORDER BY createdAt DESC LIMIT 1`,
2150
- args: values
2151
- });
2152
- if (!result.rows || result.rows.length === 0) {
2153
- return null;
2154
- }
2155
- const row = result.rows[0];
2156
- const parsed = Object.fromEntries(
2157
- Object.entries(row || {}).map(([k, v]) => {
2158
- try {
2159
- return [k, typeof v === "string" ? v.startsWith("{") || v.startsWith("[") ? JSON.parse(v) : v : v];
2160
- } catch {
2161
- return [k, v];
2162
- }
2163
- })
2164
- );
2165
- return parsed;
2166
- }
2167
- async loadMany({
2168
- tableName,
2169
- whereClause,
2170
- orderBy,
2171
- offset,
2172
- limit,
2173
- args
2174
- }) {
2175
- const parsedTableName = parseSqlIdentifier(tableName, "table name");
2176
- let statement = `SELECT * FROM ${parsedTableName}`;
2177
- if (whereClause?.sql) {
2178
- statement += `${whereClause.sql}`;
2179
- }
2180
- if (orderBy) {
2181
- statement += ` ORDER BY ${orderBy}`;
2182
- }
2183
- if (limit) {
2184
- statement += ` LIMIT ${limit}`;
2185
- }
2186
- if (offset) {
2187
- statement += ` OFFSET ${offset}`;
2749
+ async deleteThread({ threadId }) {
2750
+ try {
2751
+ await this.#client.execute({
2752
+ sql: `DELETE FROM ${TABLE_MESSAGES} WHERE thread_id = ?`,
2753
+ args: [threadId]
2754
+ });
2755
+ await this.#client.execute({
2756
+ sql: `DELETE FROM ${TABLE_THREADS} WHERE id = ?`,
2757
+ args: [threadId]
2758
+ });
2759
+ } catch (error) {
2760
+ throw new MastraError(
2761
+ {
2762
+ id: createStorageErrorId("LIBSQL", "DELETE_THREAD", "FAILED"),
2763
+ domain: ErrorDomain.STORAGE,
2764
+ category: ErrorCategory.THIRD_PARTY,
2765
+ details: { threadId }
2766
+ },
2767
+ error
2768
+ );
2188
2769
  }
2189
- const result = await this.client.execute({
2190
- sql: statement,
2191
- args: [...whereClause?.args ?? [], ...args ?? []]
2192
- });
2193
- return result.rows;
2194
2770
  }
2195
- async loadTotalCount({
2196
- tableName,
2197
- whereClause
2198
- }) {
2199
- const parsedTableName = parseSqlIdentifier(tableName, "table name");
2200
- const statement = `SELECT COUNT(*) as count FROM ${parsedTableName} ${whereClause ? `${whereClause.sql}` : ""}`;
2201
- const result = await this.client.execute({
2202
- sql: statement,
2203
- args: whereClause?.args ?? []
2204
- });
2205
- if (!result.rows || result.rows.length === 0) {
2206
- return 0;
2207
- }
2208
- return result.rows[0]?.count ?? 0;
2771
+ };
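The memory domain is now a self-contained class: it resolves its own libsql client and wraps it in LibSQLDB instead of receiving a shared StoreOperations instance. A construction sketch; that the class is re-exported from the package entry and that resolveClient accepts createClient-style connection options are assumptions, since neither is shown in this hunk:

import { MemoryLibSQL } from '@mastra/libsql'; // assumed export from the entry point

const memory = new MemoryLibSQL({
  url: 'file:./mastra-memory.db', // assumed: connection options forwarded to @libsql/client
  maxRetries: 5, // retry settings handed to LibSQLDB by the constructor above
  initialBackoffMs: 100,
});
await memory.init(); // creates the threads, messages and resources tables
// await memory.dangerouslyClearAll(); // wipes all three tables; use with care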
2772
+ var ObservabilityLibSQL = class extends ObservabilityStorage {
2773
+ #db;
2774
+ constructor(config) {
2775
+ super();
2776
+ const client = resolveClient(config);
2777
+ this.#db = new LibSQLDB({ client, maxRetries: config.maxRetries, initialBackoffMs: config.initialBackoffMs });
2209
2778
  }
2210
- update(args) {
2211
- const executeWriteOperationWithRetry = createExecuteWriteOperationWithRetry({
2212
- logger: this.logger,
2213
- maxRetries: this.maxRetries,
2214
- initialBackoffMs: this.initialBackoffMs
2215
- });
2216
- return executeWriteOperationWithRetry(() => this.executeUpdate(args), `update table ${args.tableName}`);
2779
+ async init() {
2780
+ await this.#db.createTable({ tableName: TABLE_SPANS, schema: SPAN_SCHEMA });
2217
2781
  }
2218
- async executeUpdate({
2219
- tableName,
2220
- keys,
2221
- data
2222
- }) {
2223
- await this.client.execute(prepareUpdateStatement({ tableName, updates: data, keys }));
2782
+ async dangerouslyClearAll() {
2783
+ await this.#db.deleteData({ tableName: TABLE_SPANS });
2224
2784
  }
2225
- async doBatchInsert({
2226
- tableName,
2227
- records
2228
- }) {
2229
- if (records.length === 0) return;
2230
- const batchStatements = records.map((r) => prepareStatement({ tableName, record: r }));
2231
- await this.client.batch(batchStatements, "write");
2785
+ get tracingStrategy() {
2786
+ return {
2787
+ preferred: "batch-with-updates",
2788
+ supported: ["batch-with-updates", "insert-only"]
2789
+ };
2232
2790
  }
2233
- batchInsert(args) {
2234
- const executeWriteOperationWithRetry = createExecuteWriteOperationWithRetry({
2235
- logger: this.logger,
2236
- maxRetries: this.maxRetries,
2237
- initialBackoffMs: this.initialBackoffMs
2238
- });
2239
- return executeWriteOperationWithRetry(
2240
- () => this.doBatchInsert(args),
2241
- `batch insert into table ${args.tableName}`
2242
- ).catch((error) => {
2791
+ async createSpan(args) {
2792
+ const { span } = args;
2793
+ try {
2794
+ const startedAt = span.startedAt instanceof Date ? span.startedAt.toISOString() : span.startedAt;
2795
+ const endedAt = span.endedAt instanceof Date ? span.endedAt.toISOString() : span.endedAt;
2796
+ const now = (/* @__PURE__ */ new Date()).toISOString();
2797
+ const record = {
2798
+ ...span,
2799
+ startedAt,
2800
+ endedAt,
2801
+ createdAt: now,
2802
+ updatedAt: now
2803
+ };
2804
+ return this.#db.insert({ tableName: TABLE_SPANS, record });
2805
+ } catch (error) {
2243
2806
  throw new MastraError(
2244
2807
  {
2245
- id: "LIBSQL_STORE_BATCH_INSERT_FAILED",
2808
+ id: createStorageErrorId("LIBSQL", "CREATE_SPAN", "FAILED"),
2246
2809
  domain: ErrorDomain.STORAGE,
2247
- category: ErrorCategory.THIRD_PARTY,
2810
+ category: ErrorCategory.USER,
2248
2811
  details: {
2249
- tableName: args.tableName
2812
+ spanId: span.spanId,
2813
+ traceId: span.traceId,
2814
+ spanType: span.spanType,
2815
+ name: span.name
2250
2816
  }
2251
2817
  },
2252
2818
  error
2253
2819
  );
2254
- });
2820
+ }
2255
2821
  }
2256
- /**
2257
- * Public batch update method with retry logic
2258
- */
2259
- batchUpdate(args) {
2260
- const executeWriteOperationWithRetry = createExecuteWriteOperationWithRetry({
2261
- logger: this.logger,
2262
- maxRetries: this.maxRetries,
2263
- initialBackoffMs: this.initialBackoffMs
2264
- });
2265
- return executeWriteOperationWithRetry(
2266
- () => this.executeBatchUpdate(args),
2267
- `batch update in table ${args.tableName}`
2268
- ).catch((error) => {
2822
+ async getSpan(args) {
2823
+ const { traceId, spanId } = args;
2824
+ try {
2825
+ const rows = await this.#db.selectMany({
2826
+ tableName: TABLE_SPANS,
2827
+ whereClause: { sql: " WHERE traceId = ? AND spanId = ?", args: [traceId, spanId] },
2828
+ limit: 1
2829
+ });
2830
+ if (!rows || rows.length === 0) {
2831
+ return null;
2832
+ }
2833
+ return {
2834
+ span: transformFromSqlRow({ tableName: TABLE_SPANS, sqlRow: rows[0] })
2835
+ };
2836
+ } catch (error) {
2269
2837
  throw new MastraError(
2270
2838
  {
2271
- id: "LIBSQL_STORE_BATCH_UPDATE_FAILED",
2839
+ id: createStorageErrorId("LIBSQL", "GET_SPAN", "FAILED"),
2272
2840
  domain: ErrorDomain.STORAGE,
2273
- category: ErrorCategory.THIRD_PARTY,
2274
- details: {
2275
- tableName: args.tableName
2276
- }
2841
+ category: ErrorCategory.USER,
2842
+ details: { traceId, spanId }
2277
2843
  },
2278
2844
  error
2279
2845
  );
2280
- });
2846
+ }
2281
2847
  }
2282
- /**
2283
- * Updates multiple records in batch. Each record can be updated based on single or composite keys.
2284
- */
2285
- async executeBatchUpdate({
2286
- tableName,
2287
- updates
2288
- }) {
2289
- if (updates.length === 0) return;
2290
- const batchStatements = updates.map(
2291
- ({ keys, data }) => prepareUpdateStatement({
2292
- tableName,
2293
- updates: data,
2294
- keys
2295
- })
2296
- );
2297
- await this.client.batch(batchStatements, "write");
2848
+ async getRootSpan(args) {
2849
+ const { traceId } = args;
2850
+ try {
2851
+ const rows = await this.#db.selectMany({
2852
+ tableName: TABLE_SPANS,
2853
+ whereClause: { sql: " WHERE traceId = ? AND parentSpanId IS NULL", args: [traceId] },
2854
+ limit: 1
2855
+ });
2856
+ if (!rows || rows.length === 0) {
2857
+ return null;
2858
+ }
2859
+ return {
2860
+ span: transformFromSqlRow({ tableName: TABLE_SPANS, sqlRow: rows[0] })
2861
+ };
2862
+ } catch (error) {
2863
+ throw new MastraError(
2864
+ {
2865
+ id: createStorageErrorId("LIBSQL", "GET_ROOT_SPAN", "FAILED"),
2866
+ domain: ErrorDomain.STORAGE,
2867
+ category: ErrorCategory.USER,
2868
+ details: { traceId }
2869
+ },
2870
+ error
2871
+ );
2872
+ }
2298
2873
  }
2299
- /**
2300
- * Public batch delete method with retry logic
2301
- */
2302
- batchDelete({ tableName, keys }) {
2303
- const executeWriteOperationWithRetry = createExecuteWriteOperationWithRetry({
2304
- logger: this.logger,
2305
- maxRetries: this.maxRetries,
2306
- initialBackoffMs: this.initialBackoffMs
2307
- });
2308
- return executeWriteOperationWithRetry(
2309
- () => this.executeBatchDelete({ tableName, keys }),
2310
- `batch delete from table ${tableName}`
2311
- ).catch((error) => {
2874
+ async getTrace(args) {
2875
+ const { traceId } = args;
2876
+ try {
2877
+ const spans = await this.#db.selectMany({
2878
+ tableName: TABLE_SPANS,
2879
+ whereClause: { sql: " WHERE traceId = ?", args: [traceId] },
2880
+ orderBy: "startedAt ASC"
2881
+ });
2882
+ if (!spans || spans.length === 0) {
2883
+ return null;
2884
+ }
2885
+ return {
2886
+ traceId,
2887
+ spans: spans.map((span) => transformFromSqlRow({ tableName: TABLE_SPANS, sqlRow: span }))
2888
+ };
2889
+ } catch (error) {
2312
2890
  throw new MastraError(
2313
2891
  {
2314
- id: "LIBSQL_STORE_BATCH_DELETE_FAILED",
2892
+ id: createStorageErrorId("LIBSQL", "GET_TRACE", "FAILED"),
2315
2893
  domain: ErrorDomain.STORAGE,
2316
- category: ErrorCategory.THIRD_PARTY,
2894
+ category: ErrorCategory.USER,
2317
2895
  details: {
2318
- tableName
2896
+ traceId
2319
2897
  }
2320
2898
  },
2321
2899
  error
2322
2900
  );
2323
- });
2901
+ }
2324
2902
  }
2325
- /**
2326
- * Deletes multiple records in batch. Each record can be deleted based on single or composite keys.
2327
- */
2328
- async executeBatchDelete({
2329
- tableName,
2330
- keys
2331
- }) {
2332
- if (keys.length === 0) return;
2333
- const batchStatements = keys.map(
2334
- (keyObj) => prepareDeleteStatement({
2335
- tableName,
2336
- keys: keyObj
2337
- })
2338
- );
2339
- await this.client.batch(batchStatements, "write");
2903
+ async updateSpan(args) {
2904
+ const { traceId, spanId, updates } = args;
2905
+ try {
2906
+ const data = { ...updates };
2907
+ if (data.endedAt instanceof Date) {
2908
+ data.endedAt = data.endedAt.toISOString();
2909
+ }
2910
+ if (data.startedAt instanceof Date) {
2911
+ data.startedAt = data.startedAt.toISOString();
2912
+ }
2913
+ data.updatedAt = (/* @__PURE__ */ new Date()).toISOString();
2914
+ await this.#db.update({
2915
+ tableName: TABLE_SPANS,
2916
+ keys: { spanId, traceId },
2917
+ data
2918
+ });
2919
+ } catch (error) {
2920
+ throw new MastraError(
2921
+ {
2922
+ id: createStorageErrorId("LIBSQL", "UPDATE_SPAN", "FAILED"),
2923
+ domain: ErrorDomain.STORAGE,
2924
+ category: ErrorCategory.USER,
2925
+ details: {
2926
+ spanId,
2927
+ traceId
2928
+ }
2929
+ },
2930
+ error
2931
+ );
2932
+ }
2340
2933
  }
2341
- /**
2342
- * Alters table schema to add columns if they don't exist
2343
- * @param tableName Name of the table
2344
- * @param schema Schema of the table
2345
- * @param ifNotExists Array of column names to add if they don't exist
2346
- */
2347
- async alterTable({
2348
- tableName,
2349
- schema,
2350
- ifNotExists
2351
- }) {
2352
- const parsedTableName = parseSqlIdentifier(tableName, "table name");
2934
+ async listTraces(args) {
2935
+ const { filters, pagination, orderBy } = listTracesArgsSchema.parse(args);
2936
+ const { page, perPage } = pagination;
2937
+ const tableName = parseSqlIdentifier(TABLE_SPANS, "table name");
2353
2938
  try {
2354
- const pragmaQuery = `PRAGMA table_info(${parsedTableName})`;
2355
- const result = await this.client.execute(pragmaQuery);
2356
- const existingColumnNames = new Set(result.rows.map((row) => row.name.toLowerCase()));
2357
- for (const columnName of ifNotExists) {
2358
- if (!existingColumnNames.has(columnName.toLowerCase()) && schema[columnName]) {
2359
- const columnDef = schema[columnName];
2360
- const sqlType = this.getSqlType(columnDef.type);
2361
- const nullable = columnDef.nullable === false ? "NOT NULL" : "";
2362
- const defaultValue = columnDef.nullable === false ? this.getDefaultValue(columnDef.type) : "";
2363
- const alterSql = `ALTER TABLE ${parsedTableName} ADD COLUMN "${columnName}" ${sqlType} ${nullable} ${defaultValue}`.trim();
2364
- await this.client.execute(alterSql);
2365
- this.logger?.debug?.(`Added column ${columnName} to table ${parsedTableName}`);
2939
+ const conditions = ["parentSpanId IS NULL"];
2940
+ const queryArgs = [];
2941
+ if (filters) {
2942
+ if (filters.startedAt?.start) {
2943
+ conditions.push(`startedAt >= ?`);
2944
+ queryArgs.push(filters.startedAt.start.toISOString());
2945
+ }
2946
+ if (filters.startedAt?.end) {
2947
+ conditions.push(`startedAt <= ?`);
2948
+ queryArgs.push(filters.startedAt.end.toISOString());
2366
2949
  }
2950
+ if (filters.endedAt?.start) {
2951
+ conditions.push(`endedAt >= ?`);
2952
+ queryArgs.push(filters.endedAt.start.toISOString());
2953
+ }
2954
+ if (filters.endedAt?.end) {
2955
+ conditions.push(`endedAt <= ?`);
2956
+ queryArgs.push(filters.endedAt.end.toISOString());
2957
+ }
2958
+ if (filters.spanType !== void 0) {
2959
+ conditions.push(`spanType = ?`);
2960
+ queryArgs.push(filters.spanType);
2961
+ }
2962
+ if (filters.entityType !== void 0) {
2963
+ conditions.push(`entityType = ?`);
2964
+ queryArgs.push(filters.entityType);
2965
+ }
2966
+ if (filters.entityId !== void 0) {
2967
+ conditions.push(`entityId = ?`);
2968
+ queryArgs.push(filters.entityId);
2969
+ }
2970
+ if (filters.entityName !== void 0) {
2971
+ conditions.push(`entityName = ?`);
2972
+ queryArgs.push(filters.entityName);
2973
+ }
2974
+ if (filters.userId !== void 0) {
2975
+ conditions.push(`userId = ?`);
2976
+ queryArgs.push(filters.userId);
2977
+ }
2978
+ if (filters.organizationId !== void 0) {
2979
+ conditions.push(`organizationId = ?`);
2980
+ queryArgs.push(filters.organizationId);
2981
+ }
2982
+ if (filters.resourceId !== void 0) {
2983
+ conditions.push(`resourceId = ?`);
2984
+ queryArgs.push(filters.resourceId);
2985
+ }
2986
+ if (filters.runId !== void 0) {
2987
+ conditions.push(`runId = ?`);
2988
+ queryArgs.push(filters.runId);
2989
+ }
2990
+ if (filters.sessionId !== void 0) {
2991
+ conditions.push(`sessionId = ?`);
2992
+ queryArgs.push(filters.sessionId);
2993
+ }
2994
+ if (filters.threadId !== void 0) {
2995
+ conditions.push(`threadId = ?`);
2996
+ queryArgs.push(filters.threadId);
2997
+ }
2998
+ if (filters.requestId !== void 0) {
2999
+ conditions.push(`requestId = ?`);
3000
+ queryArgs.push(filters.requestId);
3001
+ }
3002
+ if (filters.environment !== void 0) {
3003
+ conditions.push(`environment = ?`);
3004
+ queryArgs.push(filters.environment);
3005
+ }
3006
+ if (filters.source !== void 0) {
3007
+ conditions.push(`source = ?`);
3008
+ queryArgs.push(filters.source);
3009
+ }
3010
+ if (filters.serviceName !== void 0) {
3011
+ conditions.push(`serviceName = ?`);
3012
+ queryArgs.push(filters.serviceName);
3013
+ }
3014
+ if (filters.scope != null) {
3015
+ for (const [key, value] of Object.entries(filters.scope)) {
3016
+ if (!/^[a-zA-Z_][a-zA-Z0-9_]*$/.test(key)) {
3017
+ throw new MastraError({
3018
+ id: createStorageErrorId("LIBSQL", "LIST_TRACES", "INVALID_FILTER_KEY"),
3019
+ domain: ErrorDomain.STORAGE,
3020
+ category: ErrorCategory.USER,
3021
+ details: { key }
3022
+ });
3023
+ }
3024
+ conditions.push(`json_extract(scope, '$.${key}') = ?`);
3025
+ queryArgs.push(typeof value === "string" ? value : JSON.stringify(value));
3026
+ }
3027
+ }
3028
+ if (filters.metadata != null) {
3029
+ for (const [key, value] of Object.entries(filters.metadata)) {
3030
+ if (!/^[a-zA-Z_][a-zA-Z0-9_]*$/.test(key)) {
3031
+ throw new MastraError({
3032
+ id: createStorageErrorId("LIBSQL", "LIST_TRACES", "INVALID_FILTER_KEY"),
3033
+ domain: ErrorDomain.STORAGE,
3034
+ category: ErrorCategory.USER,
3035
+ details: { key }
3036
+ });
3037
+ }
3038
+ conditions.push(`json_extract(metadata, '$.${key}') = ?`);
3039
+ queryArgs.push(typeof value === "string" ? value : JSON.stringify(value));
3040
+ }
3041
+ }
3042
+ if (filters.tags != null && filters.tags.length > 0) {
3043
+ for (const tag of filters.tags) {
3044
+ conditions.push(`EXISTS (SELECT 1 FROM json_each(${tableName}.tags) WHERE value = ?)`);
3045
+ queryArgs.push(tag);
3046
+ }
3047
+ }
3048
+ if (filters.status !== void 0) {
3049
+ switch (filters.status) {
3050
+ case TraceStatus.ERROR:
3051
+ conditions.push(`error IS NOT NULL`);
3052
+ break;
3053
+ case TraceStatus.RUNNING:
3054
+ conditions.push(`endedAt IS NULL AND error IS NULL`);
3055
+ break;
3056
+ case TraceStatus.SUCCESS:
3057
+ conditions.push(`endedAt IS NOT NULL AND error IS NULL`);
3058
+ break;
3059
+ }
3060
+ }
3061
+ if (filters.hasChildError !== void 0) {
3062
+ if (filters.hasChildError) {
3063
+ conditions.push(`EXISTS (
3064
+ SELECT 1 FROM ${tableName} c
3065
+ WHERE c.traceId = ${tableName}.traceId AND c.error IS NOT NULL
3066
+ )`);
3067
+ } else {
3068
+ conditions.push(`NOT EXISTS (
3069
+ SELECT 1 FROM ${tableName} c
3070
+ WHERE c.traceId = ${tableName}.traceId AND c.error IS NOT NULL
3071
+ )`);
3072
+ }
3073
+ }
3074
+ }
3075
+ const whereClause = conditions.length > 0 ? `WHERE ${conditions.join(" AND ")}` : "";
3076
+ const sortField = orderBy.field;
3077
+ const sortDirection = orderBy.direction;
3078
+ let orderByClause;
3079
+ if (sortField === "endedAt") {
3080
+ orderByClause = sortDirection === "DESC" ? `CASE WHEN ${sortField} IS NULL THEN 0 ELSE 1 END, ${sortField} DESC` : `CASE WHEN ${sortField} IS NULL THEN 1 ELSE 0 END, ${sortField} ASC`;
3081
+ } else {
3082
+ orderByClause = `${sortField} ${sortDirection}`;
3083
+ }
3084
+ const count = await this.#db.selectTotalCount({
3085
+ tableName: TABLE_SPANS,
3086
+ whereClause: { sql: whereClause, args: queryArgs }
3087
+ });
3088
+ if (count === 0) {
3089
+ return {
3090
+ pagination: {
3091
+ total: 0,
3092
+ page,
3093
+ perPage,
3094
+ hasMore: false
3095
+ },
3096
+ spans: []
3097
+ };
2367
3098
  }
3099
+ const spans = await this.#db.selectMany({
3100
+ tableName: TABLE_SPANS,
3101
+ whereClause: { sql: whereClause, args: queryArgs },
3102
+ orderBy: orderByClause,
3103
+ offset: page * perPage,
3104
+ limit: perPage
3105
+ });
3106
+ return {
3107
+ pagination: {
3108
+ total: count,
3109
+ page,
3110
+ perPage,
3111
+ hasMore: (page + 1) * perPage < count
3112
+ },
3113
+ spans: spans.map((span) => transformFromSqlRow({ tableName: TABLE_SPANS, sqlRow: span }))
3114
+ };
3115
+ } catch (error) {
3116
+ throw new MastraError(
3117
+ {
3118
+ id: createStorageErrorId("LIBSQL", "LIST_TRACES", "FAILED"),
3119
+ domain: ErrorDomain.STORAGE,
3120
+ category: ErrorCategory.USER
3121
+ },
3122
+ error
3123
+ );
3124
+ }
3125
+ }
3126
+ async batchCreateSpans(args) {
3127
+ try {
3128
+ const now = (/* @__PURE__ */ new Date()).toISOString();
3129
+ const records = args.records.map((record) => {
3130
+ const startedAt = record.startedAt instanceof Date ? record.startedAt.toISOString() : record.startedAt;
3131
+ const endedAt = record.endedAt instanceof Date ? record.endedAt.toISOString() : record.endedAt;
3132
+ return {
3133
+ ...record,
3134
+ startedAt,
3135
+ endedAt,
3136
+ createdAt: now,
3137
+ updatedAt: now
3138
+ };
3139
+ });
3140
+ return this.#db.batchInsert({
3141
+ tableName: TABLE_SPANS,
3142
+ records
3143
+ });
2368
3144
  } catch (error) {
2369
3145
  throw new MastraError(
2370
3146
  {
2371
- id: "LIBSQL_STORE_ALTER_TABLE_FAILED",
3147
+ id: createStorageErrorId("LIBSQL", "BATCH_CREATE_SPANS", "FAILED"),
2372
3148
  domain: ErrorDomain.STORAGE,
2373
- category: ErrorCategory.THIRD_PARTY,
2374
- details: {
2375
- tableName
2376
- }
3149
+ category: ErrorCategory.USER
2377
3150
  },
2378
3151
  error
2379
3152
  );
2380
3153
  }
2381
3154
  }
2382
- async clearTable({ tableName }) {
2383
- const parsedTableName = parseSqlIdentifier(tableName, "table name");
3155
+ async batchUpdateSpans(args) {
3156
+ const now = (/* @__PURE__ */ new Date()).toISOString();
2384
3157
  try {
2385
- await this.client.execute(`DELETE FROM ${parsedTableName}`);
2386
- } catch (e) {
2387
- const mastraError = new MastraError(
3158
+ return this.#db.batchUpdate({
3159
+ tableName: TABLE_SPANS,
3160
+ updates: args.records.map((record) => {
3161
+ const data = { ...record.updates };
3162
+ if (data.endedAt instanceof Date) {
3163
+ data.endedAt = data.endedAt.toISOString();
3164
+ }
3165
+ if (data.startedAt instanceof Date) {
3166
+ data.startedAt = data.startedAt.toISOString();
3167
+ }
3168
+ data.updatedAt = now;
3169
+ return {
3170
+ keys: { spanId: record.spanId, traceId: record.traceId },
3171
+ data
3172
+ };
3173
+ })
3174
+ });
3175
+ } catch (error) {
3176
+ throw new MastraError(
2388
3177
  {
2389
- id: "LIBSQL_STORE_CLEAR_TABLE_FAILED",
3178
+ id: createStorageErrorId("LIBSQL", "BATCH_UPDATE_SPANS", "FAILED"),
2390
3179
  domain: ErrorDomain.STORAGE,
2391
- category: ErrorCategory.THIRD_PARTY,
2392
- details: {
2393
- tableName
2394
- }
3180
+ category: ErrorCategory.USER
2395
3181
  },
2396
- e
3182
+ error
2397
3183
  );
2398
- this.logger?.trackException?.(mastraError);
2399
- this.logger?.error?.(mastraError.toString());
2400
3184
  }
2401
3185
  }
2402
- async dropTable({ tableName }) {
2403
- const parsedTableName = parseSqlIdentifier(tableName, "table name");
3186
+ async batchDeleteTraces(args) {
2404
3187
  try {
2405
- await this.client.execute(`DROP TABLE IF EXISTS ${parsedTableName}`);
2406
- } catch (e) {
3188
+ const keys = args.traceIds.map((traceId) => ({ traceId }));
3189
+ return this.#db.batchDelete({
3190
+ tableName: TABLE_SPANS,
3191
+ keys
3192
+ });
3193
+ } catch (error) {
2407
3194
  throw new MastraError(
2408
3195
  {
2409
- id: "LIBSQL_STORE_DROP_TABLE_FAILED",
3196
+ id: createStorageErrorId("LIBSQL", "BATCH_DELETE_TRACES", "FAILED"),
2410
3197
  domain: ErrorDomain.STORAGE,
2411
- category: ErrorCategory.THIRD_PARTY,
2412
- details: {
2413
- tableName
2414
- }
3198
+ category: ErrorCategory.USER
2415
3199
  },
2416
- e
3200
+ error
2417
3201
  );
2418
3202
  }
2419
3203
  }
2420
3204
  };
2421
3205
  var ScoresLibSQL = class extends ScoresStorage {
2422
- operations;
2423
- client;
2424
- constructor({ client, operations }) {
3206
+ #db;
3207
+ #client;
3208
+ constructor(config) {
2425
3209
  super();
2426
- this.operations = operations;
2427
- this.client = client;
3210
+ const client = resolveClient(config);
3211
+ this.#client = client;
3212
+ this.#db = new LibSQLDB({ client, maxRetries: config.maxRetries, initialBackoffMs: config.initialBackoffMs });
3213
+ }
3214
+ async init() {
3215
+ await this.#db.createTable({ tableName: TABLE_SCORERS, schema: SCORERS_SCHEMA });
3216
+ await this.#db.alterTable({
3217
+ tableName: TABLE_SCORERS,
3218
+ schema: SCORERS_SCHEMA,
3219
+ ifNotExists: ["spanId", "requestContext"]
3220
+ });
3221
+ }
3222
+ async dangerouslyClearAll() {
3223
+ await this.#db.deleteData({ tableName: TABLE_SCORERS });
2428
3224
  }
2429
3225
  async listScoresByRunId({
2430
3226
  runId,
@@ -2432,7 +3228,7 @@ var ScoresLibSQL = class extends ScoresStorage {
2432
3228
  }) {
2433
3229
  try {
2434
3230
  const { page, perPage: perPageInput } = pagination;
2435
- const countResult = await this.client.execute({
3231
+ const countResult = await this.#client.execute({
2436
3232
  sql: `SELECT COUNT(*) as count FROM ${TABLE_SCORERS} WHERE runId = ?`,
2437
3233
  args: [runId]
2438
3234
  });
@@ -2452,7 +3248,7 @@ var ScoresLibSQL = class extends ScoresStorage {
2452
3248
  const { offset: start, perPage: perPageForResponse } = calculatePagination(page, perPageInput, perPage);
2453
3249
  const limitValue = perPageInput === false ? total : perPage;
2454
3250
  const end = perPageInput === false ? total : start + perPage;
2455
- const result = await this.client.execute({
3251
+ const result = await this.#client.execute({
2456
3252
  sql: `SELECT * FROM ${TABLE_SCORERS} WHERE runId = ? ORDER BY createdAt DESC LIMIT ? OFFSET ?`,
2457
3253
  args: [runId, limitValue, start]
2458
3254
  });
@@ -2469,7 +3265,7 @@ var ScoresLibSQL = class extends ScoresStorage {
2469
3265
  } catch (error) {
2470
3266
  throw new MastraError(
2471
3267
  {
2472
- id: "LIBSQL_STORE_GET_SCORES_BY_RUN_ID_FAILED",
3268
+ id: createStorageErrorId("LIBSQL", "LIST_SCORES_BY_RUN_ID", "FAILED"),
2473
3269
  domain: ErrorDomain.STORAGE,
2474
3270
  category: ErrorCategory.THIRD_PARTY
2475
3271
  },
@@ -2505,7 +3301,7 @@ var ScoresLibSQL = class extends ScoresStorage {
2505
3301
  queryParams.push(source);
2506
3302
  }
2507
3303
  const whereClause = conditions.length > 0 ? `WHERE ${conditions.join(" AND ")}` : "";
2508
- const countResult = await this.client.execute({
3304
+ const countResult = await this.#client.execute({
2509
3305
  sql: `SELECT COUNT(*) as count FROM ${TABLE_SCORERS} ${whereClause}`,
2510
3306
  args: queryParams
2511
3307
  });
@@ -2525,7 +3321,7 @@ var ScoresLibSQL = class extends ScoresStorage {
2525
3321
  const { offset: start, perPage: perPageForResponse } = calculatePagination(page, perPageInput, perPage);
2526
3322
  const limitValue = perPageInput === false ? total : perPage;
2527
3323
  const end = perPageInput === false ? total : start + perPage;
2528
- const result = await this.client.execute({
3324
+ const result = await this.#client.execute({
2529
3325
  sql: `SELECT * FROM ${TABLE_SCORERS} ${whereClause} ORDER BY createdAt DESC LIMIT ? OFFSET ?`,
2530
3326
  args: [...queryParams, limitValue, start]
2531
3327
  });
@@ -2542,7 +3338,7 @@ var ScoresLibSQL = class extends ScoresStorage {
2542
3338
  } catch (error) {
2543
3339
  throw new MastraError(
2544
3340
  {
2545
- id: "LIBSQL_STORE_GET_SCORES_BY_SCORER_ID_FAILED",
3341
+ id: createStorageErrorId("LIBSQL", "LIST_SCORES_BY_SCORER_ID", "FAILED"),
2546
3342
  domain: ErrorDomain.STORAGE,
2547
3343
  category: ErrorCategory.THIRD_PARTY
2548
3344
  },
@@ -2550,48 +3346,17 @@ var ScoresLibSQL = class extends ScoresStorage {
2550
3346
  );
2551
3347
  }
2552
3348
  }
3349
+ /**
3350
+ * LibSQL-specific score row transformation.
3351
+ * Maps additionalLLMContext column to additionalContext field.
3352
+ */
2553
3353
  transformScoreRow(row) {
2554
- const scorerValue = safelyParseJSON(row.scorer);
2555
- const inputValue = safelyParseJSON(row.input ?? "{}");
2556
- const outputValue = safelyParseJSON(row.output ?? "{}");
2557
- const additionalLLMContextValue = row.additionalLLMContext ? safelyParseJSON(row.additionalLLMContext) : null;
2558
- const requestContextValue = row.requestContext ? safelyParseJSON(row.requestContext) : null;
2559
- const metadataValue = row.metadata ? safelyParseJSON(row.metadata) : null;
2560
- const entityValue = row.entity ? safelyParseJSON(row.entity) : null;
2561
- const preprocessStepResultValue = row.preprocessStepResult ? safelyParseJSON(row.preprocessStepResult) : null;
2562
- const analyzeStepResultValue = row.analyzeStepResult ? safelyParseJSON(row.analyzeStepResult) : null;
2563
- return {
2564
- id: row.id,
2565
- traceId: row.traceId,
2566
- spanId: row.spanId,
2567
- runId: row.runId,
2568
- scorer: scorerValue,
2569
- score: row.score,
2570
- reason: row.reason,
2571
- preprocessStepResult: preprocessStepResultValue,
2572
- analyzeStepResult: analyzeStepResultValue,
2573
- analyzePrompt: row.analyzePrompt,
2574
- preprocessPrompt: row.preprocessPrompt,
2575
- generateScorePrompt: row.generateScorePrompt,
2576
- generateReasonPrompt: row.generateReasonPrompt,
2577
- metadata: metadataValue,
2578
- input: inputValue,
2579
- output: outputValue,
2580
- additionalContext: additionalLLMContextValue,
2581
- requestContext: requestContextValue,
2582
- entityType: row.entityType,
2583
- entity: entityValue,
2584
- entityId: row.entityId,
2585
- scorerId: row.scorerId,
2586
- source: row.source,
2587
- resourceId: row.resourceId,
2588
- threadId: row.threadId,
2589
- createdAt: row.createdAt,
2590
- updatedAt: row.updatedAt
2591
- };
3354
+ return transformScoreRow(row, {
3355
+ fieldMappings: { additionalContext: "additionalLLMContext" }
3356
+ });
2592
3357
  }
2593
3358
  async getScoreById({ id }) {
2594
- const result = await this.client.execute({
3359
+ const result = await this.#client.execute({
2595
3360
  sql: `SELECT * FROM ${TABLE_SCORERS} WHERE id = ?`,
2596
3361
  args: [id]
2597
3362
  });
@@ -2604,15 +3369,15 @@ var ScoresLibSQL = class extends ScoresStorage {
2604
3369
  } catch (error) {
2605
3370
  throw new MastraError(
2606
3371
  {
2607
- id: "LIBSQL_STORE_SAVE_SCORE_FAILED_INVALID_SCORE_PAYLOAD",
3372
+ id: createStorageErrorId("LIBSQL", "SAVE_SCORE", "VALIDATION_FAILED"),
2608
3373
  domain: ErrorDomain.STORAGE,
2609
3374
  category: ErrorCategory.USER,
2610
3375
  details: {
2611
- scorer: score.scorer.id,
2612
- entityId: score.entityId,
2613
- entityType: score.entityType,
2614
- traceId: score.traceId || "",
2615
- spanId: score.spanId || ""
3376
+ scorer: typeof score.scorer?.id === "string" ? score.scorer.id : String(score.scorer?.id ?? "unknown"),
3377
+ entityId: score.entityId ?? "unknown",
3378
+ entityType: score.entityType ?? "unknown",
3379
+ traceId: score.traceId ?? "",
3380
+ spanId: score.spanId ?? ""
2616
3381
  }
2617
3382
  },
2618
3383
  error
@@ -2620,21 +3385,21 @@ var ScoresLibSQL = class extends ScoresStorage {
2620
3385
  }
2621
3386
  try {
2622
3387
  const id = crypto.randomUUID();
2623
- await this.operations.insert({
3388
+ const now = /* @__PURE__ */ new Date();
3389
+ await this.#db.insert({
2624
3390
  tableName: TABLE_SCORERS,
2625
3391
  record: {
3392
+ ...parsedScore,
2626
3393
  id,
2627
- createdAt: (/* @__PURE__ */ new Date()).toISOString(),
2628
- updatedAt: (/* @__PURE__ */ new Date()).toISOString(),
2629
- ...parsedScore
3394
+ createdAt: now.toISOString(),
3395
+ updatedAt: now.toISOString()
2630
3396
  }
2631
3397
  });
2632
- const scoreFromDb = await this.getScoreById({ id });
2633
- return { score: scoreFromDb };
3398
+ return { score: { ...parsedScore, id, createdAt: now, updatedAt: now } };
2634
3399
  } catch (error) {
2635
3400
  throw new MastraError(
2636
3401
  {
2637
- id: "LIBSQL_STORE_SAVE_SCORE_FAILED",
3402
+ id: createStorageErrorId("LIBSQL", "SAVE_SCORE", "FAILED"),
2638
3403
  domain: ErrorDomain.STORAGE,
2639
3404
  category: ErrorCategory.THIRD_PARTY
2640
3405
  },
@@ -2649,7 +3414,7 @@ var ScoresLibSQL = class extends ScoresStorage {
2649
3414
  }) {
2650
3415
  try {
2651
3416
  const { page, perPage: perPageInput } = pagination;
2652
- const countResult = await this.client.execute({
3417
+ const countResult = await this.#client.execute({
2653
3418
  sql: `SELECT COUNT(*) as count FROM ${TABLE_SCORERS} WHERE entityId = ? AND entityType = ?`,
2654
3419
  args: [entityId, entityType]
2655
3420
  });
@@ -2669,7 +3434,7 @@ var ScoresLibSQL = class extends ScoresStorage {
2669
3434
  const { offset: start, perPage: perPageForResponse } = calculatePagination(page, perPageInput, perPage);
2670
3435
  const limitValue = perPageInput === false ? total : perPage;
2671
3436
  const end = perPageInput === false ? total : start + perPage;
2672
- const result = await this.client.execute({
3437
+ const result = await this.#client.execute({
2673
3438
  sql: `SELECT * FROM ${TABLE_SCORERS} WHERE entityId = ? AND entityType = ? ORDER BY createdAt DESC LIMIT ? OFFSET ?`,
2674
3439
  args: [entityId, entityType, limitValue, start]
2675
3440
  });
@@ -2686,7 +3451,7 @@ var ScoresLibSQL = class extends ScoresStorage {
2686
3451
  } catch (error) {
2687
3452
  throw new MastraError(
2688
3453
  {
2689
- id: "LIBSQL_STORE_GET_SCORES_BY_ENTITY_ID_FAILED",
3454
+ id: createStorageErrorId("LIBSQL", "LIST_SCORES_BY_ENTITY_ID", "FAILED"),
2690
3455
  domain: ErrorDomain.STORAGE,
2691
3456
  category: ErrorCategory.THIRD_PARTY
2692
3457
  },
@@ -2703,14 +3468,14 @@ var ScoresLibSQL = class extends ScoresStorage {
2703
3468
  const { page, perPage: perPageInput } = pagination;
2704
3469
  const perPage = normalizePerPage(perPageInput, 100);
2705
3470
  const { offset: start, perPage: perPageForResponse } = calculatePagination(page, perPageInput, perPage);
2706
- const countSQLResult = await this.client.execute({
3471
+ const countSQLResult = await this.#client.execute({
2707
3472
  sql: `SELECT COUNT(*) as count FROM ${TABLE_SCORERS} WHERE traceId = ? AND spanId = ?`,
2708
3473
  args: [traceId, spanId]
2709
3474
  });
2710
3475
  const total = Number(countSQLResult.rows?.[0]?.count ?? 0);
2711
3476
  const limitValue = perPageInput === false ? total : perPage;
2712
3477
  const end = perPageInput === false ? total : start + perPage;
2713
- const result = await this.client.execute({
3478
+ const result = await this.#client.execute({
2714
3479
  sql: `SELECT * FROM ${TABLE_SCORERS} WHERE traceId = ? AND spanId = ? ORDER BY createdAt DESC LIMIT ? OFFSET ?`,
2715
3480
  args: [traceId, spanId, limitValue, start]
2716
3481
  });
@@ -2727,7 +3492,7 @@ var ScoresLibSQL = class extends ScoresStorage {
2727
3492
  } catch (error) {
2728
3493
  throw new MastraError(
2729
3494
  {
2730
- id: "LIBSQL_STORE_GET_SCORES_BY_SPAN_FAILED",
3495
+ id: createStorageErrorId("LIBSQL", "LIST_SCORES_BY_SPAN", "FAILED"),
2731
3496
  domain: ErrorDomain.STORAGE,
2732
3497
  category: ErrorCategory.THIRD_PARTY
2733
3498
  },
@@ -2755,37 +3520,49 @@ function parseWorkflowRun(row) {
2755
3520
  };
2756
3521
  }
2757
3522
  var WorkflowsLibSQL = class extends WorkflowsStorage {
2758
- operations;
2759
- client;
2760
- maxRetries;
2761
- initialBackoffMs;
2762
- constructor({
2763
- operations,
2764
- client,
2765
- maxRetries = 5,
2766
- initialBackoffMs = 500
2767
- }) {
3523
+ #db;
3524
+ #client;
3525
+ executeWithRetry;
3526
+ constructor(config) {
2768
3527
  super();
2769
- this.operations = operations;
2770
- this.client = client;
2771
- this.maxRetries = maxRetries;
2772
- this.initialBackoffMs = initialBackoffMs;
3528
+ const client = resolveClient(config);
3529
+ const maxRetries = config.maxRetries ?? 5;
3530
+ const initialBackoffMs = config.initialBackoffMs ?? 500;
3531
+ this.#client = client;
3532
+ this.#db = new LibSQLDB({ client, maxRetries, initialBackoffMs });
3533
+ this.executeWithRetry = createExecuteWriteOperationWithRetry({
3534
+ logger: this.logger,
3535
+ maxRetries,
3536
+ initialBackoffMs
3537
+ });
2773
3538
  this.setupPragmaSettings().catch(
2774
3539
  (err) => this.logger.warn("LibSQL Workflows: Failed to setup PRAGMA settings.", err)
2775
3540
  );
2776
3541
  }
3542
+ async init() {
3543
+ const schema = TABLE_SCHEMAS[TABLE_WORKFLOW_SNAPSHOT];
3544
+ await this.#db.createTable({ tableName: TABLE_WORKFLOW_SNAPSHOT, schema });
3545
+ await this.#db.alterTable({
3546
+ tableName: TABLE_WORKFLOW_SNAPSHOT,
3547
+ schema,
3548
+ ifNotExists: ["resourceId"]
3549
+ });
3550
+ }
3551
+ async dangerouslyClearAll() {
3552
+ await this.#db.deleteData({ tableName: TABLE_WORKFLOW_SNAPSHOT });
3553
+ }
2777
3554
  async setupPragmaSettings() {
2778
3555
  try {
2779
- await this.client.execute("PRAGMA busy_timeout = 10000;");
3556
+ await this.#client.execute("PRAGMA busy_timeout = 10000;");
2780
3557
  this.logger.debug("LibSQL Workflows: PRAGMA busy_timeout=10000 set.");
2781
3558
  try {
2782
- await this.client.execute("PRAGMA journal_mode = WAL;");
3559
+ await this.#client.execute("PRAGMA journal_mode = WAL;");
2783
3560
  this.logger.debug("LibSQL Workflows: PRAGMA journal_mode=WAL set.");
2784
3561
  } catch {
2785
3562
  this.logger.debug("LibSQL Workflows: WAL mode not supported, using default journal mode.");
2786
3563
  }
2787
3564
  try {
2788
- await this.client.execute("PRAGMA synchronous = NORMAL;");
3565
+ await this.#client.execute("PRAGMA synchronous = NORMAL;");
2789
3566
  this.logger.debug("LibSQL Workflows: PRAGMA synchronous=NORMAL set.");
2790
3567
  } catch {
2791
3568
  this.logger.debug("LibSQL Workflows: Failed to set synchronous mode.");
@@ -2794,44 +3571,6 @@ var WorkflowsLibSQL = class extends WorkflowsStorage {
2794
3571
  this.logger.warn("LibSQL Workflows: Failed to set PRAGMA settings.", err);
2795
3572
  }
2796
3573
  }
2797
- async executeWithRetry(operation) {
2798
- let attempts = 0;
2799
- let backoff = this.initialBackoffMs;
2800
- while (attempts < this.maxRetries) {
2801
- try {
2802
- return await operation();
2803
- } catch (error) {
2804
- this.logger.debug("LibSQL Workflows: Error caught in retry loop", {
2805
- errorType: error.constructor.name,
2806
- errorCode: error.code,
2807
- errorMessage: error.message,
2808
- attempts,
2809
- maxRetries: this.maxRetries
2810
- });
2811
- const isLockError = error.code === "SQLITE_BUSY" || error.code === "SQLITE_LOCKED" || error.message?.toLowerCase().includes("database is locked") || error.message?.toLowerCase().includes("database table is locked") || error.message?.toLowerCase().includes("table is locked") || error.constructor.name === "SqliteError" && error.message?.toLowerCase().includes("locked");
2812
- if (isLockError) {
2813
- attempts++;
2814
- if (attempts >= this.maxRetries) {
2815
- this.logger.error(
2816
- `LibSQL Workflows: Operation failed after ${this.maxRetries} attempts due to database lock: ${error.message}`,
2817
- { error, attempts, maxRetries: this.maxRetries }
2818
- );
2819
- throw error;
2820
- }
2821
- this.logger.warn(
2822
- `LibSQL Workflows: Attempt ${attempts} failed due to database lock. Retrying in ${backoff}ms...`,
2823
- { errorMessage: error.message, attempts, backoff, maxRetries: this.maxRetries }
2824
- );
2825
- await new Promise((resolve) => setTimeout(resolve, backoff));
2826
- backoff *= 2;
2827
- } else {
2828
- this.logger.error("LibSQL Workflows: Non-lock error occurred, not retrying", { error });
2829
- throw error;
2830
- }
2831
- }
2832
- }
2833
- throw new Error("LibSQL Workflows: Max retries reached, but no error was re-thrown from the loop.");
2834
- }
2835
3574
  async updateWorkflowResults({
2836
3575
  workflowName,
2837
3576
  runId,
@@ -2840,7 +3579,7 @@ var WorkflowsLibSQL = class extends WorkflowsStorage {
2840
3579
  requestContext
2841
3580
  }) {
2842
3581
  return this.executeWithRetry(async () => {
2843
- const tx = await this.client.transaction("write");
3582
+ const tx = await this.#client.transaction("write");
2844
3583
  try {
2845
3584
  const existingSnapshotResult = await tx.execute({
2846
3585
  sql: `SELECT snapshot FROM ${TABLE_WORKFLOW_SNAPSHOT} WHERE workflow_name = ? AND run_id = ?`,
@@ -2853,11 +3592,12 @@ var WorkflowsLibSQL = class extends WorkflowsStorage {
2853
3592
  activePaths: [],
2854
3593
  timestamp: Date.now(),
2855
3594
  suspendedPaths: {},
3595
+ activeStepsPath: {},
2856
3596
  resumeLabels: {},
2857
3597
  serializedStepGraph: [],
3598
+ status: "pending",
2858
3599
  value: {},
2859
3600
  waitingPaths: {},
2860
- status: "pending",
2861
3601
  runId,
2862
3602
  requestContext: {}
2863
3603
  };
@@ -2879,7 +3619,7 @@ var WorkflowsLibSQL = class extends WorkflowsStorage {
2879
3619
  }
2880
3620
  throw error;
2881
3621
  }
2882
- });
3622
+ }, "updateWorkflowResults");
2883
3623
  }
2884
3624
  async updateWorkflowState({
2885
3625
  workflowName,
@@ -2887,7 +3627,7 @@ var WorkflowsLibSQL = class extends WorkflowsStorage {
2887
3627
  opts
2888
3628
  }) {
2889
3629
  return this.executeWithRetry(async () => {
2890
- const tx = await this.client.transaction("write");
3630
+ const tx = await this.#client.transaction("write");
2891
3631
  try {
2892
3632
  const existingSnapshotResult = await tx.execute({
2893
3633
  sql: `SELECT snapshot FROM ${TABLE_WORKFLOW_SNAPSHOT} WHERE workflow_name = ? AND run_id = ?`,
@@ -2916,24 +3656,27 @@ var WorkflowsLibSQL = class extends WorkflowsStorage {
2916
3656
  }
2917
3657
  throw error;
2918
3658
  }
2919
- });
3659
+ }, "updateWorkflowState");
2920
3660
  }
2921
3661
  async persistWorkflowSnapshot({
2922
3662
  workflowName,
2923
3663
  runId,
2924
3664
  resourceId,
2925
- snapshot
3665
+ snapshot,
3666
+ createdAt,
3667
+ updatedAt
2926
3668
  }) {
3669
+ const now = /* @__PURE__ */ new Date();
2927
3670
  const data = {
2928
3671
  workflow_name: workflowName,
2929
3672
  run_id: runId,
2930
3673
  resourceId,
2931
3674
  snapshot,
2932
- createdAt: /* @__PURE__ */ new Date(),
2933
- updatedAt: /* @__PURE__ */ new Date()
3675
+ createdAt: createdAt ?? now,
3676
+ updatedAt: updatedAt ?? now
2934
3677
  };
2935
3678
  this.logger.debug("Persisting workflow snapshot", { workflowName, runId, data });
2936
- await this.operations.insert({
3679
+ await this.#db.insert({
2937
3680
  tableName: TABLE_WORKFLOW_SNAPSHOT,
2938
3681
  record: data
2939
3682
  });
@@ -2943,7 +3686,7 @@ var WorkflowsLibSQL = class extends WorkflowsStorage {
2943
3686
  runId
2944
3687
  }) {
2945
3688
  this.logger.debug("Loading workflow snapshot", { workflowName, runId });
2946
- const d = await this.operations.load({
3689
+ const d = await this.#db.select({
2947
3690
  tableName: TABLE_WORKFLOW_SNAPSHOT,
2948
3691
  keys: { workflow_name: workflowName, run_id: runId }
2949
3692
  });
@@ -2965,7 +3708,7 @@ var WorkflowsLibSQL = class extends WorkflowsStorage {
2965
3708
  }
2966
3709
  const whereClause = conditions.length > 0 ? `WHERE ${conditions.join(" AND ")}` : "";
2967
3710
  try {
2968
- const result = await this.client.execute({
3711
+ const result = await this.#client.execute({
2969
3712
  sql: `SELECT * FROM ${TABLE_WORKFLOW_SNAPSHOT} ${whereClause} ORDER BY createdAt DESC LIMIT 1`,
2970
3713
  args
2971
3714
  });
@@ -2976,7 +3719,7 @@ var WorkflowsLibSQL = class extends WorkflowsStorage {
2976
3719
  } catch (error) {
2977
3720
  throw new MastraError(
2978
3721
  {
2979
- id: "LIBSQL_STORE_GET_WORKFLOW_RUN_BY_ID_FAILED",
3722
+ id: createStorageErrorId("LIBSQL", "GET_WORKFLOW_RUN_BY_ID", "FAILED"),
2980
3723
  domain: ErrorDomain.STORAGE,
2981
3724
  category: ErrorCategory.THIRD_PARTY
2982
3725
  },
@@ -2984,13 +3727,34 @@ var WorkflowsLibSQL = class extends WorkflowsStorage {
2984
3727
  );
2985
3728
  }
2986
3729
  }
3730
+ async deleteWorkflowRunById({ runId, workflowName }) {
3731
+ return this.executeWithRetry(async () => {
3732
+ try {
3733
+ await this.#client.execute({
3734
+ sql: `DELETE FROM ${TABLE_WORKFLOW_SNAPSHOT} WHERE workflow_name = ? AND run_id = ?`,
3735
+ args: [workflowName, runId]
3736
+ });
3737
+ } catch (error) {
3738
+ throw new MastraError(
3739
+ {
3740
+ id: createStorageErrorId("LIBSQL", "DELETE_WORKFLOW_RUN_BY_ID", "FAILED"),
3741
+ domain: ErrorDomain.STORAGE,
3742
+ category: ErrorCategory.THIRD_PARTY,
3743
+ details: { runId, workflowName }
3744
+ },
3745
+ error
3746
+ );
3747
+ }
3748
+ }, "deleteWorkflowRunById");
3749
+ }
2987
3750
  async listWorkflowRuns({
2988
3751
  workflowName,
2989
3752
  fromDate,
2990
3753
  toDate,
2991
3754
  page,
2992
3755
  perPage,
2993
- resourceId
3756
+ resourceId,
3757
+ status
2994
3758
  } = {}) {
2995
3759
  try {
2996
3760
  const conditions = [];
@@ -2999,6 +3763,10 @@ var WorkflowsLibSQL = class extends WorkflowsStorage {
2999
3763
  conditions.push("workflow_name = ?");
3000
3764
  args.push(workflowName);
3001
3765
  }
3766
+ if (status) {
3767
+ conditions.push("json_extract(snapshot, '$.status') = ?");
3768
+ args.push(status);
3769
+ }
3002
3770
  if (fromDate) {
3003
3771
  conditions.push("createdAt >= ?");
3004
3772
  args.push(fromDate.toISOString());
@@ -3008,7 +3776,7 @@ var WorkflowsLibSQL = class extends WorkflowsStorage {
3008
3776
  args.push(toDate.toISOString());
3009
3777
  }
3010
3778
  if (resourceId) {
3011
- const hasResourceId = await this.operations.hasColumn(TABLE_WORKFLOW_SNAPSHOT, "resourceId");
3779
+ const hasResourceId = await this.#db.hasColumn(TABLE_WORKFLOW_SNAPSHOT, "resourceId");
3012
3780
  if (hasResourceId) {
3013
3781
  conditions.push("resourceId = ?");
3014
3782
  args.push(resourceId);
@@ -3020,7 +3788,7 @@ var WorkflowsLibSQL = class extends WorkflowsStorage {
3020
3788
  let total = 0;
3021
3789
  const usePagination = typeof perPage === "number" && typeof page === "number";
3022
3790
  if (usePagination) {
3023
- const countResult = await this.client.execute({
3791
+ const countResult = await this.#client.execute({
3024
3792
  sql: `SELECT COUNT(*) as count FROM ${TABLE_WORKFLOW_SNAPSHOT} ${whereClause}`,
3025
3793
  args
3026
3794
  });
@@ -3028,7 +3796,7 @@ var WorkflowsLibSQL = class extends WorkflowsStorage {
3028
3796
  }
3029
3797
  const normalizedPerPage = usePagination ? normalizePerPage(perPage, Number.MAX_SAFE_INTEGER) : 0;
3030
3798
  const offset = usePagination ? page * normalizedPerPage : 0;
3031
- const result = await this.client.execute({
3799
+ const result = await this.#client.execute({
3032
3800
  sql: `SELECT * FROM ${TABLE_WORKFLOW_SNAPSHOT} ${whereClause} ORDER BY createdAt DESC${usePagination ? ` LIMIT ? OFFSET ?` : ""}`,
3033
3801
  args: usePagination ? [...args, normalizedPerPage, offset] : args
3034
3802
  });
@@ -3037,7 +3805,7 @@ var WorkflowsLibSQL = class extends WorkflowsStorage {
3037
3805
  } catch (error) {
3038
3806
  throw new MastraError(
3039
3807
  {
3040
- id: "LIBSQL_STORE_LIST_WORKFLOW_RUNS_FAILED",
3808
+ id: createStorageErrorId("LIBSQL", "LIST_WORKFLOW_RUNS", "FAILED"),
3041
3809
  domain: ErrorDomain.STORAGE,
3042
3810
  category: ErrorCategory.THIRD_PARTY
3043
3811
  },
@@ -3057,7 +3825,7 @@ var LibSQLStore = class extends MastraStorage {
3057
3825
  if (!config.id || typeof config.id !== "string" || config.id.trim() === "") {
3058
3826
  throw new Error("LibSQLStore: id must be provided and cannot be empty.");
3059
3827
  }
3060
- super({ id: config.id, name: `LibSQLStore` });
3828
+ super({ id: config.id, name: `LibSQLStore`, disableInit: config.disableInit });
3061
3829
  this.maxRetries = config.maxRetries ?? 5;
3062
3830
  this.initialBackoffMs = config.initialBackoffMs ?? 100;
3063
3831
  if ("url" in config) {
@@ -3075,214 +3843,24 @@ var LibSQLStore = class extends MastraStorage {
3075
3843
  } else {
3076
3844
  this.client = config.client;
3077
3845
  }
3078
- const operations = new StoreOperationsLibSQL({
3846
+ const domainConfig = {
3079
3847
  client: this.client,
3080
3848
  maxRetries: this.maxRetries,
3081
3849
  initialBackoffMs: this.initialBackoffMs
3082
- });
3083
- const scores = new ScoresLibSQL({ client: this.client, operations });
3084
- const workflows = new WorkflowsLibSQL({ client: this.client, operations });
3085
- const memory = new MemoryLibSQL({ client: this.client, operations });
3086
- const observability = new ObservabilityLibSQL({ operations });
3850
+ };
3851
+ const scores = new ScoresLibSQL(domainConfig);
3852
+ const workflows = new WorkflowsLibSQL(domainConfig);
3853
+ const memory = new MemoryLibSQL(domainConfig);
3854
+ const observability = new ObservabilityLibSQL(domainConfig);
3855
+ const agents = new AgentsLibSQL(domainConfig);
3087
3856
  this.stores = {
3088
- operations,
3089
3857
  scores,
3090
3858
  workflows,
3091
3859
  memory,
3092
- observability
3093
- };
3094
- }
3095
- get supports() {
3096
- return {
3097
- selectByIncludeResourceScope: true,
3098
- resourceWorkingMemory: true,
3099
- hasColumn: true,
3100
- createTable: true,
3101
- deleteMessages: true,
3102
- observabilityInstance: true,
3103
- listScoresBySpan: true
3860
+ observability,
3861
+ agents
3104
3862
  };
3105
3863
  }
3106
- async createTable({
3107
- tableName,
3108
- schema
3109
- }) {
3110
- await this.stores.operations.createTable({ tableName, schema });
3111
- }
3112
- /**
3113
- * Alters table schema to add columns if they don't exist
3114
- * @param tableName Name of the table
3115
- * @param schema Schema of the table
3116
- * @param ifNotExists Array of column names to add if they don't exist
3117
- */
3118
- async alterTable({
3119
- tableName,
3120
- schema,
3121
- ifNotExists
3122
- }) {
3123
- await this.stores.operations.alterTable({ tableName, schema, ifNotExists });
3124
- }
3125
- async clearTable({ tableName }) {
3126
- await this.stores.operations.clearTable({ tableName });
3127
- }
3128
- async dropTable({ tableName }) {
3129
- await this.stores.operations.dropTable({ tableName });
3130
- }
3131
- insert(args) {
3132
- return this.stores.operations.insert(args);
3133
- }
3134
- batchInsert(args) {
3135
- return this.stores.operations.batchInsert(args);
3136
- }
3137
- async load({ tableName, keys }) {
3138
- return this.stores.operations.load({ tableName, keys });
3139
- }
3140
- async getThreadById({ threadId }) {
3141
- return this.stores.memory.getThreadById({ threadId });
3142
- }
3143
- async saveThread({ thread }) {
3144
- return this.stores.memory.saveThread({ thread });
3145
- }
3146
- async updateThread({
3147
- id,
3148
- title,
3149
- metadata
3150
- }) {
3151
- return this.stores.memory.updateThread({ id, title, metadata });
3152
- }
3153
- async deleteThread({ threadId }) {
3154
- return this.stores.memory.deleteThread({ threadId });
3155
- }
3156
- async listMessagesById({ messageIds }) {
3157
- return this.stores.memory.listMessagesById({ messageIds });
3158
- }
3159
- async saveMessages(args) {
3160
- const result = await this.stores.memory.saveMessages({ messages: args.messages });
3161
- return { messages: result.messages };
3162
- }
3163
- async updateMessages({
3164
- messages
3165
- }) {
3166
- return this.stores.memory.updateMessages({ messages });
3167
- }
3168
- async deleteMessages(messageIds) {
3169
- return this.stores.memory.deleteMessages(messageIds);
3170
- }
3171
- async getScoreById({ id }) {
3172
- return this.stores.scores.getScoreById({ id });
3173
- }
3174
- async saveScore(score) {
3175
- return this.stores.scores.saveScore(score);
3176
- }
3177
- async listScoresByScorerId({
3178
- scorerId,
3179
- entityId,
3180
- entityType,
3181
- source,
3182
- pagination
3183
- }) {
3184
- return this.stores.scores.listScoresByScorerId({ scorerId, entityId, entityType, source, pagination });
3185
- }
3186
- async listScoresByRunId({
3187
- runId,
3188
- pagination
3189
- }) {
3190
- return this.stores.scores.listScoresByRunId({ runId, pagination });
3191
- }
3192
- async listScoresByEntityId({
3193
- entityId,
3194
- entityType,
3195
- pagination
3196
- }) {
3197
- return this.stores.scores.listScoresByEntityId({ entityId, entityType, pagination });
3198
- }
3199
- /**
3200
- * WORKFLOWS
3201
- */
3202
- async updateWorkflowResults({
3203
- workflowName,
3204
- runId,
3205
- stepId,
3206
- result,
3207
- requestContext
3208
- }) {
3209
- return this.stores.workflows.updateWorkflowResults({ workflowName, runId, stepId, result, requestContext });
3210
- }
3211
- async updateWorkflowState({
3212
- workflowName,
3213
- runId,
3214
- opts
3215
- }) {
3216
- return this.stores.workflows.updateWorkflowState({ workflowName, runId, opts });
3217
- }
3218
- async persistWorkflowSnapshot({
3219
- workflowName,
3220
- runId,
3221
- resourceId,
3222
- snapshot
3223
- }) {
3224
- return this.stores.workflows.persistWorkflowSnapshot({ workflowName, runId, resourceId, snapshot });
3225
- }
3226
- async loadWorkflowSnapshot({
3227
- workflowName,
3228
- runId
3229
- }) {
3230
- return this.stores.workflows.loadWorkflowSnapshot({ workflowName, runId });
3231
- }
3232
- async listWorkflowRuns({
3233
- workflowName,
3234
- fromDate,
3235
- toDate,
3236
- perPage,
3237
- page,
3238
- resourceId
3239
- } = {}) {
3240
- return this.stores.workflows.listWorkflowRuns({ workflowName, fromDate, toDate, perPage, page, resourceId });
3241
- }
3242
- async getWorkflowRunById({
3243
- runId,
3244
- workflowName
3245
- }) {
3246
- return this.stores.workflows.getWorkflowRunById({ runId, workflowName });
3247
- }
3248
- async getResourceById({ resourceId }) {
3249
- return this.stores.memory.getResourceById({ resourceId });
3250
- }
3251
- async saveResource({ resource }) {
3252
- return this.stores.memory.saveResource({ resource });
3253
- }
3254
- async updateResource({
3255
- resourceId,
3256
- workingMemory,
3257
- metadata
3258
- }) {
3259
- return this.stores.memory.updateResource({ resourceId, workingMemory, metadata });
3260
- }
3261
- async createSpan(span) {
3262
- return this.stores.observability.createSpan(span);
3263
- }
3264
- async updateSpan(params) {
3265
- return this.stores.observability.updateSpan(params);
3266
- }
3267
- async getTrace(traceId) {
3268
- return this.stores.observability.getTrace(traceId);
3269
- }
3270
- async getTracesPaginated(args) {
3271
- return this.stores.observability.getTracesPaginated(args);
3272
- }
3273
- async listScoresBySpan({
3274
- traceId,
3275
- spanId,
3276
- pagination
3277
- }) {
3278
- return this.stores.scores.listScoresBySpan({ traceId, spanId, pagination });
3279
- }
3280
- async batchCreateSpans(args) {
3281
- return this.stores.observability.batchCreateSpans(args);
3282
- }
3283
- async batchUpdateSpans(args) {
3284
- return this.stores.observability.batchUpdateSpans(args);
3285
- }
3286
3864
  };
3287
3865
 
3288
3866
  // src/vector/prompt.ts
@@ -3384,6 +3962,6 @@ Example Complex Query:
3384
3962
  ]
3385
3963
  }`;
3386
3964
 
3387
- export { LibSQLStore as DefaultStorage, LIBSQL_PROMPT, LibSQLStore, LibSQLVector };
3965
+ export { AgentsLibSQL, LibSQLStore as DefaultStorage, LIBSQL_PROMPT, LibSQLStore, LibSQLVector, MemoryLibSQL, ObservabilityLibSQL, ScoresLibSQL, WorkflowsLibSQL };
3388
3966
  //# sourceMappingURL=index.js.map
3389
3967
  //# sourceMappingURL=index.js.map
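
Taken together, the hunks above replace the shared StoreOperationsLibSQL wiring with standalone domain stores (AgentsLibSQL, MemoryLibSQL, ObservabilityLibSQL, ScoresLibSQL, WorkflowsLibSQL) that each accept a single config object, resolve their own client via resolveClient, and expose init() and dangerouslyClearAll(); the widened export line also makes these classes importable directly from the package entry point. The following is a minimal usage sketch only, assuming the same { client, maxRetries, initialBackoffMs } shape that LibSQLStore passes to each store as domainConfig; the file URL and retry values are illustrative.

// Minimal sketch of the per-domain API surfaced in this diff (not part of the package itself).
// Assumes a local libsql file database and the { client, maxRetries, initialBackoffMs }
// config shape that LibSQLStore passes to each domain store as `domainConfig`.
import { createClient } from '@libsql/client';
import { ObservabilityLibSQL, ScoresLibSQL, WorkflowsLibSQL } from '@mastra/libsql';

const client = createClient({ url: 'file:local.db' }); // illustrative URL
const domainConfig = { client, maxRetries: 5, initialBackoffMs: 500 };

// Each store owns its own table setup via init().
const observability = new ObservabilityLibSQL(domainConfig);
await observability.init(); // creates the spans table from SPAN_SCHEMA

const scores = new ScoresLibSQL(domainConfig);
await scores.init(); // creates the scorers table and adds spanId/requestContext columns if missing

const workflows = new WorkflowsLibSQL(domainConfig);
await workflows.init(); // creates the workflow snapshot table and adds resourceId if missing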