@mastra/mssql 0.0.0-new-scorer-api-20250801075530 → 0.0.0-new-button-export-20251219133013

This diff shows the contents of publicly available package versions as released to a supported registry. It is provided for informational purposes only and reflects the changes between these versions as they appear in the public registry.
package/dist/index.cjs CHANGED
@@ -1,378 +1,206 @@
  'use strict';
 
- var agent = require('@mastra/core/agent');
  var error = require('@mastra/core/error');
  var storage = require('@mastra/core/storage');
- var utils = require('@mastra/core/utils');
  var sql = require('mssql');
+ var base = require('@mastra/core/base');
+ var utils = require('@mastra/core/utils');
+ var agent = require('@mastra/core/agent');
+ var crypto = require('crypto');
+ var evals = require('@mastra/core/evals');
 
  function _interopDefault (e) { return e && e.__esModule ? e : { default: e }; }
 
  var sql__default = /*#__PURE__*/_interopDefault(sql);
 
  // src/storage/index.ts
- var MSSQLStore = class extends storage.MastraStorage {
+ function getSchemaName(schema) {
+ return schema ? `[${utils.parseSqlIdentifier(schema, "schema name")}]` : void 0;
+ }
+ function getTableName({ indexName, schemaName }) {
+ const parsedIndexName = utils.parseSqlIdentifier(indexName, "index name");
+ const quotedIndexName = `[${parsedIndexName}]`;
+ const quotedSchemaName = schemaName;
+ return quotedSchemaName ? `${quotedSchemaName}.${quotedIndexName}` : quotedIndexName;
+ }
+
+ // src/storage/db/index.ts
+ function resolveMssqlConfig(config) {
+ if ("pool" in config && "db" in config) {
+ return { pool: config.pool, db: config.db, schema: config.schema, needsConnect: false };
+ }
+ const pool = new sql__default.default.ConnectionPool({
+ server: config.server,
+ database: config.database,
+ user: config.user,
+ password: config.password,
+ port: config.port,
+ options: config.options || { encrypt: true, trustServerCertificate: true }
+ });
+ const db = new MssqlDB({ pool, schemaName: config.schemaName });
+ return { pool, db, schema: config.schemaName, needsConnect: true };
+ }
+ var MssqlDB = class extends base.MastraBase {
  pool;
- schema;
+ schemaName;
  setupSchemaPromise = null;
  schemaSetupComplete = void 0;
- isConnected = null;
- constructor(config) {
- super({ name: "MSSQLStore" });
- try {
- if ("connectionString" in config) {
- if (!config.connectionString || typeof config.connectionString !== "string" || config.connectionString.trim() === "") {
- throw new Error("MSSQLStore: connectionString must be provided and cannot be empty.");
- }
- } else {
- const required = ["server", "database", "user", "password"];
- for (const key of required) {
- if (!(key in config) || typeof config[key] !== "string" || config[key].trim() === "") {
- throw new Error(`MSSQLStore: ${key} must be provided and cannot be empty.`);
- }
+ getSqlType(type, isPrimaryKey = false, useLargeStorage = false) {
+ switch (type) {
+ case "text":
+ if (useLargeStorage) {
+ return "NVARCHAR(MAX)";
  }
- }
- this.schema = config.schemaName;
- this.pool = "connectionString" in config ? new sql__default.default.ConnectionPool(config.connectionString) : new sql__default.default.ConnectionPool({
- server: config.server,
- database: config.database,
- user: config.user,
- password: config.password,
- port: config.port,
- options: config.options || { encrypt: true, trustServerCertificate: true }
- });
- } catch (e) {
- throw new error.MastraError(
- {
- id: "MASTRA_STORAGE_MSSQL_STORE_INITIALIZATION_FAILED",
- domain: error.ErrorDomain.STORAGE,
- category: error.ErrorCategory.USER
- },
- e
- );
- }
- }
- async init() {
- if (this.isConnected === null) {
- this.isConnected = this._performInitializationAndStore();
- }
- try {
- await this.isConnected;
- await super.init();
- } catch (error$1) {
- this.isConnected = null;
- throw new error.MastraError(
- {
- id: "MASTRA_STORAGE_MSSQL_STORE_INIT_FAILED",
+ return isPrimaryKey ? "NVARCHAR(255)" : "NVARCHAR(400)";
+ case "timestamp":
+ return "DATETIME2(7)";
+ case "uuid":
+ return "UNIQUEIDENTIFIER";
+ case "jsonb":
+ return "NVARCHAR(MAX)";
+ case "integer":
+ return "INT";
+ case "bigint":
+ return "BIGINT";
+ case "float":
+ return "FLOAT";
+ case "boolean":
+ return "BIT";
+ default:
+ throw new error.MastraError({
+ id: storage.createStorageErrorId("MSSQL", "TYPE", "NOT_SUPPORTED"),
  domain: error.ErrorDomain.STORAGE,
  category: error.ErrorCategory.THIRD_PARTY
- },
- error$1
- );
- }
- }
- async _performInitializationAndStore() {
- try {
- await this.pool.connect();
- return true;
- } catch (err) {
- throw err;
+ });
  }
76
  }
82
- get supports() {
83
- return {
84
- selectByIncludeResourceScope: true,
85
- resourceWorkingMemory: true,
86
- hasColumn: true,
87
- createTable: true,
88
- deleteMessages: false
89
- };
90
- }
91
- getTableName(indexName) {
92
- const parsedIndexName = utils.parseSqlIdentifier(indexName, "index name");
93
- const quotedIndexName = `[${parsedIndexName}]`;
94
- const quotedSchemaName = this.getSchemaName();
95
- return quotedSchemaName ? `${quotedSchemaName}.${quotedIndexName}` : quotedIndexName;
96
- }
97
- getSchemaName() {
98
- return this.schema ? `[${utils.parseSqlIdentifier(this.schema, "schema name")}]` : void 0;
77
+ constructor({ pool, schemaName }) {
78
+ super({ component: "STORAGE", name: "MssqlDB" });
79
+ this.pool = pool;
80
+ this.schemaName = schemaName;
99
81
  }
100
- transformEvalRow(row) {
101
- let testInfoValue = null, resultValue = null;
102
- if (row.test_info) {
103
- try {
104
- testInfoValue = typeof row.test_info === "string" ? JSON.parse(row.test_info) : row.test_info;
105
- } catch {
106
- }
107
- }
108
- if (row.test_info) {
109
- try {
110
- resultValue = typeof row.result === "string" ? JSON.parse(row.result) : row.result;
111
- } catch {
112
- }
113
- }
114
- return {
115
- agentName: row.agent_name,
116
- input: row.input,
117
- output: row.output,
118
- result: resultValue,
119
- metricName: row.metric_name,
120
- instructions: row.instructions,
121
- testInfo: testInfoValue,
122
- globalRunId: row.global_run_id,
123
- runId: row.run_id,
124
- createdAt: row.created_at
125
- };
82
+ async hasColumn(table, column) {
83
+ const schema = this.schemaName || "dbo";
84
+ const request = this.pool.request();
85
+ request.input("schema", schema);
86
+ request.input("table", table);
87
+ request.input("column", column);
88
+ request.input("columnLower", column.toLowerCase());
89
+ const result = await request.query(
90
+ `SELECT 1 FROM INFORMATION_SCHEMA.COLUMNS WHERE TABLE_SCHEMA = @schema AND TABLE_NAME = @table AND (COLUMN_NAME = @column OR COLUMN_NAME = @columnLower)`
91
+ );
92
+ return result.recordset.length > 0;
126
93
  }
127
- /** @deprecated use getEvals instead */
128
- async getEvalsByAgentName(agentName, type) {
129
- try {
130
- let query = `SELECT * FROM ${this.getTableName(storage.TABLE_EVALS)} WHERE agent_name = @p1`;
131
- if (type === "test") {
132
- query += " AND test_info IS NOT NULL AND JSON_VALUE(test_info, '$.testPath') IS NOT NULL";
133
- } else if (type === "live") {
134
- query += " AND (test_info IS NULL OR JSON_VALUE(test_info, '$.testPath') IS NULL)";
135
- }
136
- query += " ORDER BY created_at DESC";
137
- const request = this.pool.request();
138
- request.input("p1", agentName);
139
- const result = await request.query(query);
140
- const rows = result.recordset;
141
- return typeof this.transformEvalRow === "function" ? rows?.map((row) => this.transformEvalRow(row)) ?? [] : rows ?? [];
142
- } catch (error) {
143
- if (error && error.number === 208 && error.message && error.message.includes("Invalid object name")) {
144
- return [];
145
- }
146
- console.error("Failed to get evals for the specified agent: " + error?.message);
147
- throw error;
94
+ async setupSchema() {
95
+ if (!this.schemaName || this.schemaSetupComplete) {
96
+ return;
148
97
  }
149
- }
150
- async batchInsert({ tableName, records }) {
151
- const transaction = this.pool.transaction();
152
- try {
153
- await transaction.begin();
154
- for (const record of records) {
155
- await this.insert({ tableName, record });
156
- }
157
- await transaction.commit();
158
- } catch (error$1) {
159
- await transaction.rollback();
160
- throw new error.MastraError(
161
- {
162
- id: "MASTRA_STORAGE_MSSQL_STORE_BATCH_INSERT_FAILED",
163
- domain: error.ErrorDomain.STORAGE,
164
- category: error.ErrorCategory.THIRD_PARTY,
165
- details: {
166
- tableName,
167
- numberOfRecords: records.length
98
+ if (!this.setupSchemaPromise) {
99
+ this.setupSchemaPromise = (async () => {
100
+ try {
101
+ const checkRequest = this.pool.request();
102
+ checkRequest.input("schemaName", this.schemaName);
103
+ const checkResult = await checkRequest.query(`
104
+ SELECT 1 AS found FROM INFORMATION_SCHEMA.SCHEMATA WHERE SCHEMA_NAME = @schemaName
105
+ `);
106
+ const schemaExists = Array.isArray(checkResult.recordset) && checkResult.recordset.length > 0;
107
+ if (!schemaExists) {
108
+ try {
109
+ await this.pool.request().query(`CREATE SCHEMA [${this.schemaName}]`);
110
+ this.logger?.info?.(`Schema "${this.schemaName}" created successfully`);
111
+ } catch (error) {
112
+ this.logger?.error?.(`Failed to create schema "${this.schemaName}"`, { error });
113
+ throw new Error(
114
+ `Unable to create schema "${this.schemaName}". This requires CREATE privilege on the database. Either create the schema manually or grant CREATE privilege to the user.`
115
+ );
116
+ }
168
117
  }
169
- },
170
- error$1
171
- );
172
- }
173
- }
174
- /** @deprecated use getTracesPaginated instead*/
175
- async getTraces(args) {
176
- if (args.fromDate || args.toDate) {
177
- args.dateRange = {
178
- start: args.fromDate,
179
- end: args.toDate
180
- };
118
+ this.schemaSetupComplete = true;
119
+ this.logger?.debug?.(`Schema "${this.schemaName}" is ready for use`);
120
+ } catch (error) {
121
+ this.schemaSetupComplete = void 0;
122
+ this.setupSchemaPromise = null;
123
+ throw error;
124
+ } finally {
125
+ this.setupSchemaPromise = null;
126
+ }
127
+ })();
181
128
  }
182
- const result = await this.getTracesPaginated(args);
183
- return result.traces;
129
+ await this.setupSchemaPromise;
184
130
  }
185
- async getTracesPaginated(args) {
186
- const { name, scope, page = 0, perPage: perPageInput, attributes, filters, dateRange } = args;
187
- const fromDate = dateRange?.start;
188
- const toDate = dateRange?.end;
189
- const perPage = perPageInput !== void 0 ? perPageInput : 100;
190
- const currentOffset = page * perPage;
191
- const paramMap = {};
192
- const conditions = [];
193
- let paramIndex = 1;
194
- if (name) {
195
- const paramName = `p${paramIndex++}`;
196
- conditions.push(`[name] LIKE @${paramName}`);
197
- paramMap[paramName] = `${name}%`;
198
- }
199
- if (scope) {
200
- const paramName = `p${paramIndex++}`;
201
- conditions.push(`[scope] = @${paramName}`);
202
- paramMap[paramName] = scope;
203
- }
204
- if (attributes) {
205
- Object.entries(attributes).forEach(([key, value]) => {
206
- const parsedKey = utils.parseFieldKey(key);
207
- const paramName = `p${paramIndex++}`;
208
- conditions.push(`JSON_VALUE([attributes], '$.${parsedKey}') = @${paramName}`);
209
- paramMap[paramName] = value;
210
- });
211
- }
212
- if (filters) {
213
- Object.entries(filters).forEach(([key, value]) => {
214
- const parsedKey = utils.parseFieldKey(key);
215
- const paramName = `p${paramIndex++}`;
216
- conditions.push(`[${parsedKey}] = @${paramName}`);
217
- paramMap[paramName] = value;
218
- });
219
- }
220
- if (fromDate instanceof Date && !isNaN(fromDate.getTime())) {
221
- const paramName = `p${paramIndex++}`;
222
- conditions.push(`[createdAt] >= @${paramName}`);
223
- paramMap[paramName] = fromDate.toISOString();
224
- }
225
- if (toDate instanceof Date && !isNaN(toDate.getTime())) {
226
- const paramName = `p${paramIndex++}`;
227
- conditions.push(`[createdAt] <= @${paramName}`);
228
- paramMap[paramName] = toDate.toISOString();
229
- }
230
- const whereClause = conditions.length > 0 ? `WHERE ${conditions.join(" AND ")}` : "";
231
- const countQuery = `SELECT COUNT(*) as total FROM ${this.getTableName(storage.TABLE_TRACES)} ${whereClause}`;
232
- let total = 0;
131
+ async insert({
132
+ tableName,
133
+ record,
134
+ transaction
135
+ }) {
233
136
  try {
234
- const countRequest = this.pool.request();
235
- Object.entries(paramMap).forEach(([key, value]) => {
236
- if (value instanceof Date) {
237
- countRequest.input(key, sql__default.default.DateTime, value);
137
+ const columns = Object.keys(record);
138
+ const parsedColumns = columns.map((col) => utils.parseSqlIdentifier(col, "column name"));
139
+ const paramNames = columns.map((_, i) => `@param${i}`);
140
+ const insertSql = `INSERT INTO ${getTableName({ indexName: tableName, schemaName: getSchemaName(this.schemaName) })} (${parsedColumns.map((c) => `[${c}]`).join(", ")}) VALUES (${paramNames.join(", ")})`;
141
+ const request = transaction ? transaction.request() : this.pool.request();
142
+ columns.forEach((col, i) => {
143
+ const value = record[col];
144
+ const preparedValue = this.prepareValue(value, col, tableName);
145
+ if (preparedValue instanceof Date) {
146
+ request.input(`param${i}`, sql__default.default.DateTime2, preparedValue);
147
+ } else if (preparedValue === null || preparedValue === void 0) {
148
+ request.input(`param${i}`, this.getMssqlType(tableName, col), null);
238
149
  } else {
239
- countRequest.input(key, value);
150
+ request.input(`param${i}`, preparedValue);
240
151
  }
241
152
  });
242
- const countResult = await countRequest.query(countQuery);
243
- total = parseInt(countResult.recordset[0].total, 10);
153
+ await request.query(insertSql);
244
154
  } catch (error$1) {
245
155
  throw new error.MastraError(
246
156
  {
247
- id: "MASTRA_STORAGE_MSSQL_STORE_GET_TRACES_PAGINATED_FAILED_TO_RETRIEVE_TOTAL_COUNT",
157
+ id: storage.createStorageErrorId("MSSQL", "INSERT", "FAILED"),
248
158
  domain: error.ErrorDomain.STORAGE,
249
159
  category: error.ErrorCategory.THIRD_PARTY,
250
160
  details: {
251
- name: args.name ?? "",
252
- scope: args.scope ?? ""
161
+ tableName
253
162
  }
254
163
  },
255
164
  error$1
256
165
  );
257
166
  }
258
- if (total === 0) {
259
- return {
260
- traces: [],
261
- total: 0,
262
- page,
263
- perPage,
264
- hasMore: false
265
- };
266
- }
267
- const dataQuery = `SELECT * FROM ${this.getTableName(storage.TABLE_TRACES)} ${whereClause} ORDER BY [seq_id] DESC OFFSET @offset ROWS FETCH NEXT @limit ROWS ONLY`;
268
- const dataRequest = this.pool.request();
269
- Object.entries(paramMap).forEach(([key, value]) => {
270
- if (value instanceof Date) {
271
- dataRequest.input(key, sql__default.default.DateTime, value);
272
- } else {
273
- dataRequest.input(key, value);
274
- }
275
- });
276
- dataRequest.input("offset", currentOffset);
277
- dataRequest.input("limit", perPage);
167
+ }
168
+ async clearTable({ tableName }) {
169
+ const fullTableName = getTableName({ indexName: tableName, schemaName: getSchemaName(this.schemaName) });
278
170
  try {
279
- const rowsResult = await dataRequest.query(dataQuery);
280
- const rows = rowsResult.recordset;
281
- const traces = rows.map((row) => ({
282
- id: row.id,
283
- parentSpanId: row.parentSpanId,
284
- traceId: row.traceId,
285
- name: row.name,
286
- scope: row.scope,
287
- kind: row.kind,
288
- status: JSON.parse(row.status),
289
- events: JSON.parse(row.events),
290
- links: JSON.parse(row.links),
291
- attributes: JSON.parse(row.attributes),
292
- startTime: row.startTime,
293
- endTime: row.endTime,
294
- other: row.other,
295
- createdAt: row.createdAt
296
- }));
297
- return {
298
- traces,
299
- total,
300
- page,
301
- perPage,
302
- hasMore: currentOffset + traces.length < total
303
- };
171
+ try {
172
+ await this.pool.request().query(`TRUNCATE TABLE ${fullTableName}`);
173
+ } catch (truncateError) {
174
+ if (truncateError?.number === 4712) {
175
+ await this.pool.request().query(`DELETE FROM ${fullTableName}`);
176
+ } else {
177
+ throw truncateError;
178
+ }
179
+ }
304
180
  } catch (error$1) {
305
181
  throw new error.MastraError(
306
182
  {
307
- id: "MASTRA_STORAGE_MSSQL_STORE_GET_TRACES_PAGINATED_FAILED_TO_RETRIEVE_TRACES",
183
+ id: storage.createStorageErrorId("MSSQL", "CLEAR_TABLE", "FAILED"),
308
184
  domain: error.ErrorDomain.STORAGE,
309
185
  category: error.ErrorCategory.THIRD_PARTY,
310
186
  details: {
311
- name: args.name ?? "",
312
- scope: args.scope ?? ""
187
+ tableName
313
188
  }
314
189
  },
315
190
  error$1
316
191
  );
317
192
  }
318
193
  }
319
- async setupSchema() {
320
- if (!this.schema || this.schemaSetupComplete) {
321
- return;
322
- }
323
- if (!this.setupSchemaPromise) {
324
- this.setupSchemaPromise = (async () => {
325
- try {
326
- const checkRequest = this.pool.request();
327
- checkRequest.input("schemaName", this.schema);
328
- const checkResult = await checkRequest.query(`
329
- SELECT 1 AS found FROM INFORMATION_SCHEMA.SCHEMATA WHERE SCHEMA_NAME = @schemaName
330
- `);
331
- const schemaExists = Array.isArray(checkResult.recordset) && checkResult.recordset.length > 0;
332
- if (!schemaExists) {
333
- try {
334
- await this.pool.request().query(`CREATE SCHEMA [${this.schema}]`);
335
- this.logger?.info?.(`Schema "${this.schema}" created successfully`);
336
- } catch (error) {
337
- this.logger?.error?.(`Failed to create schema "${this.schema}"`, { error });
338
- throw new Error(
339
- `Unable to create schema "${this.schema}". This requires CREATE privilege on the database. Either create the schema manually or grant CREATE privilege to the user.`
340
- );
341
- }
342
- }
343
- this.schemaSetupComplete = true;
344
- this.logger?.debug?.(`Schema "${this.schema}" is ready for use`);
345
- } catch (error) {
346
- this.schemaSetupComplete = void 0;
347
- this.setupSchemaPromise = null;
348
- throw error;
349
- } finally {
350
- this.setupSchemaPromise = null;
351
- }
352
- })();
353
- }
354
- await this.setupSchemaPromise;
355
- }
356
- getSqlType(type, isPrimaryKey = false) {
194
+ getDefaultValue(type) {
357
195
  switch (type) {
358
- case "text":
359
- return isPrimaryKey ? "NVARCHAR(255)" : "NVARCHAR(MAX)";
360
196
  case "timestamp":
361
- return "DATETIME2(7)";
362
- case "uuid":
363
- return "UNIQUEIDENTIFIER";
197
+ return "DEFAULT SYSUTCDATETIME()";
364
198
  case "jsonb":
365
- return "NVARCHAR(MAX)";
366
- case "integer":
367
- return "INT";
368
- case "bigint":
369
- return "BIGINT";
199
+ return "DEFAULT N'{}'";
200
+ case "boolean":
201
+ return "DEFAULT 0";
370
202
  default:
371
- throw new error.MastraError({
372
- id: "MASTRA_STORAGE_MSSQL_STORE_TYPE_NOT_SUPPORTED",
373
- domain: error.ErrorDomain.STORAGE,
374
- category: error.ErrorCategory.THIRD_PARTY
375
- });
203
+ return storage.getDefaultValue(type);
376
204
  }
377
205
  }
378
206
  async createTable({
@@ -381,25 +209,44 @@ var MSSQLStore = class extends storage.MastraStorage {
  }) {
  try {
  const uniqueConstraintColumns = tableName === storage.TABLE_WORKFLOW_SNAPSHOT ? ["workflow_name", "run_id"] : [];
+ const largeDataColumns = [
+ "workingMemory",
+ "snapshot",
+ "metadata",
+ "content",
+ // messages.content - can be very long conversation content
+ "input",
+ // evals.input - test input data
+ "output",
+ // evals.output - test output data
+ "instructions",
+ // evals.instructions - evaluation instructions
+ "other"
+ // traces.other - additional trace data
+ ];
  const columns = Object.entries(schema).map(([name, def]) => {
  const parsedName = utils.parseSqlIdentifier(name, "column name");
  const constraints = [];
  if (def.primaryKey) constraints.push("PRIMARY KEY");
  if (!def.nullable) constraints.push("NOT NULL");
  const isIndexed = !!def.primaryKey || uniqueConstraintColumns.includes(name);
- return `[${parsedName}] ${this.getSqlType(def.type, isIndexed)} ${constraints.join(" ")}`.trim();
+ const useLargeStorage = largeDataColumns.includes(name);
+ return `[${parsedName}] ${this.getSqlType(def.type, isIndexed, useLargeStorage)} ${constraints.join(" ")}`.trim();
  }).join(",\n");
- if (this.schema) {
+ if (this.schemaName) {
  await this.setupSchema();
  }
  const checkTableRequest = this.pool.request();
- checkTableRequest.input("tableName", this.getTableName(tableName).replace(/[[\]]/g, "").split(".").pop());
+ checkTableRequest.input(
+ "tableName",
+ getTableName({ indexName: tableName, schemaName: getSchemaName(this.schemaName) }).replace(/[[\]]/g, "").split(".").pop()
+ );
  const checkTableSql = `SELECT 1 AS found FROM INFORMATION_SCHEMA.TABLES WHERE TABLE_SCHEMA = @schema AND TABLE_NAME = @tableName`;
- checkTableRequest.input("schema", this.schema || "dbo");
+ checkTableRequest.input("schema", this.schemaName || "dbo");
  const checkTableResult = await checkTableRequest.query(checkTableSql);
  const tableExists = Array.isArray(checkTableResult.recordset) && checkTableResult.recordset.length > 0;
  if (!tableExists) {
- const createSql = `CREATE TABLE ${this.getTableName(tableName)} (
+ const createSql = `CREATE TABLE ${getTableName({ indexName: tableName, schemaName: getSchemaName(this.schemaName) })} (
  ${columns}
  )`;
  await this.pool.request().query(createSql);
@@ -410,12 +257,15 @@ ${columns}
  WHERE TABLE_SCHEMA = @schema AND TABLE_NAME = @tableName AND COLUMN_NAME = 'seq_id'
  `;
  const checkColumnRequest = this.pool.request();
- checkColumnRequest.input("schema", this.schema || "dbo");
- checkColumnRequest.input("tableName", this.getTableName(tableName).replace(/[[\]]/g, "").split(".").pop());
+ checkColumnRequest.input("schema", this.schemaName || "dbo");
+ checkColumnRequest.input(
+ "tableName",
+ getTableName({ indexName: tableName, schemaName: getSchemaName(this.schemaName) }).replace(/[[\]]/g, "").split(".").pop()
+ );
  const columnResult = await checkColumnRequest.query(columnCheckSql);
  const columnExists = Array.isArray(columnResult.recordset) && columnResult.recordset.length > 0;
  if (!columnExists) {
- const alterSql = `ALTER TABLE ${this.getTableName(tableName)} ADD seq_id BIGINT IDENTITY(1,1)`;
+ const alterSql = `ALTER TABLE ${getTableName({ indexName: tableName, schemaName: getSchemaName(this.schemaName) })} ADD seq_id BIGINT IDENTITY(1,1)`;
  await this.pool.request().query(alterSql);
  }
  if (tableName === storage.TABLE_WORKFLOW_SNAPSHOT) {
@@ -426,14 +276,14 @@ ${columns}
  const constraintResult = await checkConstraintRequest.query(checkConstraintSql);
  const constraintExists = Array.isArray(constraintResult.recordset) && constraintResult.recordset.length > 0;
  if (!constraintExists) {
- const addConstraintSql = `ALTER TABLE ${this.getTableName(tableName)} ADD CONSTRAINT ${constraintName} UNIQUE ([workflow_name], [run_id])`;
+ const addConstraintSql = `ALTER TABLE ${getTableName({ indexName: tableName, schemaName: getSchemaName(this.schemaName) })} ADD CONSTRAINT ${constraintName} UNIQUE ([workflow_name], [run_id])`;
  await this.pool.request().query(addConstraintSql);
  }
  }
  } catch (error$1) {
  throw new error.MastraError(
  {
- id: "MASTRA_STORAGE_MSSQL_STORE_CREATE_TABLE_FAILED",
+ id: storage.createStorageErrorId("MSSQL", "CREATE_TABLE", "FAILED"),
  domain: error.ErrorDomain.STORAGE,
  category: error.ErrorCategory.THIRD_PARTY,
  details: {
@@ -444,35 +294,43 @@ ${columns}
  );
  }
  }
- getDefaultValue(type) {
- switch (type) {
- case "timestamp":
- return "DEFAULT SYSDATETIMEOFFSET()";
- case "jsonb":
- return "DEFAULT N'{}'";
- default:
- return super.getDefaultValue(type);
- }
- }
+ /**
+ * Alters table schema to add columns if they don't exist
+ * @param tableName Name of the table
+ * @param schema Schema of the table
+ * @param ifNotExists Array of column names to add if they don't exist
+ */
  async alterTable({
  tableName,
  schema,
  ifNotExists
  }) {
- const fullTableName = this.getTableName(tableName);
+ const fullTableName = getTableName({ indexName: tableName, schemaName: getSchemaName(this.schemaName) });
  try {
  for (const columnName of ifNotExists) {
  if (schema[columnName]) {
  const columnCheckRequest = this.pool.request();
  columnCheckRequest.input("tableName", fullTableName.replace(/[[\]]/g, "").split(".").pop());
  columnCheckRequest.input("columnName", columnName);
- columnCheckRequest.input("schema", this.schema || "dbo");
+ columnCheckRequest.input("schema", this.schemaName || "dbo");
  const checkSql = `SELECT 1 AS found FROM INFORMATION_SCHEMA.COLUMNS WHERE TABLE_SCHEMA = @schema AND TABLE_NAME = @tableName AND COLUMN_NAME = @columnName`;
  const checkResult = await columnCheckRequest.query(checkSql);
  const columnExists = Array.isArray(checkResult.recordset) && checkResult.recordset.length > 0;
  if (!columnExists) {
  const columnDef = schema[columnName];
- const sqlType = this.getSqlType(columnDef.type);
+ const largeDataColumns = [
+ "workingMemory",
+ "snapshot",
+ "metadata",
+ "content",
+ "input",
+ "output",
+ "instructions",
+ "other"
+ ];
+ const useLargeStorage = largeDataColumns.includes(columnName);
+ const isIndexed = !!columnDef.primaryKey;
+ const sqlType = this.getSqlType(columnDef.type, isIndexed, useLargeStorage);
  const nullable = columnDef.nullable === false ? "NOT NULL" : "";
  const defaultValue = columnDef.nullable === false ? this.getDefaultValue(columnDef.type) : "";
  const parsedColumnName = utils.parseSqlIdentifier(columnName, "column name");
@@ -485,69 +343,7 @@ ${columns}
  } catch (error$1) {
  throw new error.MastraError(
  {
- id: "MASTRA_STORAGE_MSSQL_STORE_ALTER_TABLE_FAILED",
- domain: error.ErrorDomain.STORAGE,
- category: error.ErrorCategory.THIRD_PARTY,
- details: {
- tableName
- }
- },
- error$1
- );
- }
- }
- async clearTable({ tableName }) {
- const fullTableName = this.getTableName(tableName);
- try {
- const fkQuery = `
- SELECT
- OBJECT_SCHEMA_NAME(fk.parent_object_id) AS schema_name,
- OBJECT_NAME(fk.parent_object_id) AS table_name
- FROM sys.foreign_keys fk
- WHERE fk.referenced_object_id = OBJECT_ID(@fullTableName)
- `;
- const fkResult = await this.pool.request().input("fullTableName", fullTableName).query(fkQuery);
- const childTables = fkResult.recordset || [];
- for (const child of childTables) {
- const childTableName = this.schema ? `[${child.schema_name}].[${child.table_name}]` : `[${child.table_name}]`;
- await this.clearTable({ tableName: childTableName });
- }
- await this.pool.request().query(`TRUNCATE TABLE ${fullTableName}`);
- } catch (error$1) {
- throw new error.MastraError(
- {
- id: "MASTRA_STORAGE_MSSQL_STORE_CLEAR_TABLE_FAILED",
- domain: error.ErrorDomain.STORAGE,
- category: error.ErrorCategory.THIRD_PARTY,
- details: {
- tableName
- }
- },
- error$1
- );
- }
- }
- async insert({ tableName, record }) {
- try {
- const columns = Object.keys(record).map((col) => utils.parseSqlIdentifier(col, "column name"));
- const values = Object.values(record);
- const paramNames = values.map((_, i) => `@param${i}`);
- const insertSql = `INSERT INTO ${this.getTableName(tableName)} (${columns.map((c) => `[${c}]`).join(", ")}) VALUES (${paramNames.join(", ")})`;
- const request = this.pool.request();
- values.forEach((value, i) => {
- if (value instanceof Date) {
- request.input(`param${i}`, sql__default.default.DateTime2, value);
- } else if (typeof value === "object" && value !== null) {
- request.input(`param${i}`, JSON.stringify(value));
- } else {
- request.input(`param${i}`, value);
- }
- });
- await request.query(insertSql);
- } catch (error$1) {
- throw new error.MastraError(
- {
- id: "MASTRA_STORAGE_MSSQL_STORE_INSERT_FAILED",
+ id: storage.createStorageErrorId("MSSQL", "ALTER_TABLE", "FAILED"),
  domain: error.ErrorDomain.STORAGE,
  category: error.ErrorCategory.THIRD_PARTY,
  details: {
@@ -562,13 +358,17 @@ ${columns}
  try {
  const keyEntries = Object.entries(keys).map(([key, value]) => [utils.parseSqlIdentifier(key, "column name"), value]);
  const conditions = keyEntries.map(([key], i) => `[${key}] = @param${i}`).join(" AND ");
- const values = keyEntries.map(([_, value]) => value);
- const sql2 = `SELECT * FROM ${this.getTableName(tableName)} WHERE ${conditions}`;
+ const sqlQuery = `SELECT * FROM ${getTableName({ indexName: tableName, schemaName: getSchemaName(this.schemaName) })} WHERE ${conditions}`;
  const request = this.pool.request();
- values.forEach((value, i) => {
- request.input(`param${i}`, value);
+ keyEntries.forEach(([key, value], i) => {
+ const preparedValue = this.prepareValue(value, key, tableName);
+ if (preparedValue === null || preparedValue === void 0) {
+ request.input(`param${i}`, this.getMssqlType(tableName, key), null);
+ } else {
+ request.input(`param${i}`, preparedValue);
+ }
  });
- const resultSet = await request.query(sql2);
+ const resultSet = await request.query(sqlQuery);
  const result = resultSet.recordset[0] || null;
  if (!result) {
  return null;
@@ -584,7 +384,7 @@ ${columns}
  } catch (error$1) {
  throw new error.MastraError(
  {
- id: "MASTRA_STORAGE_MSSQL_STORE_LOAD_FAILED",
+ id: storage.createStorageErrorId("MSSQL", "LOAD", "FAILED"),
  domain: error.ErrorDomain.STORAGE,
  category: error.ErrorCategory.THIRD_PARTY,
  details: {
@@ -595,1226 +395,3024 @@ ${columns}
595
395
  );
596
396
  }
597
397
  }
598
- async getThreadById({ threadId }) {
398
+ async batchInsert({ tableName, records }) {
399
+ const transaction = this.pool.transaction();
599
400
  try {
600
- const sql2 = `SELECT
601
- id,
602
- [resourceId],
603
- title,
604
- metadata,
605
- [createdAt],
606
- [updatedAt]
607
- FROM ${this.getTableName(storage.TABLE_THREADS)}
608
- WHERE id = @threadId`;
609
- const request = this.pool.request();
610
- request.input("threadId", threadId);
611
- const resultSet = await request.query(sql2);
612
- const thread = resultSet.recordset[0] || null;
613
- if (!thread) {
614
- return null;
401
+ await transaction.begin();
402
+ for (const record of records) {
403
+ await this.insert({ tableName, record, transaction });
615
404
  }
616
- return {
617
- ...thread,
618
- metadata: typeof thread.metadata === "string" ? JSON.parse(thread.metadata) : thread.metadata,
619
- createdAt: thread.createdAt,
620
- updatedAt: thread.updatedAt
621
- };
405
+ await transaction.commit();
622
406
  } catch (error$1) {
407
+ await transaction.rollback();
623
408
  throw new error.MastraError(
624
409
  {
625
- id: "MASTRA_STORAGE_MSSQL_STORE_GET_THREAD_BY_ID_FAILED",
410
+ id: storage.createStorageErrorId("MSSQL", "BATCH_INSERT", "FAILED"),
626
411
  domain: error.ErrorDomain.STORAGE,
627
412
  category: error.ErrorCategory.THIRD_PARTY,
628
413
  details: {
629
- threadId
414
+ tableName,
415
+ numberOfRecords: records.length
630
416
  }
631
417
  },
632
418
  error$1
633
419
  );
634
420
  }
635
421
  }
636
- async getThreadsByResourceIdPaginated(args) {
637
- const { resourceId, page = 0, perPage: perPageInput } = args;
422
+ async dropTable({ tableName }) {
638
423
  try {
639
- const perPage = perPageInput !== void 0 ? perPageInput : 100;
640
- const currentOffset = page * perPage;
641
- const baseQuery = `FROM ${this.getTableName(storage.TABLE_THREADS)} WHERE [resourceId] = @resourceId`;
642
- const countQuery = `SELECT COUNT(*) as count ${baseQuery}`;
643
- const countRequest = this.pool.request();
644
- countRequest.input("resourceId", resourceId);
645
- const countResult = await countRequest.query(countQuery);
646
- const total = parseInt(countResult.recordset[0]?.count ?? "0", 10);
647
- if (total === 0) {
648
- return {
649
- threads: [],
650
- total: 0,
651
- page,
652
- perPage,
653
- hasMore: false
654
- };
655
- }
656
- const dataQuery = `SELECT id, [resourceId], title, metadata, [createdAt], [updatedAt] ${baseQuery} ORDER BY [seq_id] DESC OFFSET @offset ROWS FETCH NEXT @perPage ROWS ONLY`;
657
- const dataRequest = this.pool.request();
658
- dataRequest.input("resourceId", resourceId);
659
- dataRequest.input("perPage", perPage);
660
- dataRequest.input("offset", currentOffset);
661
- const rowsResult = await dataRequest.query(dataQuery);
662
- const rows = rowsResult.recordset || [];
663
- const threads = rows.map((thread) => ({
664
- ...thread,
665
- metadata: typeof thread.metadata === "string" ? JSON.parse(thread.metadata) : thread.metadata,
666
- createdAt: thread.createdAt,
667
- updatedAt: thread.updatedAt
668
- }));
669
- return {
670
- threads,
671
- total,
672
- page,
673
- perPage,
674
- hasMore: currentOffset + threads.length < total
675
- };
424
+ const tableNameWithSchema = getTableName({ indexName: tableName, schemaName: getSchemaName(this.schemaName) });
425
+ await this.pool.request().query(`DROP TABLE IF EXISTS ${tableNameWithSchema}`);
676
426
  } catch (error$1) {
677
- const mastraError = new error.MastraError(
427
+ throw new error.MastraError(
678
428
  {
679
- id: "MASTRA_STORAGE_MSSQL_STORE_GET_THREADS_BY_RESOURCE_ID_PAGINATED_FAILED",
429
+ id: storage.createStorageErrorId("MSSQL", "DROP_TABLE", "FAILED"),
680
430
  domain: error.ErrorDomain.STORAGE,
681
431
  category: error.ErrorCategory.THIRD_PARTY,
682
432
  details: {
683
- resourceId,
684
- page
433
+ tableName
685
434
  }
686
435
  },
687
436
  error$1
688
437
  );
689
- this.logger?.error?.(mastraError.toString());
690
- this.logger?.trackException?.(mastraError);
691
- return { threads: [], total: 0, page, perPage: perPageInput || 100, hasMore: false };
692
438
  }
693
439
  }
694
- async saveThread({ thread }) {
695
- try {
696
- const table = this.getTableName(storage.TABLE_THREADS);
697
- const mergeSql = `MERGE INTO ${table} WITH (HOLDLOCK) AS target
698
- USING (SELECT @id AS id) AS source
699
- ON (target.id = source.id)
700
- WHEN MATCHED THEN
701
- UPDATE SET
702
- [resourceId] = @resourceId,
703
- title = @title,
704
- metadata = @metadata,
705
- [createdAt] = @createdAt,
706
- [updatedAt] = @updatedAt
707
- WHEN NOT MATCHED THEN
708
- INSERT (id, [resourceId], title, metadata, [createdAt], [updatedAt])
709
- VALUES (@id, @resourceId, @title, @metadata, @createdAt, @updatedAt);`;
710
- const req = this.pool.request();
711
- req.input("id", thread.id);
712
- req.input("resourceId", thread.resourceId);
713
- req.input("title", thread.title);
714
- req.input("metadata", thread.metadata ? JSON.stringify(thread.metadata) : null);
715
- req.input("createdAt", thread.createdAt);
716
- req.input("updatedAt", thread.updatedAt);
717
- await req.query(mergeSql);
718
- return thread;
719
- } catch (error$1) {
720
- throw new error.MastraError(
721
- {
722
- id: "MASTRA_STORAGE_MSSQL_STORE_SAVE_THREAD_FAILED",
723
- domain: error.ErrorDomain.STORAGE,
724
- category: error.ErrorCategory.THIRD_PARTY,
725
- details: {
726
- threadId: thread.id
440
+ /**
441
+ * Prepares a value for database operations, handling Date objects and JSON serialization
442
+ */
443
+ prepareValue(value, columnName, tableName) {
444
+ if (value === null || value === void 0) {
445
+ return value;
446
+ }
447
+ if (value instanceof Date) {
448
+ return value;
449
+ }
450
+ const schema = storage.TABLE_SCHEMAS[tableName];
451
+ const columnSchema = schema?.[columnName];
452
+ if (columnSchema?.type === "boolean") {
453
+ return value ? 1 : 0;
454
+ }
455
+ if (columnSchema?.type === "jsonb") {
456
+ if (typeof value === "string") {
457
+ const trimmed = value.trim();
458
+ if (trimmed.length > 0) {
459
+ try {
460
+ JSON.parse(trimmed);
461
+ return trimmed;
462
+ } catch {
727
463
  }
728
- },
729
- error$1
730
- );
464
+ }
465
+ return JSON.stringify(value);
466
+ }
467
+ if (typeof value === "bigint") {
468
+ return value.toString();
469
+ }
470
+ return JSON.stringify(value);
731
471
  }
472
+ if (typeof value === "object") {
473
+ return JSON.stringify(value);
474
+ }
475
+ return value;
732
476
  }
733
477
  /**
734
- * @deprecated use getThreadsByResourceIdPaginated instead
478
+ * Maps TABLE_SCHEMAS types to mssql param types (used when value is null)
735
479
  */
736
- async getThreadsByResourceId(args) {
737
- const { resourceId } = args;
738
- try {
739
- const baseQuery = `FROM ${this.getTableName(storage.TABLE_THREADS)} WHERE [resourceId] = @resourceId`;
740
- const dataQuery = `SELECT id, [resourceId], title, metadata, [createdAt], [updatedAt] ${baseQuery} ORDER BY [seq_id] DESC`;
741
- const request = this.pool.request();
742
- request.input("resourceId", resourceId);
743
- const resultSet = await request.query(dataQuery);
744
- const rows = resultSet.recordset || [];
745
- return rows.map((thread) => ({
746
- ...thread,
747
- metadata: typeof thread.metadata === "string" ? JSON.parse(thread.metadata) : thread.metadata,
748
- createdAt: thread.createdAt,
749
- updatedAt: thread.updatedAt
750
- }));
751
- } catch (error) {
752
- this.logger?.error?.(`Error getting threads for resource ${resourceId}:`, error);
753
- return [];
480
+ getMssqlType(tableName, columnName) {
481
+ const col = storage.TABLE_SCHEMAS[tableName]?.[columnName];
482
+ switch (col?.type) {
483
+ case "text":
484
+ return sql__default.default.NVarChar;
485
+ case "timestamp":
486
+ return sql__default.default.DateTime2;
487
+ case "uuid":
488
+ return sql__default.default.UniqueIdentifier;
489
+ case "jsonb":
490
+ return sql__default.default.NVarChar;
491
+ case "integer":
492
+ return sql__default.default.Int;
493
+ case "bigint":
494
+ return sql__default.default.BigInt;
495
+ case "float":
496
+ return sql__default.default.Float;
497
+ case "boolean":
498
+ return sql__default.default.Bit;
499
+ default:
500
+ return sql__default.default.NVarChar;
754
501
  }
755
502
  }
756
503
  /**
757
- * Updates a thread's title and metadata, merging with existing metadata. Returns the updated thread.
504
+ * Update a single record in the database
758
505
  */
759
- async updateThread({
760
- id,
761
- title,
762
- metadata
506
+ async update({
507
+ tableName,
508
+ keys,
509
+ data,
510
+ transaction
763
511
  }) {
764
- const existingThread = await this.getThreadById({ threadId: id });
765
- if (!existingThread) {
766
- throw new error.MastraError({
767
- id: "MASTRA_STORAGE_MSSQL_STORE_UPDATE_THREAD_FAILED",
768
- domain: error.ErrorDomain.STORAGE,
769
- category: error.ErrorCategory.USER,
770
- text: `Thread ${id} not found`,
771
- details: {
772
- threadId: id,
773
- title
774
- }
775
- });
776
- }
777
- const mergedMetadata = {
778
- ...existingThread.metadata,
779
- ...metadata
780
- };
781
512
  try {
782
- const table = this.getTableName(storage.TABLE_THREADS);
783
- const sql2 = `UPDATE ${table}
784
- SET title = @title,
785
- metadata = @metadata,
786
- [updatedAt] = @updatedAt
787
- OUTPUT INSERTED.*
788
- WHERE id = @id`;
789
- const req = this.pool.request();
790
- req.input("id", id);
791
- req.input("title", title);
792
- req.input("metadata", JSON.stringify(mergedMetadata));
793
- req.input("updatedAt", (/* @__PURE__ */ new Date()).toISOString());
794
- const result = await req.query(sql2);
795
- let thread = result.recordset && result.recordset[0];
796
- if (thread && "seq_id" in thread) {
797
- const { seq_id, ...rest } = thread;
798
- thread = rest;
513
+ if (!data || Object.keys(data).length === 0) {
514
+ throw new error.MastraError({
515
+ id: storage.createStorageErrorId("MSSQL", "UPDATE", "EMPTY_DATA"),
516
+ domain: error.ErrorDomain.STORAGE,
517
+ category: error.ErrorCategory.USER,
518
+ text: "Cannot update with empty data payload"
519
+ });
799
520
  }
800
- if (!thread) {
521
+ if (!keys || Object.keys(keys).length === 0) {
801
522
  throw new error.MastraError({
802
- id: "MASTRA_STORAGE_MSSQL_STORE_UPDATE_THREAD_FAILED",
523
+ id: storage.createStorageErrorId("MSSQL", "UPDATE", "EMPTY_KEYS"),
803
524
  domain: error.ErrorDomain.STORAGE,
804
525
  category: error.ErrorCategory.USER,
805
- text: `Thread ${id} not found after update`,
806
- details: {
807
- threadId: id,
808
- title
809
- }
526
+ text: "Cannot update without keys to identify records"
810
527
  });
811
528
  }
812
- return {
813
- ...thread,
814
- metadata: typeof thread.metadata === "string" ? JSON.parse(thread.metadata) : thread.metadata,
815
- createdAt: thread.createdAt,
816
- updatedAt: thread.updatedAt
817
- };
529
+ const setClauses = [];
530
+ const request = transaction ? transaction.request() : this.pool.request();
531
+ let paramIndex = 0;
532
+ Object.entries(data).forEach(([key, value]) => {
533
+ const parsedKey = utils.parseSqlIdentifier(key, "column name");
534
+ const paramName = `set${paramIndex++}`;
535
+ setClauses.push(`[${parsedKey}] = @${paramName}`);
536
+ const preparedValue = this.prepareValue(value, key, tableName);
537
+ if (preparedValue === null || preparedValue === void 0) {
538
+ request.input(paramName, this.getMssqlType(tableName, key), null);
539
+ } else {
540
+ request.input(paramName, preparedValue);
541
+ }
542
+ });
543
+ const whereConditions = [];
544
+ Object.entries(keys).forEach(([key, value]) => {
545
+ const parsedKey = utils.parseSqlIdentifier(key, "column name");
546
+ const paramName = `where${paramIndex++}`;
547
+ whereConditions.push(`[${parsedKey}] = @${paramName}`);
548
+ const preparedValue = this.prepareValue(value, key, tableName);
549
+ if (preparedValue === null || preparedValue === void 0) {
550
+ request.input(paramName, this.getMssqlType(tableName, key), null);
551
+ } else {
552
+ request.input(paramName, preparedValue);
553
+ }
554
+ });
555
+ const tableName_ = getTableName({
556
+ indexName: tableName,
557
+ schemaName: getSchemaName(this.schemaName)
558
+ });
559
+ const updateSql = `UPDATE ${tableName_} SET ${setClauses.join(", ")} WHERE ${whereConditions.join(" AND ")}`;
560
+ await request.query(updateSql);
818
561
  } catch (error$1) {
819
562
  throw new error.MastraError(
820
563
  {
821
- id: "MASTRA_STORAGE_MSSQL_STORE_UPDATE_THREAD_FAILED",
564
+ id: storage.createStorageErrorId("MSSQL", "UPDATE", "FAILED"),
822
565
  domain: error.ErrorDomain.STORAGE,
823
566
  category: error.ErrorCategory.THIRD_PARTY,
824
567
  details: {
825
- threadId: id,
826
- title
568
+ tableName
827
569
  }
828
570
  },
829
571
  error$1
830
572
  );
831
573
  }
832
574
  }
833
- async deleteThread({ threadId }) {
834
- const messagesTable = this.getTableName(storage.TABLE_MESSAGES);
835
- const threadsTable = this.getTableName(storage.TABLE_THREADS);
836
- const deleteMessagesSql = `DELETE FROM ${messagesTable} WHERE [thread_id] = @threadId`;
837
- const deleteThreadSql = `DELETE FROM ${threadsTable} WHERE id = @threadId`;
838
- const tx = this.pool.transaction();
575
+ /**
576
+ * Update multiple records in a single batch transaction
577
+ */
578
+ async batchUpdate({
579
+ tableName,
580
+ updates
581
+ }) {
582
+ const transaction = this.pool.transaction();
839
583
  try {
840
- await tx.begin();
841
- const req = tx.request();
842
- req.input("threadId", threadId);
843
- await req.query(deleteMessagesSql);
844
- await req.query(deleteThreadSql);
845
- await tx.commit();
584
+ await transaction.begin();
585
+ for (const { keys, data } of updates) {
586
+ await this.update({ tableName, keys, data, transaction });
587
+ }
588
+ await transaction.commit();
846
589
  } catch (error$1) {
847
- await tx.rollback().catch(() => {
848
- });
590
+ await transaction.rollback();
849
591
  throw new error.MastraError(
850
592
  {
851
- id: "MASTRA_STORAGE_MSSQL_STORE_DELETE_THREAD_FAILED",
593
+ id: storage.createStorageErrorId("MSSQL", "BATCH_UPDATE", "FAILED"),
852
594
  domain: error.ErrorDomain.STORAGE,
853
595
  category: error.ErrorCategory.THIRD_PARTY,
854
596
  details: {
855
- threadId
597
+ tableName,
598
+ numberOfRecords: updates.length
856
599
  }
857
600
  },
858
601
  error$1
859
602
  );
860
603
  }
861
604
  }
862
- async _getIncludedMessages({
863
- threadId,
864
- selectBy,
865
- orderByStatement
866
- }) {
867
- const include = selectBy?.include;
868
- if (!include) return null;
869
- const unionQueries = [];
870
- const paramValues = [];
871
- let paramIdx = 1;
872
- const paramNames = [];
873
- for (const inc of include) {
874
- const { id, withPreviousMessages = 0, withNextMessages = 0 } = inc;
875
- const searchId = inc.threadId || threadId;
876
- const pThreadId = `@p${paramIdx}`;
877
- const pId = `@p${paramIdx + 1}`;
878
- const pPrev = `@p${paramIdx + 2}`;
879
- const pNext = `@p${paramIdx + 3}`;
880
- unionQueries.push(
881
- `
882
- SELECT
883
- m.id,
884
- m.content,
885
- m.role,
886
- m.type,
887
- m.[createdAt],
888
- m.thread_id AS threadId,
889
- m.[resourceId],
890
- m.seq_id
891
- FROM (
892
- SELECT *, ROW_NUMBER() OVER (${orderByStatement}) as row_num
893
- FROM ${this.getTableName(storage.TABLE_MESSAGES)}
894
- WHERE [thread_id] = ${pThreadId}
895
- ) AS m
896
- WHERE m.id = ${pId}
897
- OR EXISTS (
898
- SELECT 1
899
- FROM (
900
- SELECT *, ROW_NUMBER() OVER (${orderByStatement}) as row_num
901
- FROM ${this.getTableName(storage.TABLE_MESSAGES)}
902
- WHERE [thread_id] = ${pThreadId}
903
- ) AS target
904
- WHERE target.id = ${pId}
905
- AND (
906
- (m.row_num <= target.row_num + ${pPrev} AND m.row_num > target.row_num)
907
- OR
908
- (m.row_num >= target.row_num - ${pNext} AND m.row_num < target.row_num)
909
- )
910
- )
911
- `
912
- );
913
- paramValues.push(searchId, id, withPreviousMessages, withNextMessages);
914
- paramNames.push(`p${paramIdx}`, `p${paramIdx + 1}`, `p${paramIdx + 2}`, `p${paramIdx + 3}`);
915
- paramIdx += 4;
916
- }
917
- const finalQuery = `
918
- SELECT * FROM (
919
- ${unionQueries.join(" UNION ALL ")}
920
- ) AS union_result
921
- ORDER BY [seq_id] ASC
922
- `;
923
- const req = this.pool.request();
924
- for (let i = 0; i < paramValues.length; ++i) {
925
- req.input(paramNames[i], paramValues[i]);
605
+ /**
606
+ * Delete multiple records by keys
607
+ */
608
+ async batchDelete({ tableName, keys }) {
609
+ if (keys.length === 0) {
610
+ return;
926
611
  }
927
- const result = await req.query(finalQuery);
928
- const includedRows = result.recordset || [];
929
- const seen = /* @__PURE__ */ new Set();
930
- const dedupedRows = includedRows.filter((row) => {
931
- if (seen.has(row.id)) return false;
932
- seen.add(row.id);
933
- return true;
612
+ const tableName_ = getTableName({
613
+ indexName: tableName,
614
+ schemaName: getSchemaName(this.schemaName)
934
615
  });
935
- return dedupedRows;
936
- }
937
- async getMessages(args) {
938
- const { threadId, format, selectBy } = args;
939
- const selectStatement = `SELECT seq_id, id, content, role, type, [createdAt], thread_id AS threadId`;
940
- const orderByStatement = `ORDER BY [seq_id] DESC`;
941
- const limit = this.resolveMessageLimit({ last: selectBy?.last, defaultLimit: 40 });
616
+ const transaction = this.pool.transaction();
942
617
  try {
943
- let rows = [];
944
- const include = selectBy?.include || [];
945
- if (include?.length) {
946
- const includeMessages = await this._getIncludedMessages({ threadId, selectBy, orderByStatement });
947
- if (includeMessages) {
948
- rows.push(...includeMessages);
949
- }
950
- }
951
- const excludeIds = rows.map((m) => m.id).filter(Boolean);
952
- let query = `${selectStatement} FROM ${this.getTableName(storage.TABLE_MESSAGES)} WHERE [thread_id] = @threadId`;
953
- const request = this.pool.request();
954
- request.input("threadId", threadId);
955
- if (excludeIds.length > 0) {
956
- const excludeParams = excludeIds.map((_, idx) => `@id${idx}`);
957
- query += ` AND id NOT IN (${excludeParams.join(", ")})`;
958
- excludeIds.forEach((id, idx) => {
959
- request.input(`id${idx}`, id);
618
+ await transaction.begin();
619
+ for (const keySet of keys) {
620
+ const conditions = [];
621
+ const request = transaction.request();
622
+ let paramIndex = 0;
623
+ Object.entries(keySet).forEach(([key, value]) => {
624
+ const parsedKey = utils.parseSqlIdentifier(key, "column name");
625
+ const paramName = `p${paramIndex++}`;
626
+ conditions.push(`[${parsedKey}] = @${paramName}`);
627
+ const preparedValue = this.prepareValue(value, key, tableName);
628
+ if (preparedValue === null || preparedValue === void 0) {
629
+ request.input(paramName, this.getMssqlType(tableName, key), null);
630
+ } else {
631
+ request.input(paramName, preparedValue);
632
+ }
960
633
  });
634
+ const deleteSql = `DELETE FROM ${tableName_} WHERE ${conditions.join(" AND ")}`;
635
+ await request.query(deleteSql);
961
636
  }
962
- query += ` ${orderByStatement} OFFSET 0 ROWS FETCH NEXT @limit ROWS ONLY`;
963
- request.input("limit", limit);
964
- const result = await request.query(query);
965
- const remainingRows = result.recordset || [];
966
- rows.push(...remainingRows);
967
- rows.sort((a, b) => {
968
- const timeDiff = a.seq_id - b.seq_id;
969
- return timeDiff;
970
- });
971
- rows = rows.map(({ seq_id, ...rest }) => rest);
972
- const fetchedMessages = (rows || []).map((message) => {
973
- if (typeof message.content === "string") {
974
- try {
975
- message.content = JSON.parse(message.content);
976
- } catch {
977
- }
978
- }
979
- if (format === "v1") {
980
- if (Array.isArray(message.content)) ; else if (typeof message.content === "object" && message.content && Array.isArray(message.content.parts)) {
981
- message.content = message.content.parts;
982
- } else {
983
- message.content = [{ type: "text", text: "" }];
637
+ await transaction.commit();
638
+ } catch (error$1) {
639
+ await transaction.rollback();
640
+ throw new error.MastraError(
641
+ {
642
+ id: storage.createStorageErrorId("MSSQL", "BATCH_DELETE", "FAILED"),
643
+ domain: error.ErrorDomain.STORAGE,
644
+ category: error.ErrorCategory.THIRD_PARTY,
645
+ details: {
646
+ tableName,
647
+ numberOfRecords: keys.length
984
648
  }
985
- } else {
986
- if (typeof message.content !== "object" || !message.content || !("parts" in message.content)) {
987
- message.content = { format: 2, parts: [{ type: "text", text: "" }] };
649
+ },
650
+ error$1
651
+ );
652
+ }
653
+ }
654
+ /**
655
+ * Create a new index on a table
656
+ */
657
+ async createIndex(options) {
658
+ try {
659
+ const { name, table, columns, unique = false, where } = options;
660
+ const schemaName = this.schemaName || "dbo";
661
+ const fullTableName = getTableName({
662
+ indexName: table,
663
+ schemaName: getSchemaName(this.schemaName)
664
+ });
665
+ const indexNameSafe = utils.parseSqlIdentifier(name, "index name");
666
+ const checkRequest = this.pool.request();
667
+ checkRequest.input("indexName", indexNameSafe);
668
+ checkRequest.input("schemaName", schemaName);
669
+ checkRequest.input("tableName", table);
670
+ const indexExists = await checkRequest.query(`
671
+ SELECT 1 as found
672
+ FROM sys.indexes i
673
+ INNER JOIN sys.tables t ON i.object_id = t.object_id
674
+ INNER JOIN sys.schemas s ON t.schema_id = s.schema_id
675
+ WHERE i.name = @indexName
676
+ AND s.name = @schemaName
677
+ AND t.name = @tableName
678
+ `);
679
+ if (indexExists.recordset && indexExists.recordset.length > 0) {
680
+ return;
681
+ }
682
+ const uniqueStr = unique ? "UNIQUE " : "";
683
+ const columnsStr = columns.map((col) => {
684
+ if (col.includes(" DESC") || col.includes(" ASC")) {
685
+ const [colName, ...modifiers] = col.split(" ");
686
+ if (!colName) {
687
+ throw new Error(`Invalid column specification: ${col}`);
988
688
  }
689
+ return `[${utils.parseSqlIdentifier(colName, "column name")}] ${modifiers.join(" ")}`;
989
690
  }
990
- if (message.type === "v2") delete message.type;
991
- return message;
992
- });
993
- return format === "v2" ? fetchedMessages.map(
994
- (m) => ({ ...m, content: m.content || { format: 2, parts: [{ type: "text", text: "" }] } })
995
- ) : fetchedMessages;
691
+ return `[${utils.parseSqlIdentifier(col, "column name")}]`;
692
+ }).join(", ");
693
+ const whereStr = where ? ` WHERE ${where}` : "";
694
+ const createIndexSql = `CREATE ${uniqueStr}INDEX [${indexNameSafe}] ON ${fullTableName} (${columnsStr})${whereStr}`;
695
+ await this.pool.request().query(createIndexSql);
996
696
  } catch (error$1) {
997
- const mastraError = new error.MastraError(
697
+ throw new error.MastraError(
998
698
  {
999
- id: "MASTRA_STORAGE_MSSQL_STORE_GET_MESSAGES_FAILED",
699
+ id: storage.createStorageErrorId("MSSQL", "INDEX_CREATE", "FAILED"),
1000
700
  domain: error.ErrorDomain.STORAGE,
1001
701
  category: error.ErrorCategory.THIRD_PARTY,
1002
702
  details: {
1003
- threadId
703
+ indexName: options.name,
704
+ tableName: options.table
1004
705
  }
1005
706
  },
1006
707
  error$1
1007
708
  );
1008
- this.logger?.error?.(mastraError.toString());
1009
- this.logger?.trackException(mastraError);
1010
- return [];
1011
709
  }
1012
710
  }
1013
- async getMessagesPaginated(args) {
1014
- const { threadId, selectBy } = args;
1015
- const { page = 0, perPage: perPageInput } = selectBy?.pagination || {};
1016
- const orderByStatement = `ORDER BY [seq_id] DESC`;
1017
- if (selectBy?.include?.length) {
1018
- await this._getIncludedMessages({ threadId, selectBy, orderByStatement });
1019
- }
711
+ /**
712
+ * Drop an existing index
713
+ */
714
+ async dropIndex(indexName) {
1020
715
  try {
1021
- const { threadId: threadId2, format, selectBy: selectBy2 } = args;
1022
- const { page: page2 = 0, perPage: perPageInput2, dateRange } = selectBy2?.pagination || {};
1023
- const fromDate = dateRange?.start;
1024
- const toDate = dateRange?.end;
1025
- const selectStatement = `SELECT seq_id, id, content, role, type, [createdAt], thread_id AS threadId`;
1026
- const orderByStatement2 = `ORDER BY [seq_id] DESC`;
1027
- let messages2 = [];
1028
- if (selectBy2?.include?.length) {
1029
- const includeMessages = await this._getIncludedMessages({ threadId: threadId2, selectBy: selectBy2, orderByStatement: orderByStatement2 });
1030
- if (includeMessages) messages2.push(...includeMessages);
1031
- }
1032
- const perPage = perPageInput2 !== void 0 ? perPageInput2 : this.resolveMessageLimit({ last: selectBy2?.last, defaultLimit: 40 });
1033
- const currentOffset = page2 * perPage;
1034
- const conditions = ["[thread_id] = @threadId"];
1035
- const request = this.pool.request();
1036
- request.input("threadId", threadId2);
1037
- if (fromDate instanceof Date && !isNaN(fromDate.getTime())) {
1038
- conditions.push("[createdAt] >= @fromDate");
1039
- request.input("fromDate", fromDate.toISOString());
1040
- }
1041
- if (toDate instanceof Date && !isNaN(toDate.getTime())) {
1042
- conditions.push("[createdAt] <= @toDate");
1043
- request.input("toDate", toDate.toISOString());
716
+ const schemaName = this.schemaName || "dbo";
717
+ const indexNameSafe = utils.parseSqlIdentifier(indexName, "index name");
718
+ const checkRequest = this.pool.request();
719
+ checkRequest.input("indexName", indexNameSafe);
720
+ checkRequest.input("schemaName", schemaName);
721
+ const result = await checkRequest.query(`
722
+ SELECT t.name as table_name
723
+ FROM sys.indexes i
724
+ INNER JOIN sys.tables t ON i.object_id = t.object_id
725
+ INNER JOIN sys.schemas s ON t.schema_id = s.schema_id
726
+ WHERE i.name = @indexName
727
+ AND s.name = @schemaName
728
+ `);
729
+ if (!result.recordset || result.recordset.length === 0) {
730
+ return;
1044
731
  }
1045
- const whereClause = `WHERE ${conditions.join(" AND ")}`;
1046
- const countQuery = `SELECT COUNT(*) as total FROM ${this.getTableName(storage.TABLE_MESSAGES)} ${whereClause}`;
1047
- const countResult = await request.query(countQuery);
1048
- const total = parseInt(countResult.recordset[0]?.total, 10) || 0;
1049
- if (total === 0 && messages2.length > 0) {
1050
- const parsedIncluded = this._parseAndFormatMessages(messages2, format);
1051
- return {
1052
- messages: parsedIncluded,
1053
- total: parsedIncluded.length,
1054
- page: page2,
1055
- perPage,
1056
- hasMore: false
1057
- };
732
+ if (result.recordset.length > 1) {
733
+ const tables = result.recordset.map((r) => r.table_name).join(", ");
734
+ throw new error.MastraError({
735
+ id: storage.createStorageErrorId("MSSQL", "INDEX", "AMBIGUOUS"),
736
+ domain: error.ErrorDomain.STORAGE,
737
+ category: error.ErrorCategory.USER,
738
+ text: `Index "${indexNameSafe}" exists on multiple tables (${tables}) in schema "${schemaName}". Please drop indexes manually or ensure unique index names.`
739
+ });
1058
740
  }
1059
- const excludeIds = messages2.map((m) => m.id);
1060
- if (excludeIds.length > 0) {
1061
- const excludeParams = excludeIds.map((_, idx) => `@id${idx}`);
1062
- conditions.push(`id NOT IN (${excludeParams.join(", ")})`);
1063
- excludeIds.forEach((id, idx) => request.input(`id${idx}`, id));
1064
- }
1065
- const finalWhereClause = `WHERE ${conditions.join(" AND ")}`;
1066
- const dataQuery = `${selectStatement} FROM ${this.getTableName(storage.TABLE_MESSAGES)} ${finalWhereClause} ${orderByStatement2} OFFSET @offset ROWS FETCH NEXT @limit ROWS ONLY`;
1067
- request.input("offset", currentOffset);
1068
- request.input("limit", perPage);
1069
- const rowsResult = await request.query(dataQuery);
1070
- const rows = rowsResult.recordset || [];
1071
- rows.sort((a, b) => a.seq_id - b.seq_id);
1072
- messages2.push(...rows);
1073
- const parsed = this._parseAndFormatMessages(messages2, format);
1074
- return {
1075
- messages: parsed,
1076
- total: total + excludeIds.length,
1077
- page: page2,
1078
- perPage,
1079
- hasMore: currentOffset + rows.length < total
1080
- };
741
+ const tableName = result.recordset[0].table_name;
742
+ const fullTableName = getTableName({
743
+ indexName: tableName,
744
+ schemaName: getSchemaName(this.schemaName)
745
+ });
746
+ const dropSql = `DROP INDEX [${indexNameSafe}] ON ${fullTableName}`;
747
+ await this.pool.request().query(dropSql);
1081
748
  } catch (error$1) {
1082
- const mastraError = new error.MastraError(
749
+ throw new error.MastraError(
1083
750
  {
1084
- id: "MASTRA_STORAGE_MSSQL_STORE_GET_MESSAGES_PAGINATED_FAILED",
751
+ id: storage.createStorageErrorId("MSSQL", "INDEX_DROP", "FAILED"),
1085
752
  domain: error.ErrorDomain.STORAGE,
1086
753
  category: error.ErrorCategory.THIRD_PARTY,
1087
754
  details: {
1088
- threadId,
1089
- page
755
+ indexName
1090
756
  }
1091
757
  },
1092
758
  error$1
1093
759
  );
1094
- this.logger?.error?.(mastraError.toString());
1095
- this.logger?.trackException(mastraError);
1096
- return { messages: [], total: 0, page, perPage: perPageInput || 40, hasMore: false };
1097
760
  }
1098
761
  }
1099
- _parseAndFormatMessages(messages, format) {
1100
- const parsedMessages = messages.map((message) => {
1101
- let parsed = message;
1102
- if (typeof parsed.content === "string") {
1103
- try {
1104
- parsed = { ...parsed, content: JSON.parse(parsed.content) };
1105
- } catch {
1106
- }
1107
- }
1108
- if (format === "v1") {
1109
- if (Array.isArray(parsed.content)) ; else if (parsed.content?.parts) {
1110
- parsed.content = parsed.content.parts;
1111
- } else {
1112
- parsed.content = [{ type: "text", text: "" }];
1113
- }
762
+ /**
763
+ * List indexes for a specific table or all tables
764
+ */
765
+ async listIndexes(tableName) {
766
+ try {
767
+ const schemaName = this.schemaName || "dbo";
768
+ let query;
769
+ const request = this.pool.request();
770
+ request.input("schemaName", schemaName);
771
+ if (tableName) {
772
+ query = `
773
+ SELECT
774
+ i.name as name,
775
+ o.name as [table],
776
+ i.is_unique as is_unique,
777
+ CAST(SUM(s.used_page_count) * 8 / 1024.0 AS VARCHAR(50)) + ' MB' as size
778
+ FROM sys.indexes i
779
+ INNER JOIN sys.objects o ON i.object_id = o.object_id
780
+ INNER JOIN sys.schemas sch ON o.schema_id = sch.schema_id
781
+ LEFT JOIN sys.dm_db_partition_stats s ON i.object_id = s.object_id AND i.index_id = s.index_id
782
+ WHERE sch.name = @schemaName
783
+ AND o.name = @tableName
784
+ AND i.name IS NOT NULL
785
+ GROUP BY i.name, o.name, i.is_unique
786
+ `;
787
+ request.input("tableName", tableName);
1114
788
  } else {
1115
- if (!parsed.content?.parts) {
1116
- parsed = { ...parsed, content: { format: 2, parts: [{ type: "text", text: "" }] } };
1117
- }
789
+ query = `
790
+ SELECT
791
+ i.name as name,
792
+ o.name as [table],
793
+ i.is_unique as is_unique,
794
+ CAST(SUM(s.used_page_count) * 8 / 1024.0 AS VARCHAR(50)) + ' MB' as size
795
+ FROM sys.indexes i
796
+ INNER JOIN sys.objects o ON i.object_id = o.object_id
797
+ INNER JOIN sys.schemas sch ON o.schema_id = sch.schema_id
798
+ LEFT JOIN sys.dm_db_partition_stats s ON i.object_id = s.object_id AND i.index_id = s.index_id
799
+ WHERE sch.name = @schemaName
800
+ AND i.name IS NOT NULL
801
+ GROUP BY i.name, o.name, i.is_unique
802
+ `;
1118
803
  }
1119
- return parsed;
1120
- });
1121
- const list = new agent.MessageList().add(parsedMessages, "memory");
1122
- return format === "v2" ? list.get.all.v2() : list.get.all.v1();
1123
- }
1124
- async saveMessages({
1125
- messages,
1126
- format
1127
- }) {
1128
- if (messages.length === 0) return messages;
1129
- const threadId = messages[0]?.threadId;
1130
- if (!threadId) {
1131
- throw new error.MastraError({
1132
- id: "MASTRA_STORAGE_MSSQL_STORE_SAVE_MESSAGES_FAILED",
1133
- domain: error.ErrorDomain.STORAGE,
1134
- category: error.ErrorCategory.THIRD_PARTY,
1135
- text: `Thread ID is required`
1136
- });
1137
- }
1138
- const thread = await this.getThreadById({ threadId });
1139
- if (!thread) {
1140
- throw new error.MastraError({
1141
- id: "MASTRA_STORAGE_MSSQL_STORE_SAVE_MESSAGES_FAILED",
1142
- domain: error.ErrorDomain.STORAGE,
1143
- category: error.ErrorCategory.THIRD_PARTY,
1144
- text: `Thread ${threadId} not found`,
1145
- details: { threadId }
1146
- });
1147
- }
1148
- const tableMessages = this.getTableName(storage.TABLE_MESSAGES);
1149
- const tableThreads = this.getTableName(storage.TABLE_THREADS);
1150
- try {
1151
- const transaction = this.pool.transaction();
1152
- await transaction.begin();
1153
- try {
1154
- for (const message of messages) {
1155
- if (!message.threadId) {
1156
- throw new Error(
1157
- `Expected to find a threadId for message, but couldn't find one. An unexpected error has occurred.`
1158
- );
1159
- }
1160
- if (!message.resourceId) {
1161
- throw new Error(
1162
- `Expected to find a resourceId for message, but couldn't find one. An unexpected error has occurred.`
1163
- );
1164
- }
1165
- const request = transaction.request();
1166
- request.input("id", message.id);
1167
- request.input("thread_id", message.threadId);
1168
- request.input(
1169
- "content",
1170
- typeof message.content === "string" ? message.content : JSON.stringify(message.content)
1171
- );
1172
- request.input("createdAt", message.createdAt.toISOString() || (/* @__PURE__ */ new Date()).toISOString());
1173
- request.input("role", message.role);
1174
- request.input("type", message.type || "v2");
1175
- request.input("resourceId", message.resourceId);
1176
- const mergeSql = `MERGE INTO ${tableMessages} AS target
1177
- USING (SELECT @id AS id) AS src
1178
- ON target.id = src.id
1179
- WHEN MATCHED THEN UPDATE SET
1180
- thread_id = @thread_id,
1181
- content = @content,
1182
- [createdAt] = @createdAt,
1183
- role = @role,
1184
- type = @type,
1185
- resourceId = @resourceId
1186
- WHEN NOT MATCHED THEN INSERT (id, thread_id, content, [createdAt], role, type, resourceId)
1187
- VALUES (@id, @thread_id, @content, @createdAt, @role, @type, @resourceId);`;
1188
- await request.query(mergeSql);
1189
- }
1190
- const threadReq = transaction.request();
1191
- threadReq.input("updatedAt", (/* @__PURE__ */ new Date()).toISOString());
1192
- threadReq.input("id", threadId);
1193
- await threadReq.query(`UPDATE ${tableThreads} SET [updatedAt] = @updatedAt WHERE id = @id`);
1194
- await transaction.commit();
1195
- } catch (error) {
1196
- await transaction.rollback();
1197
- throw error;
804
+ const result = await request.query(query);
805
+ const indexes = [];
806
+ for (const row of result.recordset) {
807
+ const colRequest = this.pool.request();
808
+ colRequest.input("indexName", row.name);
809
+ colRequest.input("schemaName", schemaName);
810
+ const colResult = await colRequest.query(`
811
+ SELECT c.name as column_name
812
+ FROM sys.indexes i
813
+ INNER JOIN sys.index_columns ic ON i.object_id = ic.object_id AND i.index_id = ic.index_id
814
+ INNER JOIN sys.columns c ON ic.object_id = c.object_id AND ic.column_id = c.column_id
815
+ INNER JOIN sys.objects o ON i.object_id = o.object_id
816
+ INNER JOIN sys.schemas s ON o.schema_id = s.schema_id
817
+ WHERE i.name = @indexName
818
+ AND s.name = @schemaName
819
+ ORDER BY ic.key_ordinal
820
+ `);
821
+ indexes.push({
822
+ name: row.name,
823
+ table: row.table,
824
+ columns: colResult.recordset.map((c) => c.column_name),
825
+ unique: row.is_unique || false,
826
+ size: row.size || "0 MB",
827
+ definition: ""
828
+ // MSSQL doesn't store definition like PG
829
+ });
1198
830
  }
1199
- const messagesWithParsedContent = messages.map((message) => {
1200
- if (typeof message.content === "string") {
1201
- try {
1202
- return { ...message, content: JSON.parse(message.content) };
1203
- } catch {
1204
- return message;
1205
- }
1206
- }
1207
- return message;
1208
- });
1209
- const list = new agent.MessageList().add(messagesWithParsedContent, "memory");
1210
- if (format === "v2") return list.get.all.v2();
1211
- return list.get.all.v1();
831
+ return indexes;
1212
832
  } catch (error$1) {
1213
833
  throw new error.MastraError(
1214
834
  {
1215
- id: "MASTRA_STORAGE_MSSQL_STORE_SAVE_MESSAGES_FAILED",
835
+ id: storage.createStorageErrorId("MSSQL", "INDEX_LIST", "FAILED"),
1216
836
  domain: error.ErrorDomain.STORAGE,
1217
837
  category: error.ErrorCategory.THIRD_PARTY,
1218
- details: { threadId }
838
+ details: tableName ? {
839
+ tableName
840
+ } : {}
1219
841
  },
1220
842
  error$1
1221
843
  );
1222
844
  }
1223
845
  }
1224
- async persistWorkflowSnapshot({
1225
- workflowName,
1226
- runId,
1227
- snapshot
1228
- }) {
1229
- const table = this.getTableName(storage.TABLE_WORKFLOW_SNAPSHOT);
1230
- const now = (/* @__PURE__ */ new Date()).toISOString();
846
+ /**
847
+ * Get detailed statistics for a specific index
848
+ */
849
+ async describeIndex(indexName) {
1231
850
  try {
851
+ const schemaName = this.schemaName || "dbo";
1232
852
  const request = this.pool.request();
1233
- request.input("workflow_name", workflowName);
1234
- request.input("run_id", runId);
1235
- request.input("snapshot", JSON.stringify(snapshot));
1236
- request.input("createdAt", now);
1237
- request.input("updatedAt", now);
1238
- const mergeSql = `MERGE INTO ${table} AS target
1239
- USING (SELECT @workflow_name AS workflow_name, @run_id AS run_id) AS src
1240
- ON target.workflow_name = src.workflow_name AND target.run_id = src.run_id
1241
- WHEN MATCHED THEN UPDATE SET
1242
- snapshot = @snapshot,
1243
- [updatedAt] = @updatedAt
1244
- WHEN NOT MATCHED THEN INSERT (workflow_name, run_id, snapshot, [createdAt], [updatedAt])
1245
- VALUES (@workflow_name, @run_id, @snapshot, @createdAt, @updatedAt);`;
1246
- await request.query(mergeSql);
853
+ request.input("indexName", indexName);
854
+ request.input("schemaName", schemaName);
855
+ const query = `
856
+ SELECT
857
+ i.name as name,
858
+ o.name as [table],
859
+ i.is_unique as is_unique,
860
+ CAST(SUM(s.used_page_count) * 8 / 1024.0 AS VARCHAR(50)) + ' MB' as size,
861
+ i.type_desc as method,
862
+ ISNULL(us.user_scans, 0) as scans,
863
+ ISNULL(us.user_seeks + us.user_scans, 0) as tuples_read,
864
+ ISNULL(us.user_lookups, 0) as tuples_fetched
865
+ FROM sys.indexes i
866
+ INNER JOIN sys.objects o ON i.object_id = o.object_id
867
+ INNER JOIN sys.schemas sch ON o.schema_id = sch.schema_id
868
+ LEFT JOIN sys.dm_db_partition_stats s ON i.object_id = s.object_id AND i.index_id = s.index_id
869
+ LEFT JOIN sys.dm_db_index_usage_stats us ON i.object_id = us.object_id AND i.index_id = us.index_id
870
+ WHERE i.name = @indexName
871
+ AND sch.name = @schemaName
872
+ GROUP BY i.name, o.name, i.is_unique, i.type_desc, us.user_seeks, us.user_scans, us.user_lookups
873
+ `;
874
+ const result = await request.query(query);
875
+ if (!result.recordset || result.recordset.length === 0) {
876
+ throw new Error(`Index "${indexName}" not found in schema "${schemaName}"`);
877
+ }
878
+ const row = result.recordset[0];
879
+ const colRequest = this.pool.request();
880
+ colRequest.input("indexName", indexName);
881
+ colRequest.input("schemaName", schemaName);
882
+ const colResult = await colRequest.query(`
883
+ SELECT c.name as column_name
884
+ FROM sys.indexes i
885
+ INNER JOIN sys.index_columns ic ON i.object_id = ic.object_id AND i.index_id = ic.index_id
886
+ INNER JOIN sys.columns c ON ic.object_id = c.object_id AND ic.column_id = c.column_id
887
+ INNER JOIN sys.objects o ON i.object_id = o.object_id
888
+ INNER JOIN sys.schemas s ON o.schema_id = s.schema_id
889
+ WHERE i.name = @indexName
890
+ AND s.name = @schemaName
891
+ ORDER BY ic.key_ordinal
892
+ `);
893
+ return {
894
+ name: row.name,
895
+ table: row.table,
896
+ columns: colResult.recordset.map((c) => c.column_name),
897
+ unique: row.is_unique || false,
898
+ size: row.size || "0 MB",
899
+ definition: "",
900
+ method: row.method?.toLowerCase() || "nonclustered",
901
+ scans: Number(row.scans) || 0,
902
+ tuples_read: Number(row.tuples_read) || 0,
903
+ tuples_fetched: Number(row.tuples_fetched) || 0
904
+ };
1247
905
  } catch (error$1) {
1248
906
  throw new error.MastraError(
1249
907
  {
1250
- id: "MASTRA_STORAGE_MSSQL_STORE_PERSIST_WORKFLOW_SNAPSHOT_FAILED",
908
+ id: storage.createStorageErrorId("MSSQL", "INDEX_DESCRIBE", "FAILED"),
1251
909
  domain: error.ErrorDomain.STORAGE,
1252
910
  category: error.ErrorCategory.THIRD_PARTY,
1253
911
  details: {
1254
- workflowName,
1255
- runId
912
+ indexName
1256
913
  }
1257
914
  },
1258
915
  error$1
1259
916
  );
1260
917
  }
1261
918
  }
1262
- async loadWorkflowSnapshot({
1263
- workflowName,
1264
- runId
1265
- }) {
919
+ /**
920
+ * Returns definitions for automatic performance indexes
921
+ * IMPORTANT: Uses seq_id DESC instead of createdAt DESC for MSSQL due to millisecond accuracy limitations
922
+ * NOTE: Using NVARCHAR(400) for text columns (800 bytes) leaves room for composite indexes
923
+ */
924
+ getAutomaticIndexDefinitions() {
925
+ const schemaPrefix = this.schemaName ? `${this.schemaName}_` : "";
926
+ return [
927
+ // Composite indexes for optimal filtering + sorting performance
928
+ // NVARCHAR(400) = 800 bytes, plus BIGINT (8 bytes) = 808 bytes total (under 900-byte limit)
929
+ {
930
+ name: `${schemaPrefix}mastra_threads_resourceid_seqid_idx`,
931
+ table: storage.TABLE_THREADS,
932
+ columns: ["resourceId", "seq_id DESC"]
933
+ },
934
+ {
935
+ name: `${schemaPrefix}mastra_messages_thread_id_seqid_idx`,
936
+ table: storage.TABLE_MESSAGES,
937
+ columns: ["thread_id", "seq_id DESC"]
938
+ },
939
+ {
940
+ name: `${schemaPrefix}mastra_traces_name_seqid_idx`,
941
+ table: storage.TABLE_TRACES,
942
+ columns: ["name", "seq_id DESC"]
943
+ },
944
+ {
945
+ name: `${schemaPrefix}mastra_scores_trace_id_span_id_seqid_idx`,
946
+ table: storage.TABLE_SCORERS,
947
+ columns: ["traceId", "spanId", "seq_id DESC"]
948
+ },
949
+ // Spans indexes for optimal trace querying
950
+ {
951
+ name: `${schemaPrefix}mastra_ai_spans_traceid_startedat_idx`,
952
+ table: storage.TABLE_SPANS,
953
+ columns: ["traceId", "startedAt DESC"]
954
+ },
955
+ {
956
+ name: `${schemaPrefix}mastra_ai_spans_parentspanid_startedat_idx`,
957
+ table: storage.TABLE_SPANS,
958
+ columns: ["parentSpanId", "startedAt DESC"]
959
+ },
960
+ {
961
+ name: `${schemaPrefix}mastra_ai_spans_name_idx`,
962
+ table: storage.TABLE_SPANS,
963
+ columns: ["name"]
964
+ },
965
+ {
966
+ name: `${schemaPrefix}mastra_ai_spans_spantype_startedat_idx`,
967
+ table: storage.TABLE_SPANS,
968
+ columns: ["spanType", "startedAt DESC"]
969
+ }
970
+ ];
971
+ }
972
+ /**
973
+ * Creates automatic indexes for optimal query performance
974
+ * Uses getAutomaticIndexDefinitions() to determine which indexes to create
975
+ */
976
+ async createAutomaticIndexes() {
1266
977
  try {
1267
- const result = await this.load({
1268
- tableName: storage.TABLE_WORKFLOW_SNAPSHOT,
1269
- keys: {
1270
- workflow_name: workflowName,
1271
- run_id: runId
978
+ const indexes = this.getAutomaticIndexDefinitions();
979
+ for (const indexOptions of indexes) {
980
+ try {
981
+ await this.createIndex(indexOptions);
982
+ } catch (error) {
983
+ this.logger?.warn?.(`Failed to create index ${indexOptions.name}:`, error);
1272
984
  }
1273
- });
1274
- if (!result) {
1275
- return null;
1276
985
  }
1277
- return result.snapshot;
1278
986
  } catch (error$1) {
1279
987
  throw new error.MastraError(
1280
988
  {
1281
- id: "MASTRA_STORAGE_MSSQL_STORE_LOAD_WORKFLOW_SNAPSHOT_FAILED",
989
+ id: storage.createStorageErrorId("MSSQL", "CREATE_PERFORMANCE_INDEXES", "FAILED"),
1282
990
  domain: error.ErrorDomain.STORAGE,
1283
- category: error.ErrorCategory.THIRD_PARTY,
1284
- details: {
1285
- workflowName,
1286
- runId
1287
- }
991
+ category: error.ErrorCategory.THIRD_PARTY
1288
992
  },
1289
993
  error$1
1290
994
  );
1291
995
  }
1292
996
  }
1293
- async hasColumn(table, column) {
1294
- const schema = this.schema || "dbo";
1295
- const request = this.pool.request();
1296
- request.input("schema", schema);
1297
- request.input("table", table);
1298
- request.input("column", column);
1299
- request.input("columnLower", column.toLowerCase());
1300
- const result = await request.query(
1301
- `SELECT 1 FROM INFORMATION_SCHEMA.COLUMNS WHERE TABLE_SCHEMA = @schema AND TABLE_NAME = @table AND (COLUMN_NAME = @column OR COLUMN_NAME = @columnLower)`
1302
- );
1303
- return result.recordset.length > 0;
997
+ };
998
+ function getSchemaName2(schema) {
999
+ return schema ? `[${utils.parseSqlIdentifier(schema, "schema name")}]` : void 0;
1000
+ }
1001
+ function getTableName2({ indexName, schemaName }) {
1002
+ const parsedIndexName = utils.parseSqlIdentifier(indexName, "index name");
1003
+ const quotedIndexName = `[${parsedIndexName}]`;
1004
+ const quotedSchemaName = schemaName;
1005
+ return quotedSchemaName ? `${quotedSchemaName}.${quotedIndexName}` : quotedIndexName;
1006
+ }
1007
+ function buildDateRangeFilter(dateRange, fieldName) {
1008
+ const filters = {};
1009
+ if (dateRange?.start) {
1010
+ filters[`${fieldName}_gte`] = dateRange.start;
1304
1011
  }
1305
- parseWorkflowRun(row) {
1306
- let parsedSnapshot = row.snapshot;
1307
- if (typeof parsedSnapshot === "string") {
1308
- try {
1309
- parsedSnapshot = JSON.parse(row.snapshot);
1310
- } catch (e) {
1311
- console.warn(`Failed to parse snapshot for workflow ${row.workflow_name}: ${e}`);
1312
- }
1313
- }
1314
- return {
1315
- workflowName: row.workflow_name,
1316
- runId: row.run_id,
1317
- snapshot: parsedSnapshot,
1318
- createdAt: row.createdAt,
1319
- updatedAt: row.updatedAt,
1320
- resourceId: row.resourceId
1321
- };
1012
+ if (dateRange?.end) {
1013
+ filters[`${fieldName}_lte`] = dateRange.end;
1322
1014
  }
1323
- async getWorkflowRuns({
1324
- workflowName,
1325
- fromDate,
1326
- toDate,
1327
- limit,
1328
- offset,
1329
- resourceId
1330
- } = {}) {
1331
- try {
1332
- const conditions = [];
1333
- const paramMap = {};
1334
- if (workflowName) {
1335
- conditions.push(`[workflow_name] = @workflowName`);
1336
- paramMap["workflowName"] = workflowName;
1337
- }
1338
- if (resourceId) {
1339
- const hasResourceId = await this.hasColumn(storage.TABLE_WORKFLOW_SNAPSHOT, "resourceId");
1340
- if (hasResourceId) {
1341
- conditions.push(`[resourceId] = @resourceId`);
1342
- paramMap["resourceId"] = resourceId;
1343
- } else {
1344
- console.warn(`[${storage.TABLE_WORKFLOW_SNAPSHOT}] resourceId column not found. Skipping resourceId filter.`);
1015
+ return filters;
1016
+ }
1017
+ function isInOperator(value) {
1018
+ return typeof value === "object" && value !== null && "$in" in value && Array.isArray(value.$in);
1019
+ }
1020
+ function prepareWhereClause(filters, _schema) {
1021
+ const conditions = [];
1022
+ const params = {};
1023
+ let paramIndex = 1;
1024
+ Object.entries(filters).forEach(([key, value]) => {
1025
+ if (value === void 0) return;
1026
+ if (key.endsWith("_gte")) {
1027
+ const paramName = `p${paramIndex++}`;
1028
+ const fieldName = key.slice(0, -4);
1029
+ conditions.push(`[${utils.parseSqlIdentifier(fieldName, "field name")}] >= @${paramName}`);
1030
+ params[paramName] = value instanceof Date ? value.toISOString() : value;
1031
+ } else if (key.endsWith("_lte")) {
1032
+ const paramName = `p${paramIndex++}`;
1033
+ const fieldName = key.slice(0, -4);
1034
+ conditions.push(`[${utils.parseSqlIdentifier(fieldName, "field name")}] <= @${paramName}`);
1035
+ params[paramName] = value instanceof Date ? value.toISOString() : value;
1036
+ } else if (value === null) {
1037
+ conditions.push(`[${utils.parseSqlIdentifier(key, "field name")}] IS NULL`);
1038
+ } else if (isInOperator(value)) {
1039
+ const inValues = value.$in;
1040
+ if (inValues.length === 0) {
1041
+ conditions.push("1 = 0");
1042
+ } else if (inValues.length === 1) {
1043
+ const paramName = `p${paramIndex++}`;
1044
+ conditions.push(`[${utils.parseSqlIdentifier(key, "field name")}] = @${paramName}`);
1045
+ params[paramName] = inValues[0] instanceof Date ? inValues[0].toISOString() : inValues[0];
1046
+ } else {
1047
+ const inParamNames = [];
1048
+ for (const item of inValues) {
1049
+ const paramName = `p${paramIndex++}`;
1050
+ inParamNames.push(`@${paramName}`);
1051
+ params[paramName] = item instanceof Date ? item.toISOString() : item;
1345
1052
  }
1053
+ conditions.push(`[${utils.parseSqlIdentifier(key, "field name")}] IN (${inParamNames.join(", ")})`);
1346
1054
  }
1347
- if (fromDate instanceof Date && !isNaN(fromDate.getTime())) {
1348
- conditions.push(`[createdAt] >= @fromDate`);
1349
- paramMap[`fromDate`] = fromDate.toISOString();
1055
+ } else if (Array.isArray(value)) {
1056
+ if (value.length === 0) {
1057
+ conditions.push("1 = 0");
1058
+ } else if (value.length === 1) {
1059
+ const paramName = `p${paramIndex++}`;
1060
+ conditions.push(`[${utils.parseSqlIdentifier(key, "field name")}] = @${paramName}`);
1061
+ params[paramName] = value[0] instanceof Date ? value[0].toISOString() : value[0];
1062
+ } else {
1063
+ const inParamNames = [];
1064
+ for (const item of value) {
1065
+ const paramName = `p${paramIndex++}`;
1066
+ inParamNames.push(`@${paramName}`);
1067
+ params[paramName] = item instanceof Date ? item.toISOString() : item;
1068
+ }
1069
+ conditions.push(`[${utils.parseSqlIdentifier(key, "field name")}] IN (${inParamNames.join(", ")})`);
1350
1070
  }
1351
- if (toDate instanceof Date && !isNaN(toDate.getTime())) {
1352
- conditions.push(`[createdAt] <= @toDate`);
1353
- paramMap[`toDate`] = toDate.toISOString();
1071
+ } else {
1072
+ const paramName = `p${paramIndex++}`;
1073
+ conditions.push(`[${utils.parseSqlIdentifier(key, "field name")}] = @${paramName}`);
1074
+ params[paramName] = value instanceof Date ? value.toISOString() : value;
1075
+ }
1076
+ });
1077
+ return {
1078
+ sql: conditions.length > 0 ? ` WHERE ${conditions.join(" AND ")}` : "",
1079
+ params
1080
+ };
1081
+ }
1082
+ function transformFromSqlRow({
1083
+ tableName,
1084
+ sqlRow
1085
+ }) {
1086
+ const schema = storage.TABLE_SCHEMAS[tableName];
1087
+ const result = {};
1088
+ Object.entries(sqlRow).forEach(([key, value]) => {
1089
+ const columnSchema = schema?.[key];
1090
+ if (columnSchema?.type === "jsonb" && typeof value === "string") {
1091
+ try {
1092
+ result[key] = JSON.parse(value);
1093
+ } catch {
1094
+ result[key] = value;
1354
1095
  }
1355
- const whereClause = conditions.length > 0 ? `WHERE ${conditions.join(" AND ")}` : "";
1356
- let total = 0;
1357
- const tableName = this.getTableName(storage.TABLE_WORKFLOW_SNAPSHOT);
1358
- const request = this.pool.request();
1359
- Object.entries(paramMap).forEach(([key, value]) => {
1360
- if (value instanceof Date) {
1361
- request.input(key, sql__default.default.DateTime, value);
1362
- } else {
1363
- request.input(key, value);
1096
+ } else if (columnSchema?.type === "timestamp" && value && typeof value === "string") {
1097
+ result[key] = new Date(value);
1098
+ } else if (columnSchema?.type === "timestamp" && value instanceof Date) {
1099
+ result[key] = value;
1100
+ } else if (columnSchema?.type === "boolean") {
1101
+ result[key] = Boolean(value);
1102
+ } else {
1103
+ result[key] = value;
1104
+ }
1105
+ });
1106
+ return result;
1107
+ }
1108
+
1109
+ // src/storage/domains/memory/index.ts
1110
+ var MemoryMSSQL = class extends storage.MemoryStorage {
1111
+ pool;
1112
+ schema;
1113
+ db;
1114
+ needsConnect;
1115
+ _parseAndFormatMessages(messages, format) {
1116
+ const messagesWithParsedContent = messages.map((message) => {
1117
+ if (typeof message.content === "string") {
1118
+ try {
1119
+ return { ...message, content: JSON.parse(message.content) };
1120
+ } catch {
1121
+ return message;
1364
1122
  }
1365
- });
1366
- if (limit !== void 0 && offset !== void 0) {
1367
- const countQuery = `SELECT COUNT(*) as count FROM ${tableName} ${whereClause}`;
1368
- const countResult = await request.query(countQuery);
1369
- total = Number(countResult.recordset[0]?.count || 0);
1370
1123
  }
1371
- let query = `SELECT * FROM ${tableName} ${whereClause} ORDER BY [seq_id] DESC`;
1372
- if (limit !== void 0 && offset !== void 0) {
1373
- query += ` OFFSET @offset ROWS FETCH NEXT @limit ROWS ONLY`;
1374
- request.input("limit", limit);
1375
- request.input("offset", offset);
1124
+ return message;
1125
+ });
1126
+ const cleanMessages = messagesWithParsedContent.map(({ seq_id, ...rest }) => rest);
1127
+ const list = new agent.MessageList().add(cleanMessages, "memory");
1128
+ return format === "v2" ? list.get.all.db() : list.get.all.v1();
1129
+ }
1130
+ constructor(config) {
1131
+ super();
1132
+ const { pool, db, schema, needsConnect } = resolveMssqlConfig(config);
1133
+ this.pool = pool;
1134
+ this.schema = schema;
1135
+ this.db = db;
1136
+ this.needsConnect = needsConnect;
1137
+ }
1138
+ async init() {
1139
+ if (this.needsConnect) {
1140
+ await this.pool.connect();
1141
+ this.needsConnect = false;
1142
+ }
1143
+ await this.db.createTable({ tableName: storage.TABLE_THREADS, schema: storage.TABLE_SCHEMAS[storage.TABLE_THREADS] });
1144
+ await this.db.createTable({ tableName: storage.TABLE_MESSAGES, schema: storage.TABLE_SCHEMAS[storage.TABLE_MESSAGES] });
1145
+ await this.db.createTable({ tableName: storage.TABLE_RESOURCES, schema: storage.TABLE_SCHEMAS[storage.TABLE_RESOURCES] });
1146
+ }
1147
+ async dangerouslyClearAll() {
1148
+ await this.db.clearTable({ tableName: storage.TABLE_MESSAGES });
1149
+ await this.db.clearTable({ tableName: storage.TABLE_THREADS });
1150
+ await this.db.clearTable({ tableName: storage.TABLE_RESOURCES });
1151
+ }
1152
+ async getThreadById({ threadId }) {
1153
+ try {
1154
+ const sql5 = `SELECT
1155
+ id,
1156
+ [resourceId],
1157
+ title,
1158
+ metadata,
1159
+ [createdAt],
1160
+ [updatedAt]
1161
+ FROM ${getTableName2({ indexName: storage.TABLE_THREADS, schemaName: getSchemaName2(this.schema) })}
1162
+ WHERE id = @threadId`;
1163
+ const request = this.pool.request();
1164
+ request.input("threadId", threadId);
1165
+ const resultSet = await request.query(sql5);
1166
+ const thread = resultSet.recordset[0] || null;
1167
+ if (!thread) {
1168
+ return null;
1376
1169
  }
1377
- const result = await request.query(query);
1378
- const runs = (result.recordset || []).map((row) => this.parseWorkflowRun(row));
1379
- return { runs, total: total || runs.length };
1170
+ return {
1171
+ ...thread,
1172
+ metadata: typeof thread.metadata === "string" ? JSON.parse(thread.metadata) : thread.metadata,
1173
+ createdAt: thread.createdAt,
1174
+ updatedAt: thread.updatedAt
1175
+ };
1380
1176
  } catch (error$1) {
1381
1177
  throw new error.MastraError(
1382
1178
  {
1383
- id: "MASTRA_STORAGE_MSSQL_STORE_GET_WORKFLOW_RUNS_FAILED",
1179
+ id: storage.createStorageErrorId("MSSQL", "GET_THREAD_BY_ID", "FAILED"),
1384
1180
  domain: error.ErrorDomain.STORAGE,
1385
1181
  category: error.ErrorCategory.THIRD_PARTY,
1386
1182
  details: {
1387
- workflowName: workflowName || "all"
1183
+ threadId
1388
1184
  }
1389
1185
  },
1390
1186
  error$1
1391
1187
  );
1392
1188
  }
1393
1189
  }
1394
- async getWorkflowRunById({
1395
- runId,
1396
- workflowName
1397
- }) {
1190
+ async listThreadsByResourceId(args) {
1191
+ const { resourceId, page = 0, perPage: perPageInput, orderBy } = args;
1192
+ if (page < 0) {
1193
+ throw new error.MastraError({
1194
+ id: storage.createStorageErrorId("MSSQL", "LIST_THREADS_BY_RESOURCE_ID", "INVALID_PAGE"),
1195
+ domain: error.ErrorDomain.STORAGE,
1196
+ category: error.ErrorCategory.USER,
1197
+ text: "Page number must be non-negative",
1198
+ details: {
1199
+ resourceId,
1200
+ page
1201
+ }
1202
+ });
1203
+ }
1204
+ const perPage = storage.normalizePerPage(perPageInput, 100);
1205
+ const { offset, perPage: perPageForResponse } = storage.calculatePagination(page, perPageInput, perPage);
1206
+ const { field, direction } = this.parseOrderBy(orderBy);
1398
1207
  try {
1399
- const conditions = [];
1400
- const paramMap = {};
1401
- if (runId) {
1402
- conditions.push(`[run_id] = @runId`);
1403
- paramMap["runId"] = runId;
1208
+ const baseQuery = `FROM ${getTableName2({ indexName: storage.TABLE_THREADS, schemaName: getSchemaName2(this.schema) })} WHERE [resourceId] = @resourceId`;
1209
+ const countQuery = `SELECT COUNT(*) as count ${baseQuery}`;
1210
+ const countRequest = this.pool.request();
1211
+ countRequest.input("resourceId", resourceId);
1212
+ const countResult = await countRequest.query(countQuery);
1213
+ const total = parseInt(countResult.recordset[0]?.count ?? "0", 10);
1214
+ if (total === 0) {
1215
+ return {
1216
+ threads: [],
1217
+ total: 0,
1218
+ page,
1219
+ perPage: perPageForResponse,
1220
+ hasMore: false
1221
+ };
1404
1222
  }
1405
- if (workflowName) {
1406
- conditions.push(`[workflow_name] = @workflowName`);
1407
- paramMap["workflowName"] = workflowName;
1223
+ const orderByField = field === "createdAt" ? "[createdAt]" : "[updatedAt]";
1224
+ const dir = (direction || "DESC").toUpperCase() === "ASC" ? "ASC" : "DESC";
1225
+ const limitValue = perPageInput === false ? total : perPage;
1226
+ const dataQuery = `SELECT id, [resourceId], title, metadata, [createdAt], [updatedAt] ${baseQuery} ORDER BY ${orderByField} ${dir} OFFSET @offset ROWS FETCH NEXT @perPage ROWS ONLY`;
1227
+ const dataRequest = this.pool.request();
1228
+ dataRequest.input("resourceId", resourceId);
1229
+ dataRequest.input("offset", offset);
1230
+ if (limitValue > 2147483647) {
1231
+ dataRequest.input("perPage", sql__default.default.BigInt, limitValue);
1232
+ } else {
1233
+ dataRequest.input("perPage", limitValue);
1408
1234
  }
1409
- const whereClause = conditions.length > 0 ? `WHERE ${conditions.join(" AND ")}` : "";
1410
- const tableName = this.getTableName(storage.TABLE_WORKFLOW_SNAPSHOT);
1411
- const query = `SELECT * FROM ${tableName} ${whereClause}`;
1412
- const request = this.pool.request();
1413
- Object.entries(paramMap).forEach(([key, value]) => request.input(key, value));
1414
- const result = await request.query(query);
1415
- if (!result.recordset || result.recordset.length === 0) {
1416
- return null;
1235
+ const rowsResult = await dataRequest.query(dataQuery);
1236
+ const rows = rowsResult.recordset || [];
1237
+ const threads = rows.map((thread) => ({
1238
+ ...thread,
1239
+ metadata: typeof thread.metadata === "string" ? JSON.parse(thread.metadata) : thread.metadata,
1240
+ createdAt: thread.createdAt,
1241
+ updatedAt: thread.updatedAt
1242
+ }));
1243
+ return {
1244
+ threads,
1245
+ total,
1246
+ page,
1247
+ perPage: perPageForResponse,
1248
+ hasMore: perPageInput === false ? false : offset + perPage < total
1249
+ };
1250
+ } catch (error$1) {
1251
+ const mastraError = new error.MastraError(
1252
+ {
1253
+ id: storage.createStorageErrorId("MSSQL", "LIST_THREADS_BY_RESOURCE_ID", "FAILED"),
1254
+ domain: error.ErrorDomain.STORAGE,
1255
+ category: error.ErrorCategory.THIRD_PARTY,
1256
+ details: {
1257
+ resourceId,
1258
+ page
1259
+ }
1260
+ },
1261
+ error$1
1262
+ );
1263
+ this.logger?.error?.(mastraError.toString());
1264
+ this.logger?.trackException?.(mastraError);
1265
+ return {
1266
+ threads: [],
1267
+ total: 0,
1268
+ page,
1269
+ perPage: perPageForResponse,
1270
+ hasMore: false
1271
+ };
1272
+ }
1273
+ }
1274
+ async saveThread({ thread }) {
1275
+ try {
1276
+ const table = getTableName2({ indexName: storage.TABLE_THREADS, schemaName: getSchemaName2(this.schema) });
1277
+ const mergeSql = `MERGE INTO ${table} WITH (HOLDLOCK) AS target
1278
+ USING (SELECT @id AS id) AS source
1279
+ ON (target.id = source.id)
1280
+ WHEN MATCHED THEN
1281
+ UPDATE SET
1282
+ [resourceId] = @resourceId,
1283
+ title = @title,
1284
+ metadata = @metadata,
1285
+ [updatedAt] = @updatedAt
1286
+ WHEN NOT MATCHED THEN
1287
+ INSERT (id, [resourceId], title, metadata, [createdAt], [updatedAt])
1288
+ VALUES (@id, @resourceId, @title, @metadata, @createdAt, @updatedAt);`;
1289
+ const req = this.pool.request();
1290
+ req.input("id", thread.id);
1291
+ req.input("resourceId", thread.resourceId);
1292
+ req.input("title", thread.title);
1293
+ const metadata = thread.metadata ? JSON.stringify(thread.metadata) : null;
1294
+ if (metadata === null) {
1295
+ req.input("metadata", sql__default.default.NVarChar, null);
1296
+ } else {
1297
+ req.input("metadata", metadata);
1417
1298
  }
1418
- return this.parseWorkflowRun(result.recordset[0]);
1299
+ req.input("createdAt", sql__default.default.DateTime2, thread.createdAt);
1300
+ req.input("updatedAt", sql__default.default.DateTime2, thread.updatedAt);
1301
+ await req.query(mergeSql);
1302
+ return thread;
1419
1303
  } catch (error$1) {
1420
1304
  throw new error.MastraError(
1421
1305
  {
1422
- id: "MASTRA_STORAGE_MSSQL_STORE_GET_WORKFLOW_RUN_BY_ID_FAILED",
1306
+ id: storage.createStorageErrorId("MSSQL", "SAVE_THREAD", "FAILED"),
1423
1307
  domain: error.ErrorDomain.STORAGE,
1424
1308
  category: error.ErrorCategory.THIRD_PARTY,
1425
1309
  details: {
1426
- runId,
1427
- workflowName: workflowName || ""
1310
+ threadId: thread.id
1428
1311
  }
1429
1312
  },
1430
1313
  error$1
1431
1314
  );
1432
1315
  }
1433
1316
  }
1434
- async updateMessages({
1435
- messages
1317
+ /**
1318
+ * Updates a thread's title and metadata, merging with existing metadata. Returns the updated thread.
1319
+ */
1320
+ async updateThread({
1321
+ id,
1322
+ title,
1323
+ metadata
1436
1324
  }) {
1437
- if (!messages || messages.length === 0) {
1438
- return [];
1439
- }
1440
- const messageIds = messages.map((m) => m.id);
1325
+ const existingThread = await this.getThreadById({ threadId: id });
1326
+ if (!existingThread) {
1327
+ throw new error.MastraError({
1328
+ id: storage.createStorageErrorId("MSSQL", "UPDATE_THREAD", "NOT_FOUND"),
1329
+ domain: error.ErrorDomain.STORAGE,
1330
+ category: error.ErrorCategory.USER,
1331
+ text: `Thread ${id} not found`,
1332
+ details: {
1333
+ threadId: id,
1334
+ title
1335
+ }
1336
+ });
1337
+ }
1338
+ const mergedMetadata = {
1339
+ ...existingThread.metadata,
1340
+ ...metadata
1341
+ };
1342
+ try {
1343
+ const table = getTableName2({ indexName: storage.TABLE_THREADS, schemaName: getSchemaName2(this.schema) });
1344
+ const sql5 = `UPDATE ${table}
1345
+ SET title = @title,
1346
+ metadata = @metadata,
1347
+ [updatedAt] = @updatedAt
1348
+ OUTPUT INSERTED.*
1349
+ WHERE id = @id`;
1350
+ const req = this.pool.request();
1351
+ req.input("id", id);
1352
+ req.input("title", title);
1353
+ req.input("metadata", JSON.stringify(mergedMetadata));
1354
+ req.input("updatedAt", /* @__PURE__ */ new Date());
1355
+ const result = await req.query(sql5);
1356
+ let thread = result.recordset && result.recordset[0];
1357
+ if (thread && "seq_id" in thread) {
1358
+ const { seq_id, ...rest } = thread;
1359
+ thread = rest;
1360
+ }
1361
+ if (!thread) {
1362
+ throw new error.MastraError({
1363
+ id: storage.createStorageErrorId("MSSQL", "UPDATE_THREAD", "NOT_FOUND"),
1364
+ domain: error.ErrorDomain.STORAGE,
1365
+ category: error.ErrorCategory.USER,
1366
+ text: `Thread ${id} not found after update`,
1367
+ details: {
1368
+ threadId: id,
1369
+ title
1370
+ }
1371
+ });
1372
+ }
1373
+ return {
1374
+ ...thread,
1375
+ metadata: typeof thread.metadata === "string" ? JSON.parse(thread.metadata) : thread.metadata,
1376
+ createdAt: thread.createdAt,
1377
+ updatedAt: thread.updatedAt
1378
+ };
1379
+ } catch (error$1) {
1380
+ throw new error.MastraError(
1381
+ {
1382
+ id: storage.createStorageErrorId("MSSQL", "UPDATE_THREAD", "FAILED"),
1383
+ domain: error.ErrorDomain.STORAGE,
1384
+ category: error.ErrorCategory.THIRD_PARTY,
1385
+ details: {
1386
+ threadId: id,
1387
+ title
1388
+ }
1389
+ },
1390
+ error$1
1391
+ );
1392
+ }
1393
+ }
1394
+ async deleteThread({ threadId }) {
1395
+ const messagesTable = getTableName2({ indexName: storage.TABLE_MESSAGES, schemaName: getSchemaName2(this.schema) });
1396
+ const threadsTable = getTableName2({ indexName: storage.TABLE_THREADS, schemaName: getSchemaName2(this.schema) });
1397
+ const deleteMessagesSql = `DELETE FROM ${messagesTable} WHERE [thread_id] = @threadId`;
1398
+ const deleteThreadSql = `DELETE FROM ${threadsTable} WHERE id = @threadId`;
1399
+ const tx = this.pool.transaction();
1400
+ try {
1401
+ await tx.begin();
1402
+ const req = tx.request();
1403
+ req.input("threadId", threadId);
1404
+ await req.query(deleteMessagesSql);
1405
+ await req.query(deleteThreadSql);
1406
+ await tx.commit();
1407
+ } catch (error$1) {
1408
+ await tx.rollback().catch(() => {
1409
+ });
1410
+ throw new error.MastraError(
1411
+ {
1412
+ id: storage.createStorageErrorId("MSSQL", "DELETE_THREAD", "FAILED"),
1413
+ domain: error.ErrorDomain.STORAGE,
1414
+ category: error.ErrorCategory.THIRD_PARTY,
1415
+ details: {
1416
+ threadId
1417
+ }
1418
+ },
1419
+ error$1
1420
+ );
1421
+ }
1422
+ }
1423
+ async _getIncludedMessages({ include }) {
1424
+ if (!include || include.length === 0) return null;
1425
+ const unionQueries = [];
1426
+ const paramValues = [];
1427
+ let paramIdx = 1;
1428
+ const paramNames = [];
1429
+ const tableName = getTableName2({ indexName: storage.TABLE_MESSAGES, schemaName: getSchemaName2(this.schema) });
1430
+ for (const inc of include) {
1431
+ const { id, withPreviousMessages = 0, withNextMessages = 0 } = inc;
1432
+ const pId = `@p${paramIdx}`;
1433
+ const pPrev = `@p${paramIdx + 1}`;
1434
+ const pNext = `@p${paramIdx + 2}`;
1435
+ unionQueries.push(
1436
+ `
1437
+ SELECT
1438
+ m.id,
1439
+ m.content,
1440
+ m.role,
1441
+ m.type,
1442
+ m.[createdAt],
1443
+ m.thread_id AS threadId,
1444
+ m.[resourceId],
1445
+ m.seq_id
1446
+ FROM (
1447
+ SELECT *, ROW_NUMBER() OVER (ORDER BY [createdAt] ASC) as row_num
1448
+ FROM ${tableName}
1449
+ WHERE [thread_id] = (SELECT thread_id FROM ${tableName} WHERE id = ${pId})
1450
+ ) AS m
1451
+ WHERE m.id = ${pId}
1452
+ OR EXISTS (
1453
+ SELECT 1
1454
+ FROM (
1455
+ SELECT *, ROW_NUMBER() OVER (ORDER BY [createdAt] ASC) as row_num
1456
+ FROM ${tableName}
1457
+ WHERE [thread_id] = (SELECT thread_id FROM ${tableName} WHERE id = ${pId})
1458
+ ) AS target
1459
+ WHERE target.id = ${pId}
1460
+ AND (
1461
+ -- Get previous messages (messages that come BEFORE the target)
1462
+ (m.row_num < target.row_num AND m.row_num >= target.row_num - ${pPrev})
1463
+ OR
1464
+ -- Get next messages (messages that come AFTER the target)
1465
+ (m.row_num > target.row_num AND m.row_num <= target.row_num + ${pNext})
1466
+ )
1467
+ )
1468
+ `
1469
+ );
1470
+ paramValues.push(id, withPreviousMessages, withNextMessages);
1471
+ paramNames.push(`p${paramIdx}`, `p${paramIdx + 1}`, `p${paramIdx + 2}`);
1472
+ paramIdx += 3;
1473
+ }
1474
+ const finalQuery = `
1475
+ SELECT * FROM (
1476
+ ${unionQueries.join(" UNION ALL ")}
1477
+ ) AS union_result
1478
+ ORDER BY [seq_id] ASC
1479
+ `;
1480
+ const req = this.pool.request();
1481
+ for (let i = 0; i < paramValues.length; ++i) {
1482
+ req.input(paramNames[i], paramValues[i]);
1483
+ }
1484
+ const result = await req.query(finalQuery);
1485
+ const includedRows = result.recordset || [];
1486
+ const seen = /* @__PURE__ */ new Set();
1487
+ const dedupedRows = includedRows.filter((row) => {
1488
+ if (seen.has(row.id)) return false;
1489
+ seen.add(row.id);
1490
+ return true;
1491
+ });
1492
+ return dedupedRows;
1493
+ }
1494
+ async listMessagesById({ messageIds }) {
1495
+ if (messageIds.length === 0) return { messages: [] };
1496
+ const selectStatement = `SELECT seq_id, id, content, role, type, [createdAt], thread_id AS threadId, resourceId`;
1497
+ const orderByStatement = `ORDER BY [seq_id] DESC`;
1498
+ try {
1499
+ let rows = [];
1500
+ let query = `${selectStatement} FROM ${getTableName2({ indexName: storage.TABLE_MESSAGES, schemaName: getSchemaName2(this.schema) })} WHERE [id] IN (${messageIds.map((_, i) => `@id${i}`).join(", ")})`;
1501
+ const request = this.pool.request();
1502
+ messageIds.forEach((id, i) => request.input(`id${i}`, id));
1503
+ query += ` ${orderByStatement}`;
1504
+ const result = await request.query(query);
1505
+ const remainingRows = result.recordset || [];
1506
+ rows.push(...remainingRows);
1507
+ rows.sort((a, b) => {
1508
+ const timeDiff = a.seq_id - b.seq_id;
1509
+ return timeDiff;
1510
+ });
1511
+ const messagesWithParsedContent = rows.map((row) => {
1512
+ if (typeof row.content === "string") {
1513
+ try {
1514
+ return { ...row, content: JSON.parse(row.content) };
1515
+ } catch {
1516
+ return row;
1517
+ }
1518
+ }
1519
+ return row;
1520
+ });
1521
+ const cleanMessages = messagesWithParsedContent.map(({ seq_id, ...rest }) => rest);
1522
+ const list = new agent.MessageList().add(cleanMessages, "memory");
1523
+ return { messages: list.get.all.db() };
1524
+ } catch (error$1) {
1525
+ const mastraError = new error.MastraError(
1526
+ {
1527
+ id: storage.createStorageErrorId("MSSQL", "LIST_MESSAGES_BY_ID", "FAILED"),
1528
+ domain: error.ErrorDomain.STORAGE,
1529
+ category: error.ErrorCategory.THIRD_PARTY,
1530
+ details: {
1531
+ messageIds: JSON.stringify(messageIds)
1532
+ }
1533
+ },
1534
+ error$1
1535
+ );
1536
+ this.logger?.error?.(mastraError.toString());
1537
+ this.logger?.trackException?.(mastraError);
1538
+ return { messages: [] };
1539
+ }
1540
+ }
1541
+ async listMessages(args) {
1542
+ const { threadId, resourceId, include, filter, perPage: perPageInput, page = 0, orderBy } = args;
1543
+ const threadIds = Array.isArray(threadId) ? threadId : [threadId];
1544
+ if (threadIds.length === 0 || threadIds.some((id) => !id.trim())) {
1545
+ throw new error.MastraError(
1546
+ {
1547
+ id: storage.createStorageErrorId("MSSQL", "LIST_MESSAGES", "INVALID_THREAD_ID"),
1548
+ domain: error.ErrorDomain.STORAGE,
1549
+ category: error.ErrorCategory.THIRD_PARTY,
1550
+ details: { threadId: Array.isArray(threadId) ? threadId.join(",") : threadId }
1551
+ },
1552
+ new Error("threadId must be a non-empty string or array of non-empty strings")
1553
+ );
1554
+ }
1555
+ if (page < 0) {
1556
+ throw new error.MastraError({
1557
+ id: storage.createStorageErrorId("MSSQL", "LIST_MESSAGES", "INVALID_PAGE"),
1558
+ domain: error.ErrorDomain.STORAGE,
1559
+ category: error.ErrorCategory.USER,
1560
+ text: "Page number must be non-negative",
1561
+ details: {
1562
+ threadId: Array.isArray(threadId) ? threadId.join(",") : threadId,
1563
+ page
1564
+ }
1565
+ });
1566
+ }
1567
+ const perPage = storage.normalizePerPage(perPageInput, 40);
1568
+ const { offset, perPage: perPageForResponse } = storage.calculatePagination(page, perPageInput, perPage);
1569
+ try {
1570
+ const { field, direction } = this.parseOrderBy(orderBy, "ASC");
1571
+ const orderByStatement = `ORDER BY [${field}] ${direction}, [seq_id] ${direction}`;
1572
+ const tableName = getTableName2({ indexName: storage.TABLE_MESSAGES, schemaName: getSchemaName2(this.schema) });
1573
+ const baseQuery = `SELECT seq_id, id, content, role, type, [createdAt], thread_id AS threadId, resourceId FROM ${tableName}`;
1574
+ const filters = {
1575
+ thread_id: threadIds.length === 1 ? threadIds[0] : { $in: threadIds },
1576
+ ...resourceId ? { resourceId } : {},
1577
+ ...buildDateRangeFilter(filter?.dateRange, "createdAt")
1578
+ };
1579
+ const { sql: actualWhereClause = "", params: whereParams } = prepareWhereClause(
1580
+ filters);
1581
+ const bindWhereParams = (req) => {
1582
+ Object.entries(whereParams).forEach(([paramName, paramValue]) => req.input(paramName, paramValue));
1583
+ };
1584
+ const countRequest = this.pool.request();
1585
+ bindWhereParams(countRequest);
1586
+ const countResult = await countRequest.query(`SELECT COUNT(*) as total FROM ${tableName}${actualWhereClause}`);
1587
+ const total = parseInt(countResult.recordset[0]?.total, 10) || 0;
1588
+ const fetchBaseMessages = async () => {
1589
+ const request = this.pool.request();
1590
+ bindWhereParams(request);
1591
+ if (perPageInput === false) {
1592
+ const result2 = await request.query(`${baseQuery}${actualWhereClause} ${orderByStatement}`);
1593
+ return result2.recordset || [];
1594
+ }
1595
+ request.input("offset", offset);
1596
+ request.input("limit", perPage > 2147483647 ? sql__default.default.BigInt : sql__default.default.Int, perPage);
1597
+ const result = await request.query(
1598
+ `${baseQuery}${actualWhereClause} ${orderByStatement} OFFSET @offset ROWS FETCH NEXT @limit ROWS ONLY`
1599
+ );
1600
+ return result.recordset || [];
1601
+ };
1602
+ const baseRows = perPage === 0 ? [] : await fetchBaseMessages();
1603
+ const messages = [...baseRows];
1604
+ const seqById = /* @__PURE__ */ new Map();
1605
+ messages.forEach((msg) => {
1606
+ if (typeof msg.seq_id === "number") seqById.set(msg.id, msg.seq_id);
1607
+ });
1608
+ if (total === 0 && messages.length === 0 && (!include || include.length === 0)) {
1609
+ return {
1610
+ messages: [],
1611
+ total: 0,
1612
+ page,
1613
+ perPage: perPageForResponse,
1614
+ hasMore: false
1615
+ };
1616
+ }
1617
+ if (include?.length) {
1618
+ const messageIds = new Set(messages.map((m) => m.id));
1619
+ const includeMessages = await this._getIncludedMessages({ include });
1620
+ includeMessages?.forEach((msg) => {
1621
+ if (!messageIds.has(msg.id)) {
1622
+ messages.push(msg);
1623
+ messageIds.add(msg.id);
1624
+ if (typeof msg.seq_id === "number") seqById.set(msg.id, msg.seq_id);
1625
+ }
1626
+ });
1627
+ }
1628
+ const parsed = this._parseAndFormatMessages(messages, "v2");
1629
+ const mult = direction === "ASC" ? 1 : -1;
1630
+ const finalMessages = parsed.sort((a, b) => {
1631
+ const aVal = field === "createdAt" ? new Date(a.createdAt).getTime() : a[field];
1632
+ const bVal = field === "createdAt" ? new Date(b.createdAt).getTime() : b[field];
1633
+ if (aVal == null || bVal == null) {
1634
+ return aVal == null && bVal == null ? a.id.localeCompare(b.id) : aVal == null ? 1 : -1;
1635
+ }
1636
+ const diff = (typeof aVal === "number" && typeof bVal === "number" ? aVal - bVal : String(aVal).localeCompare(String(bVal))) * mult;
1637
+ if (diff !== 0) return diff;
1638
+ const seqA = seqById.get(a.id);
1639
+ const seqB = seqById.get(b.id);
1640
+ return seqA != null && seqB != null ? (seqA - seqB) * mult : a.id.localeCompare(b.id);
1641
+ });
1642
+ const threadIdSet = new Set(threadIds);
1643
+ const returnedThreadMessageCount = finalMessages.filter((m) => m.threadId && threadIdSet.has(m.threadId)).length;
1644
+ const hasMore = perPageInput !== false && returnedThreadMessageCount < total && offset + perPage < total;
1645
+ return {
1646
+ messages: finalMessages,
1647
+ total,
1648
+ page,
1649
+ perPage: perPageForResponse,
1650
+ hasMore
1651
+ };
1652
+ } catch (error$1) {
1653
+ const mastraError = new error.MastraError(
1654
+ {
1655
+ id: storage.createStorageErrorId("MSSQL", "LIST_MESSAGES", "FAILED"),
1656
+ domain: error.ErrorDomain.STORAGE,
1657
+ category: error.ErrorCategory.THIRD_PARTY,
1658
+ details: {
1659
+ threadId: Array.isArray(threadId) ? threadId.join(",") : threadId,
1660
+ resourceId: resourceId ?? ""
1661
+ }
1662
+ },
1663
+ error$1
1664
+ );
1665
+ this.logger?.error?.(mastraError.toString());
1666
+ this.logger?.trackException?.(mastraError);
1667
+ return {
1668
+ messages: [],
1669
+ total: 0,
1670
+ page,
1671
+ perPage: perPageForResponse,
1672
+ hasMore: false
1673
+ };
1674
+ }
1675
+ }
1676
+ async saveMessages({ messages }) {
1677
+ if (messages.length === 0) return { messages: [] };
1678
+ const threadId = messages[0]?.threadId;
1679
+ if (!threadId) {
1680
+ throw new error.MastraError({
1681
+ id: storage.createStorageErrorId("MSSQL", "SAVE_MESSAGES", "INVALID_THREAD_ID"),
1682
+ domain: error.ErrorDomain.STORAGE,
1683
+ category: error.ErrorCategory.THIRD_PARTY,
1684
+ text: `Thread ID is required`
1685
+ });
1686
+ }
1687
+ const thread = await this.getThreadById({ threadId });
1688
+ if (!thread) {
1689
+ throw new error.MastraError({
1690
+ id: storage.createStorageErrorId("MSSQL", "SAVE_MESSAGES", "THREAD_NOT_FOUND"),
1691
+ domain: error.ErrorDomain.STORAGE,
1692
+ category: error.ErrorCategory.THIRD_PARTY,
1693
+ text: `Thread ${threadId} not found`,
1694
+ details: { threadId }
1695
+ });
1696
+ }
1697
+ const tableMessages = getTableName2({ indexName: storage.TABLE_MESSAGES, schemaName: getSchemaName2(this.schema) });
1698
+ const tableThreads = getTableName2({ indexName: storage.TABLE_THREADS, schemaName: getSchemaName2(this.schema) });
1699
+ try {
1700
+ const transaction = this.pool.transaction();
1701
+ await transaction.begin();
1702
+ try {
1703
+ for (const message of messages) {
1704
+ if (!message.threadId) {
1705
+ throw new Error(
1706
+ `Expected to find a threadId for message, but couldn't find one. An unexpected error has occurred.`
1707
+ );
1708
+ }
1709
+ if (!message.resourceId) {
1710
+ throw new Error(
1711
+ `Expected to find a resourceId for message, but couldn't find one. An unexpected error has occurred.`
1712
+ );
1713
+ }
1714
+ const request = transaction.request();
1715
+ request.input("id", message.id);
1716
+ request.input("thread_id", message.threadId);
1717
+ request.input(
1718
+ "content",
1719
+ typeof message.content === "string" ? message.content : JSON.stringify(message.content)
1720
+ );
1721
+ request.input("createdAt", sql__default.default.DateTime2, message.createdAt);
1722
+ request.input("role", message.role);
1723
+ request.input("type", message.type || "v2");
1724
+ request.input("resourceId", message.resourceId);
1725
+ const mergeSql = `MERGE INTO ${tableMessages} AS target
1726
+ USING (SELECT @id AS id) AS src
1727
+ ON target.id = src.id
1728
+ WHEN MATCHED THEN UPDATE SET
1729
+ thread_id = @thread_id,
1730
+ content = @content,
1731
+ [createdAt] = @createdAt,
1732
+ role = @role,
1733
+ type = @type,
1734
+ resourceId = @resourceId
1735
+ WHEN NOT MATCHED THEN INSERT (id, thread_id, content, [createdAt], role, type, resourceId)
1736
+ VALUES (@id, @thread_id, @content, @createdAt, @role, @type, @resourceId);`;
1737
+ await request.query(mergeSql);
1738
+ }
1739
+ const threadReq = transaction.request();
1740
+ threadReq.input("updatedAt", sql__default.default.DateTime2, /* @__PURE__ */ new Date());
1741
+ threadReq.input("id", threadId);
1742
+ await threadReq.query(`UPDATE ${tableThreads} SET [updatedAt] = @updatedAt WHERE id = @id`);
1743
+ await transaction.commit();
1744
+ } catch (error) {
1745
+ await transaction.rollback();
1746
+ throw error;
1747
+ }
1748
+ const messagesWithParsedContent = messages.map((message) => {
1749
+ if (typeof message.content === "string") {
1750
+ try {
1751
+ return { ...message, content: JSON.parse(message.content) };
1752
+ } catch {
1753
+ return message;
1754
+ }
1755
+ }
1756
+ return message;
1757
+ });
1758
+ const list = new agent.MessageList().add(messagesWithParsedContent, "memory");
1759
+ return { messages: list.get.all.db() };
1760
+ } catch (error$1) {
1761
+ throw new error.MastraError(
1762
+ {
1763
+ id: storage.createStorageErrorId("MSSQL", "SAVE_MESSAGES", "FAILED"),
1764
+ domain: error.ErrorDomain.STORAGE,
1765
+ category: error.ErrorCategory.THIRD_PARTY,
1766
+ details: { threadId }
1767
+ },
1768
+ error$1
1769
+ );
1770
+ }
1771
+ }
1772
+ async updateMessages({
1773
+ messages
1774
+ }) {
1775
+ if (!messages || messages.length === 0) {
1776
+ return [];
1777
+ }
1778
+ const messageIds = messages.map((m) => m.id);
1441
1779
  const idParams = messageIds.map((_, i) => `@id${i}`).join(", ");
1442
- let selectQuery = `SELECT id, content, role, type, createdAt, thread_id AS threadId, resourceId FROM ${this.getTableName(storage.TABLE_MESSAGES)}`;
1780
+ let selectQuery = `SELECT id, content, role, type, createdAt, thread_id AS threadId, resourceId FROM ${getTableName2({ indexName: storage.TABLE_MESSAGES, schemaName: getSchemaName2(this.schema) })}`;
1443
1781
  if (idParams.length > 0) {
1444
1782
  selectQuery += ` WHERE id IN (${idParams})`;
1445
1783
  } else {
1446
1784
  return [];
1447
1785
  }
1448
- const selectReq = this.pool.request();
1449
- messageIds.forEach((id, i) => selectReq.input(`id${i}`, id));
1450
- const existingMessagesDb = (await selectReq.query(selectQuery)).recordset;
1451
- if (!existingMessagesDb || existingMessagesDb.length === 0) {
1452
- return [];
1786
+ const selectReq = this.pool.request();
1787
+ messageIds.forEach((id, i) => selectReq.input(`id${i}`, id));
1788
+ const existingMessagesDb = (await selectReq.query(selectQuery)).recordset;
1789
+ if (!existingMessagesDb || existingMessagesDb.length === 0) {
1790
+ return [];
1791
+ }
1792
+ const existingMessages = existingMessagesDb.map((msg) => {
1793
+ if (typeof msg.content === "string") {
1794
+ try {
1795
+ msg.content = JSON.parse(msg.content);
1796
+ } catch {
1797
+ }
1798
+ }
1799
+ return msg;
1800
+ });
1801
+ const threadIdsToUpdate = /* @__PURE__ */ new Set();
1802
+ const transaction = this.pool.transaction();
1803
+ try {
1804
+ await transaction.begin();
1805
+ for (const existingMessage of existingMessages) {
1806
+ const updatePayload = messages.find((m) => m.id === existingMessage.id);
1807
+ if (!updatePayload) continue;
1808
+ const { id, ...fieldsToUpdate } = updatePayload;
1809
+ if (Object.keys(fieldsToUpdate).length === 0) continue;
1810
+ threadIdsToUpdate.add(existingMessage.threadId);
1811
+ if (updatePayload.threadId && updatePayload.threadId !== existingMessage.threadId) {
1812
+ threadIdsToUpdate.add(updatePayload.threadId);
1813
+ }
1814
+ const setClauses = [];
1815
+ const req = transaction.request();
1816
+ req.input("id", id);
1817
+ const columnMapping = { threadId: "thread_id" };
1818
+ const updatableFields = { ...fieldsToUpdate };
1819
+ if (updatableFields.content) {
1820
+ const newContent = {
1821
+ ...existingMessage.content,
1822
+ ...updatableFields.content,
1823
+ ...existingMessage.content?.metadata && updatableFields.content.metadata ? { metadata: { ...existingMessage.content.metadata, ...updatableFields.content.metadata } } : {}
1824
+ };
1825
+ setClauses.push(`content = @content`);
1826
+ req.input("content", JSON.stringify(newContent));
1827
+ delete updatableFields.content;
1828
+ }
1829
+ for (const key in updatableFields) {
1830
+ if (Object.prototype.hasOwnProperty.call(updatableFields, key)) {
1831
+ const dbColumn = columnMapping[key] || key;
1832
+ setClauses.push(`[${dbColumn}] = @${dbColumn}`);
1833
+ req.input(dbColumn, updatableFields[key]);
1834
+ }
1835
+ }
1836
+ if (setClauses.length > 0) {
1837
+ const updateSql = `UPDATE ${getTableName2({ indexName: storage.TABLE_MESSAGES, schemaName: getSchemaName2(this.schema) })} SET ${setClauses.join(", ")} WHERE id = @id`;
1838
+ await req.query(updateSql);
1839
+ }
1840
+ }
1841
+ if (threadIdsToUpdate.size > 0) {
1842
+ const threadIdParams = Array.from(threadIdsToUpdate).map((_, i) => `@tid${i}`).join(", ");
1843
+ const threadReq = transaction.request();
1844
+ Array.from(threadIdsToUpdate).forEach((tid, i) => threadReq.input(`tid${i}`, tid));
1845
+ threadReq.input("updatedAt", (/* @__PURE__ */ new Date()).toISOString());
1846
+ const threadSql = `UPDATE ${getTableName2({ indexName: storage.TABLE_THREADS, schemaName: getSchemaName2(this.schema) })} SET updatedAt = @updatedAt WHERE id IN (${threadIdParams})`;
1847
+ await threadReq.query(threadSql);
1848
+ }
1849
+ await transaction.commit();
1850
+ } catch (error$1) {
1851
+ await transaction.rollback();
1852
+ throw new error.MastraError(
1853
+ {
1854
+ id: storage.createStorageErrorId("MSSQL", "UPDATE_MESSAGES", "FAILED"),
1855
+ domain: error.ErrorDomain.STORAGE,
1856
+ category: error.ErrorCategory.THIRD_PARTY
1857
+ },
1858
+ error$1
1859
+ );
1860
+ }
1861
+ const refetchReq = this.pool.request();
1862
+ messageIds.forEach((id, i) => refetchReq.input(`id${i}`, id));
1863
+ const updatedMessages = (await refetchReq.query(selectQuery)).recordset;
1864
+ return (updatedMessages || []).map((message) => {
1865
+ if (typeof message.content === "string") {
1866
+ try {
1867
+ message.content = JSON.parse(message.content);
1868
+ } catch {
1869
+ }
1870
+ }
1871
+ return message;
1872
+ });
1873
+ }
1874
+ async deleteMessages(messageIds) {
1875
+ if (!messageIds || messageIds.length === 0) {
1876
+ return;
1877
+ }
1878
+ try {
1879
+ const messageTableName = getTableName2({ indexName: storage.TABLE_MESSAGES, schemaName: getSchemaName2(this.schema) });
1880
+ const threadTableName = getTableName2({ indexName: storage.TABLE_THREADS, schemaName: getSchemaName2(this.schema) });
1881
+ const placeholders = messageIds.map((_, idx) => `@p${idx + 1}`).join(",");
1882
+ const request = this.pool.request();
1883
+ messageIds.forEach((id, idx) => {
1884
+ request.input(`p${idx + 1}`, id);
1885
+ });
1886
+ const messages = await request.query(
1887
+ `SELECT DISTINCT [thread_id] FROM ${messageTableName} WHERE [id] IN (${placeholders})`
1888
+ );
1889
+ const threadIds = messages.recordset?.map((msg) => msg.thread_id).filter(Boolean) || [];
1890
+ const transaction = this.pool.transaction();
1891
+ await transaction.begin();
1892
+ try {
1893
+ const deleteRequest = transaction.request();
1894
+ messageIds.forEach((id, idx) => {
1895
+ deleteRequest.input(`p${idx + 1}`, id);
1896
+ });
1897
+ await deleteRequest.query(`DELETE FROM ${messageTableName} WHERE [id] IN (${placeholders})`);
1898
+ if (threadIds.length > 0) {
1899
+ for (const threadId of threadIds) {
1900
+ const updateRequest = transaction.request();
1901
+ updateRequest.input("p1", threadId);
1902
+ await updateRequest.query(`UPDATE ${threadTableName} SET [updatedAt] = GETDATE() WHERE [id] = @p1`);
1903
+ }
1904
+ }
1905
+ await transaction.commit();
1906
+ } catch (error) {
1907
+ try {
1908
+ await transaction.rollback();
1909
+ } catch {
1910
+ }
1911
+ throw error;
1912
+ }
1913
+ } catch (error$1) {
1914
+ throw new error.MastraError(
1915
+ {
1916
+ id: storage.createStorageErrorId("MSSQL", "DELETE_MESSAGES", "FAILED"),
1917
+ domain: error.ErrorDomain.STORAGE,
1918
+ category: error.ErrorCategory.THIRD_PARTY,
1919
+ details: { messageIds: messageIds.join(", ") }
1920
+ },
1921
+ error$1
1922
+ );
1923
+ }
1924
+ }
1925
+ async getResourceById({ resourceId }) {
1926
+ const tableName = getTableName2({ indexName: storage.TABLE_RESOURCES, schemaName: getSchemaName2(this.schema) });
1927
+ try {
1928
+ const req = this.pool.request();
1929
+ req.input("resourceId", resourceId);
1930
+ const result = (await req.query(`SELECT * FROM ${tableName} WHERE id = @resourceId`)).recordset[0];
1931
+ if (!result) {
1932
+ return null;
1933
+ }
1934
+ return {
1935
+ id: result.id,
1936
+ createdAt: result.createdAt,
1937
+ updatedAt: result.updatedAt,
1938
+ workingMemory: result.workingMemory,
1939
+ metadata: typeof result.metadata === "string" ? JSON.parse(result.metadata) : result.metadata
1940
+ };
1941
+ } catch (error$1) {
1942
+ const mastraError = new error.MastraError(
1943
+ {
1944
+ id: storage.createStorageErrorId("MSSQL", "GET_RESOURCE_BY_ID", "FAILED"),
1945
+ domain: error.ErrorDomain.STORAGE,
1946
+ category: error.ErrorCategory.THIRD_PARTY,
1947
+ details: { resourceId }
1948
+ },
1949
+ error$1
1950
+ );
1951
+ this.logger?.error?.(mastraError.toString());
1952
+ this.logger?.trackException?.(mastraError);
1953
+ throw mastraError;
1954
+ }
1955
+ }
1956
+ async saveResource({ resource }) {
1957
+ await this.db.insert({
1958
+ tableName: storage.TABLE_RESOURCES,
1959
+ record: {
1960
+ ...resource,
1961
+ metadata: resource.metadata
1962
+ }
1963
+ });
1964
+ return resource;
1965
+ }
1966
+ async updateResource({
1967
+ resourceId,
1968
+ workingMemory,
1969
+ metadata
1970
+ }) {
1971
+ try {
1972
+ const existingResource = await this.getResourceById({ resourceId });
1973
+ if (!existingResource) {
1974
+ const newResource = {
1975
+ id: resourceId,
1976
+ workingMemory,
1977
+ metadata: metadata || {},
1978
+ createdAt: /* @__PURE__ */ new Date(),
1979
+ updatedAt: /* @__PURE__ */ new Date()
1980
+ };
1981
+ return this.saveResource({ resource: newResource });
1982
+ }
1983
+ const updatedResource = {
1984
+ ...existingResource,
1985
+ workingMemory: workingMemory !== void 0 ? workingMemory : existingResource.workingMemory,
1986
+ metadata: {
1987
+ ...existingResource.metadata,
1988
+ ...metadata
1989
+ },
1990
+ updatedAt: /* @__PURE__ */ new Date()
1991
+ };
1992
+ const tableName = getTableName2({ indexName: storage.TABLE_RESOURCES, schemaName: getSchemaName2(this.schema) });
1993
+ const updates = [];
1994
+ const req = this.pool.request();
1995
+ if (workingMemory !== void 0) {
1996
+ updates.push("workingMemory = @workingMemory");
1997
+ req.input("workingMemory", workingMemory);
1998
+ }
1999
+ if (metadata) {
2000
+ updates.push("metadata = @metadata");
2001
+ req.input("metadata", JSON.stringify(updatedResource.metadata));
2002
+ }
2003
+ updates.push("updatedAt = @updatedAt");
2004
+ req.input("updatedAt", updatedResource.updatedAt.toISOString());
2005
+ req.input("id", resourceId);
2006
+ await req.query(`UPDATE ${tableName} SET ${updates.join(", ")} WHERE id = @id`);
2007
+ return updatedResource;
2008
+ } catch (error$1) {
2009
+ const mastraError = new error.MastraError(
2010
+ {
2011
+ id: storage.createStorageErrorId("MSSQL", "UPDATE_RESOURCE", "FAILED"),
2012
+ domain: error.ErrorDomain.STORAGE,
2013
+ category: error.ErrorCategory.THIRD_PARTY,
2014
+ details: { resourceId }
2015
+ },
2016
+ error$1
2017
+ );
2018
+ this.logger?.error?.(mastraError.toString());
2019
+ this.logger?.trackException?.(mastraError);
2020
+ throw mastraError;
2021
+ }
2022
+ }
2023
+ };
2024
+ var ObservabilityMSSQL = class extends storage.ObservabilityStorage {
2025
+ pool;
2026
+ db;
2027
+ schema;
2028
+ needsConnect;
2029
+ constructor(config) {
2030
+ super();
2031
+ const { pool, db, schema, needsConnect } = resolveMssqlConfig(config);
2032
+ this.pool = pool;
2033
+ this.db = db;
2034
+ this.schema = schema;
2035
+ this.needsConnect = needsConnect;
2036
+ }
2037
+ async init() {
2038
+ if (this.needsConnect) {
2039
+ await this.pool.connect();
2040
+ this.needsConnect = false;
2041
+ }
2042
+ await this.db.createTable({ tableName: storage.TABLE_SPANS, schema: storage.SPAN_SCHEMA });
2043
+ }
2044
+ async dangerouslyClearAll() {
2045
+ await this.db.clearTable({ tableName: storage.TABLE_SPANS });
2046
+ }
2047
+ get tracingStrategy() {
2048
+ return {
2049
+ preferred: "batch-with-updates",
2050
+ supported: ["batch-with-updates", "insert-only"]
2051
+ };
2052
+ }
2053
+ async createSpan(span) {
2054
+ try {
2055
+ const startedAt = span.startedAt instanceof Date ? span.startedAt.toISOString() : span.startedAt;
2056
+ const endedAt = span.endedAt instanceof Date ? span.endedAt.toISOString() : span.endedAt;
2057
+ const record = {
2058
+ ...span,
2059
+ startedAt,
2060
+ endedAt
2061
+ // Note: createdAt/updatedAt will be set by default values
2062
+ };
2063
+ return this.db.insert({ tableName: storage.TABLE_SPANS, record });
2064
+ } catch (error$1) {
2065
+ throw new error.MastraError(
2066
+ {
2067
+ id: storage.createStorageErrorId("MSSQL", "CREATE_SPAN", "FAILED"),
2068
+ domain: error.ErrorDomain.STORAGE,
2069
+ category: error.ErrorCategory.USER,
2070
+ details: {
2071
+ spanId: span.spanId,
2072
+ traceId: span.traceId,
2073
+ spanType: span.spanType,
2074
+ spanName: span.name
2075
+ }
2076
+ },
2077
+ error$1
2078
+ );
2079
+ }
2080
+ }
2081
+ async getTrace(traceId) {
2082
+ try {
2083
+ const tableName = getTableName2({
2084
+ indexName: storage.TABLE_SPANS,
2085
+ schemaName: getSchemaName2(this.schema)
2086
+ });
2087
+ const request = this.pool.request();
2088
+ request.input("traceId", traceId);
2089
+ const result = await request.query(
2090
+ `SELECT
2091
+ [traceId], [spanId], [parentSpanId], [name], [scope], [spanType],
2092
+ [attributes], [metadata], [links], [input], [output], [error], [isEvent],
2093
+ [startedAt], [endedAt], [createdAt], [updatedAt]
2094
+ FROM ${tableName}
2095
+ WHERE [traceId] = @traceId
2096
+ ORDER BY [startedAt] DESC`
2097
+ );
2098
+ if (!result.recordset || result.recordset.length === 0) {
2099
+ return null;
2100
+ }
2101
+ return {
2102
+ traceId,
2103
+ spans: result.recordset.map(
2104
+ (span) => transformFromSqlRow({
2105
+ tableName: storage.TABLE_SPANS,
2106
+ sqlRow: span
2107
+ })
2108
+ )
2109
+ };
2110
+ } catch (error$1) {
2111
+ throw new error.MastraError(
2112
+ {
2113
+ id: storage.createStorageErrorId("MSSQL", "GET_TRACE", "FAILED"),
2114
+ domain: error.ErrorDomain.STORAGE,
2115
+ category: error.ErrorCategory.USER,
2116
+ details: {
2117
+ traceId
2118
+ }
2119
+ },
2120
+ error$1
2121
+ );
2122
+ }
2123
+ }
2124
+ async updateSpan({
2125
+ spanId,
2126
+ traceId,
2127
+ updates
2128
+ }) {
2129
+ try {
2130
+ const data = { ...updates };
2131
+ if (data.endedAt instanceof Date) {
2132
+ data.endedAt = data.endedAt.toISOString();
2133
+ }
2134
+ if (data.startedAt instanceof Date) {
2135
+ data.startedAt = data.startedAt.toISOString();
2136
+ }
2137
+ await this.db.update({
2138
+ tableName: storage.TABLE_SPANS,
2139
+ keys: { spanId, traceId },
2140
+ data
2141
+ });
2142
+ } catch (error$1) {
2143
+ throw new error.MastraError(
2144
+ {
2145
+ id: storage.createStorageErrorId("MSSQL", "UPDATE_SPAN", "FAILED"),
2146
+ domain: error.ErrorDomain.STORAGE,
2147
+ category: error.ErrorCategory.USER,
2148
+ details: {
2149
+ spanId,
2150
+ traceId
2151
+ }
2152
+ },
2153
+ error$1
2154
+ );
2155
+ }
2156
+ }
2157
+ async getTracesPaginated({
2158
+ filters,
2159
+ pagination
2160
+ }) {
2161
+ const page = pagination?.page ?? 0;
2162
+ const perPage = pagination?.perPage ?? 10;
2163
+ const { entityId, entityType, ...actualFilters } = filters || {};
2164
+ const filtersWithDateRange = {
2165
+ ...actualFilters,
2166
+ ...buildDateRangeFilter(pagination?.dateRange, "startedAt"),
2167
+ parentSpanId: null
2168
+ // Only get root spans for traces
2169
+ };
2170
+ const whereClause = prepareWhereClause(filtersWithDateRange);
2171
+ let actualWhereClause = whereClause.sql;
2172
+ const params = { ...whereClause.params };
2173
+ let currentParamIndex = Object.keys(params).length + 1;
2174
+ if (entityId && entityType) {
2175
+ let name = "";
2176
+ if (entityType === "workflow") {
2177
+ name = `workflow run: '${entityId}'`;
2178
+ } else if (entityType === "agent") {
2179
+ name = `agent run: '${entityId}'`;
2180
+ } else {
2181
+ const error$1 = new error.MastraError({
2182
+ id: storage.createStorageErrorId("MSSQL", "GET_TRACES_PAGINATED", "INVALID_ENTITY_TYPE"),
2183
+ domain: error.ErrorDomain.STORAGE,
2184
+ category: error.ErrorCategory.USER,
2185
+ details: {
2186
+ entityType
2187
+ },
2188
+ text: `Cannot filter by entity type: ${entityType}`
2189
+ });
2190
+ throw error$1;
2191
+ }
2192
+ const entityParam = `p${currentParamIndex++}`;
2193
+ if (actualWhereClause) {
2194
+ actualWhereClause += ` AND [name] = @${entityParam}`;
2195
+ } else {
2196
+ actualWhereClause = ` WHERE [name] = @${entityParam}`;
2197
+ }
2198
+ params[entityParam] = name;
2199
+ }
2200
+ const tableName = getTableName2({
2201
+ indexName: storage.TABLE_SPANS,
2202
+ schemaName: getSchemaName2(this.schema)
2203
+ });
2204
+ try {
2205
+ const countRequest = this.pool.request();
2206
+ Object.entries(params).forEach(([key, value]) => {
2207
+ countRequest.input(key, value);
2208
+ });
2209
+ const countResult = await countRequest.query(
2210
+ `SELECT COUNT(*) as count FROM ${tableName}${actualWhereClause}`
2211
+ );
2212
+ const total = countResult.recordset[0]?.count ?? 0;
2213
+ if (total === 0) {
2214
+ return {
2215
+ pagination: {
2216
+ total: 0,
2217
+ page,
2218
+ perPage,
2219
+ hasMore: false
2220
+ },
2221
+ spans: []
2222
+ };
2223
+ }
2224
+ const dataRequest = this.pool.request();
2225
+ Object.entries(params).forEach(([key, value]) => {
2226
+ dataRequest.input(key, value);
2227
+ });
2228
+ dataRequest.input("offset", page * perPage);
2229
+ dataRequest.input("limit", perPage);
2230
+ const dataResult = await dataRequest.query(
2231
+ `SELECT * FROM ${tableName}${actualWhereClause} ORDER BY [startedAt] DESC OFFSET @offset ROWS FETCH NEXT @limit ROWS ONLY`
2232
+ );
2233
+ const spans = dataResult.recordset.map(
2234
+ (row) => transformFromSqlRow({
2235
+ tableName: storage.TABLE_SPANS,
2236
+ sqlRow: row
2237
+ })
2238
+ );
2239
+ return {
2240
+ pagination: {
2241
+ total,
2242
+ page,
2243
+ perPage,
2244
+ hasMore: (page + 1) * perPage < total
2245
+ },
2246
+ spans
2247
+ };
2248
+ } catch (error$1) {
2249
+ throw new error.MastraError(
2250
+ {
2251
+ id: storage.createStorageErrorId("MSSQL", "GET_TRACES_PAGINATED", "FAILED"),
2252
+ domain: error.ErrorDomain.STORAGE,
2253
+ category: error.ErrorCategory.USER
2254
+ },
2255
+ error$1
2256
+ );
2257
+ }
2258
+ }
2259
+ async batchCreateSpans(args) {
2260
+ if (!args.records || args.records.length === 0) {
2261
+ return;
1453
2262
  }
1454
- const existingMessages = existingMessagesDb.map((msg) => {
1455
- if (typeof msg.content === "string") {
1456
- try {
1457
- msg.content = JSON.parse(msg.content);
1458
- } catch {
2263
+ try {
2264
+ await this.db.batchInsert({
2265
+ tableName: storage.TABLE_SPANS,
2266
+ records: args.records.map((span) => ({
2267
+ ...span,
2268
+ startedAt: span.startedAt instanceof Date ? span.startedAt.toISOString() : span.startedAt,
2269
+ endedAt: span.endedAt instanceof Date ? span.endedAt.toISOString() : span.endedAt
2270
+ }))
2271
+ });
2272
+ } catch (error$1) {
2273
+ throw new error.MastraError(
2274
+ {
2275
+ id: storage.createStorageErrorId("MSSQL", "BATCH_CREATE_SPANS", "FAILED"),
2276
+ domain: error.ErrorDomain.STORAGE,
2277
+ category: error.ErrorCategory.USER,
2278
+ details: {
2279
+ count: args.records.length
2280
+ }
2281
+ },
2282
+ error$1
2283
+ );
2284
+ }
2285
+ }
2286
+ async batchUpdateSpans(args) {
2287
+ if (!args.records || args.records.length === 0) {
2288
+ return;
2289
+ }
2290
+ try {
2291
+ const updates = args.records.map(({ traceId, spanId, updates: data }) => {
2292
+ const processedData = { ...data };
2293
+ if (processedData.endedAt instanceof Date) {
2294
+ processedData.endedAt = processedData.endedAt.toISOString();
2295
+ }
2296
+ if (processedData.startedAt instanceof Date) {
2297
+ processedData.startedAt = processedData.startedAt.toISOString();
2298
+ }
2299
+ return {
2300
+ keys: { spanId, traceId },
2301
+ data: processedData
2302
+ };
2303
+ });
2304
+ await this.db.batchUpdate({
2305
+ tableName: storage.TABLE_SPANS,
2306
+ updates
2307
+ });
2308
+ } catch (error$1) {
2309
+ throw new error.MastraError(
2310
+ {
2311
+ id: storage.createStorageErrorId("MSSQL", "BATCH_UPDATE_SPANS", "FAILED"),
2312
+ domain: error.ErrorDomain.STORAGE,
2313
+ category: error.ErrorCategory.USER,
2314
+ details: {
2315
+ count: args.records.length
2316
+ }
2317
+ },
2318
+ error$1
2319
+ );
2320
+ }
2321
+ }
2322
+ async batchDeleteTraces(args) {
2323
+ if (!args.traceIds || args.traceIds.length === 0) {
2324
+ return;
2325
+ }
2326
+ try {
2327
+ const keys = args.traceIds.map((traceId) => ({ traceId }));
2328
+ await this.db.batchDelete({
2329
+ tableName: storage.TABLE_SPANS,
2330
+ keys
2331
+ });
2332
+ } catch (error$1) {
2333
+ throw new error.MastraError(
2334
+ {
2335
+ id: storage.createStorageErrorId("MSSQL", "BATCH_DELETE_TRACES", "FAILED"),
2336
+ domain: error.ErrorDomain.STORAGE,
2337
+ category: error.ErrorCategory.USER,
2338
+ details: {
2339
+ count: args.traceIds.length
2340
+ }
2341
+ },
2342
+ error$1
2343
+ );
2344
+ }
2345
+ }
2346
+ };
2347
+ function transformScoreRow(row) {
2348
+ return storage.transformScoreRow(row, {
2349
+ convertTimestamps: true
2350
+ });
2351
+ }
2352
+ var ScoresMSSQL = class extends storage.ScoresStorage {
2353
+ pool;
2354
+ db;
2355
+ schema;
2356
+ needsConnect;
2357
+ constructor(config) {
2358
+ super();
2359
+ const { pool, db, schema, needsConnect } = resolveMssqlConfig(config);
2360
+ this.pool = pool;
2361
+ this.db = db;
2362
+ this.schema = schema;
2363
+ this.needsConnect = needsConnect;
2364
+ }
2365
+ async init() {
2366
+ if (this.needsConnect) {
2367
+ await this.pool.connect();
2368
+ this.needsConnect = false;
2369
+ }
2370
+ await this.db.createTable({ tableName: storage.TABLE_SCORERS, schema: storage.TABLE_SCHEMAS[storage.TABLE_SCORERS] });
2371
+ }
2372
+ async dangerouslyClearAll() {
2373
+ await this.db.clearTable({ tableName: storage.TABLE_SCORERS });
2374
+ }
2375
+ async getScoreById({ id }) {
2376
+ try {
2377
+ const request = this.pool.request();
2378
+ request.input("p1", id);
2379
+ const result = await request.query(
2380
+ `SELECT * FROM ${getTableName2({ indexName: storage.TABLE_SCORERS, schemaName: getSchemaName2(this.schema) })} WHERE id = @p1`
2381
+ );
2382
+ if (result.recordset.length === 0) {
2383
+ return null;
2384
+ }
2385
+ return transformScoreRow(result.recordset[0]);
2386
+ } catch (error$1) {
2387
+ throw new error.MastraError(
2388
+ {
2389
+ id: storage.createStorageErrorId("MSSQL", "GET_SCORE_BY_ID", "FAILED"),
2390
+ domain: error.ErrorDomain.STORAGE,
2391
+ category: error.ErrorCategory.THIRD_PARTY,
2392
+ details: { id }
2393
+ },
2394
+ error$1
2395
+ );
2396
+ }
2397
+ }
2398
+ async saveScore(score) {
2399
+ let validatedScore;
2400
+ try {
2401
+ validatedScore = evals.saveScorePayloadSchema.parse(score);
2402
+ } catch (error$1) {
2403
+ throw new error.MastraError(
2404
+ {
2405
+ id: storage.createStorageErrorId("MSSQL", "SAVE_SCORE", "VALIDATION_FAILED"),
2406
+ domain: error.ErrorDomain.STORAGE,
2407
+ category: error.ErrorCategory.USER,
2408
+ details: {
2409
+ scorer: score.scorer?.id ?? "unknown",
2410
+ entityId: score.entityId ?? "unknown",
2411
+ entityType: score.entityType ?? "unknown",
2412
+ traceId: score.traceId ?? "",
2413
+ spanId: score.spanId ?? ""
2414
+ }
2415
+ },
2416
+ error$1
2417
+ );
2418
+ }
2419
+ try {
2420
+ const scoreId = crypto.randomUUID();
2421
+ const now = /* @__PURE__ */ new Date();
2422
+ const {
2423
+ scorer,
2424
+ preprocessStepResult,
2425
+ analyzeStepResult,
2426
+ metadata,
2427
+ input,
2428
+ output,
2429
+ additionalContext,
2430
+ requestContext,
2431
+ entity,
2432
+ ...rest
2433
+ } = validatedScore;
2434
+ await this.db.insert({
2435
+ tableName: storage.TABLE_SCORERS,
2436
+ record: {
2437
+ id: scoreId,
2438
+ ...rest,
2439
+ input: input || "",
2440
+ output: output || "",
2441
+ preprocessStepResult: preprocessStepResult || null,
2442
+ analyzeStepResult: analyzeStepResult || null,
2443
+ metadata: metadata || null,
2444
+ additionalContext: additionalContext || null,
2445
+ requestContext: requestContext || null,
2446
+ entity: entity || null,
2447
+ scorer: scorer || null,
2448
+ createdAt: now.toISOString(),
2449
+ updatedAt: now.toISOString()
1459
2450
  }
2451
+ });
2452
+ return { score: { ...validatedScore, id: scoreId, createdAt: now, updatedAt: now } };
2453
+ } catch (error$1) {
2454
+ throw new error.MastraError(
2455
+ {
2456
+ id: storage.createStorageErrorId("MSSQL", "SAVE_SCORE", "FAILED"),
2457
+ domain: error.ErrorDomain.STORAGE,
2458
+ category: error.ErrorCategory.THIRD_PARTY
2459
+ },
2460
+ error$1
2461
+ );
2462
+ }
2463
+ }
2464
+ async listScoresByScorerId({
2465
+ scorerId,
2466
+ pagination,
2467
+ entityId,
2468
+ entityType,
2469
+ source
2470
+ }) {
2471
+ try {
2472
+ const conditions = ["[scorerId] = @p1"];
2473
+ const params = { p1: scorerId };
2474
+ let paramIndex = 2;
2475
+ if (entityId) {
2476
+ conditions.push(`[entityId] = @p${paramIndex}`);
2477
+ params[`p${paramIndex}`] = entityId;
2478
+ paramIndex++;
1460
2479
  }
1461
- return msg;
1462
- });
1463
- const threadIdsToUpdate = /* @__PURE__ */ new Set();
2480
+ if (entityType) {
2481
+ conditions.push(`[entityType] = @p${paramIndex}`);
2482
+ params[`p${paramIndex}`] = entityType;
2483
+ paramIndex++;
2484
+ }
2485
+ if (source) {
2486
+ conditions.push(`[source] = @p${paramIndex}`);
2487
+ params[`p${paramIndex}`] = source;
2488
+ paramIndex++;
2489
+ }
2490
+ const whereClause = conditions.join(" AND ");
2491
+ const tableName = getTableName2({ indexName: storage.TABLE_SCORERS, schemaName: getSchemaName2(this.schema) });
2492
+ const countRequest = this.pool.request();
2493
+ Object.entries(params).forEach(([key, value]) => {
2494
+ countRequest.input(key, value);
2495
+ });
2496
+ const totalResult = await countRequest.query(`SELECT COUNT(*) as count FROM ${tableName} WHERE ${whereClause}`);
2497
+ const total = totalResult.recordset[0]?.count || 0;
2498
+ const { page, perPage: perPageInput } = pagination;
2499
+ if (total === 0) {
2500
+ return {
2501
+ pagination: {
2502
+ total: 0,
2503
+ page,
2504
+ perPage: perPageInput,
2505
+ hasMore: false
2506
+ },
2507
+ scores: []
2508
+ };
2509
+ }
2510
+ const perPage = storage.normalizePerPage(perPageInput, 100);
2511
+ const { offset: start, perPage: perPageForResponse } = storage.calculatePagination(page, perPageInput, perPage);
2512
+ const limitValue = perPageInput === false ? total : perPage;
2513
+ const end = perPageInput === false ? total : start + perPage;
2514
+ const dataRequest = this.pool.request();
2515
+ Object.entries(params).forEach(([key, value]) => {
2516
+ dataRequest.input(key, value);
2517
+ });
2518
+ dataRequest.input("perPage", limitValue);
2519
+ dataRequest.input("offset", start);
2520
+ const dataQuery = `SELECT * FROM ${tableName} WHERE ${whereClause} ORDER BY [createdAt] DESC OFFSET @offset ROWS FETCH NEXT @perPage ROWS ONLY`;
2521
+ const result = await dataRequest.query(dataQuery);
2522
+ return {
2523
+ pagination: {
2524
+ total: Number(total),
2525
+ page,
2526
+ perPage: perPageForResponse,
2527
+ hasMore: end < total
2528
+ },
2529
+ scores: result.recordset.map((row) => transformScoreRow(row))
2530
+ };
2531
+ } catch (error$1) {
2532
+ throw new error.MastraError(
2533
+ {
2534
+ id: storage.createStorageErrorId("MSSQL", "LIST_SCORES_BY_SCORER_ID", "FAILED"),
2535
+ domain: error.ErrorDomain.STORAGE,
2536
+ category: error.ErrorCategory.THIRD_PARTY,
2537
+ details: { scorerId }
2538
+ },
2539
+ error$1
2540
+ );
2541
+ }
2542
+ }
2543
+ async listScoresByRunId({
2544
+ runId,
2545
+ pagination
2546
+ }) {
2547
+ try {
2548
+ const request = this.pool.request();
2549
+ request.input("p1", runId);
2550
+ const totalResult = await request.query(
2551
+ `SELECT COUNT(*) as count FROM ${getTableName2({ indexName: storage.TABLE_SCORERS, schemaName: getSchemaName2(this.schema) })} WHERE [runId] = @p1`
2552
+ );
2553
+ const total = totalResult.recordset[0]?.count || 0;
2554
+ const { page, perPage: perPageInput } = pagination;
2555
+ if (total === 0) {
2556
+ return {
2557
+ pagination: {
2558
+ total: 0,
2559
+ page,
2560
+ perPage: perPageInput,
2561
+ hasMore: false
2562
+ },
2563
+ scores: []
2564
+ };
2565
+ }
2566
+ const perPage = storage.normalizePerPage(perPageInput, 100);
2567
+ const { offset: start, perPage: perPageForResponse } = storage.calculatePagination(page, perPageInput, perPage);
2568
+ const limitValue = perPageInput === false ? total : perPage;
2569
+ const end = perPageInput === false ? total : start + perPage;
2570
+ const dataRequest = this.pool.request();
2571
+ dataRequest.input("p1", runId);
2572
+ dataRequest.input("p2", limitValue);
2573
+ dataRequest.input("p3", start);
2574
+ const result = await dataRequest.query(
2575
+ `SELECT * FROM ${getTableName2({ indexName: storage.TABLE_SCORERS, schemaName: getSchemaName2(this.schema) })} WHERE [runId] = @p1 ORDER BY [createdAt] DESC OFFSET @p3 ROWS FETCH NEXT @p2 ROWS ONLY`
2576
+ );
2577
+ return {
2578
+ pagination: {
2579
+ total: Number(total),
2580
+ page,
2581
+ perPage: perPageForResponse,
2582
+ hasMore: end < total
2583
+ },
2584
+ scores: result.recordset.map((row) => transformScoreRow(row))
2585
+ };
2586
+ } catch (error$1) {
2587
+ throw new error.MastraError(
2588
+ {
2589
+ id: storage.createStorageErrorId("MSSQL", "LIST_SCORES_BY_RUN_ID", "FAILED"),
2590
+ domain: error.ErrorDomain.STORAGE,
2591
+ category: error.ErrorCategory.THIRD_PARTY,
2592
+ details: { runId }
2593
+ },
2594
+ error$1
2595
+ );
2596
+ }
2597
+ }
2598
+ async listScoresByEntityId({
2599
+ entityId,
2600
+ entityType,
2601
+ pagination
2602
+ }) {
2603
+ try {
2604
+ const request = this.pool.request();
2605
+ request.input("p1", entityId);
2606
+ request.input("p2", entityType);
2607
+ const totalResult = await request.query(
2608
+ `SELECT COUNT(*) as count FROM ${getTableName2({ indexName: storage.TABLE_SCORERS, schemaName: getSchemaName2(this.schema) })} WHERE [entityId] = @p1 AND [entityType] = @p2`
2609
+ );
2610
+ const total = totalResult.recordset[0]?.count || 0;
2611
+ const { page, perPage: perPageInput } = pagination;
2612
+ const perPage = storage.normalizePerPage(perPageInput, 100);
2613
+ const { offset: start, perPage: perPageForResponse } = storage.calculatePagination(page, perPageInput, perPage);
2614
+ if (total === 0) {
2615
+ return {
2616
+ pagination: {
2617
+ total: 0,
2618
+ page,
2619
+ perPage: perPageForResponse,
2620
+ hasMore: false
2621
+ },
2622
+ scores: []
2623
+ };
2624
+ }
2625
+ const limitValue = perPageInput === false ? total : perPage;
2626
+ const end = perPageInput === false ? total : start + perPage;
2627
+ const dataRequest = this.pool.request();
2628
+ dataRequest.input("p1", entityId);
2629
+ dataRequest.input("p2", entityType);
2630
+ dataRequest.input("p3", limitValue);
2631
+ dataRequest.input("p4", start);
2632
+ const result = await dataRequest.query(
2633
+ `SELECT * FROM ${getTableName2({ indexName: storage.TABLE_SCORERS, schemaName: getSchemaName2(this.schema) })} WHERE [entityId] = @p1 AND [entityType] = @p2 ORDER BY [createdAt] DESC OFFSET @p4 ROWS FETCH NEXT @p3 ROWS ONLY`
2634
+ );
2635
+ return {
2636
+ pagination: {
2637
+ total: Number(total),
2638
+ page,
2639
+ perPage: perPageForResponse,
2640
+ hasMore: end < total
2641
+ },
2642
+ scores: result.recordset.map((row) => transformScoreRow(row))
2643
+ };
2644
+ } catch (error$1) {
2645
+ throw new error.MastraError(
2646
+ {
2647
+ id: storage.createStorageErrorId("MSSQL", "LIST_SCORES_BY_ENTITY_ID", "FAILED"),
2648
+ domain: error.ErrorDomain.STORAGE,
2649
+ category: error.ErrorCategory.THIRD_PARTY,
2650
+ details: { entityId, entityType }
2651
+ },
2652
+ error$1
2653
+ );
2654
+ }
2655
+ }
2656
+ async listScoresBySpan({
2657
+ traceId,
2658
+ spanId,
2659
+ pagination
2660
+ }) {
2661
+ try {
2662
+ const request = this.pool.request();
2663
+ request.input("p1", traceId);
2664
+ request.input("p2", spanId);
2665
+ const totalResult = await request.query(
2666
+ `SELECT COUNT(*) as count FROM ${getTableName2({ indexName: storage.TABLE_SCORERS, schemaName: getSchemaName2(this.schema) })} WHERE [traceId] = @p1 AND [spanId] = @p2`
2667
+ );
2668
+ const total = totalResult.recordset[0]?.count || 0;
2669
+ const { page, perPage: perPageInput } = pagination;
2670
+ const perPage = storage.normalizePerPage(perPageInput, 100);
2671
+ const { offset: start, perPage: perPageForResponse } = storage.calculatePagination(page, perPageInput, perPage);
2672
+ if (total === 0) {
2673
+ return {
2674
+ pagination: {
2675
+ total: 0,
2676
+ page,
2677
+ perPage: perPageForResponse,
2678
+ hasMore: false
2679
+ },
2680
+ scores: []
2681
+ };
2682
+ }
2683
+ const limitValue = perPageInput === false ? total : perPage;
2684
+ const end = perPageInput === false ? total : start + perPage;
2685
+ const dataRequest = this.pool.request();
2686
+ dataRequest.input("p1", traceId);
2687
+ dataRequest.input("p2", spanId);
2688
+ dataRequest.input("p3", limitValue);
2689
+ dataRequest.input("p4", start);
2690
+ const result = await dataRequest.query(
2691
+ `SELECT * FROM ${getTableName2({ indexName: storage.TABLE_SCORERS, schemaName: getSchemaName2(this.schema) })} WHERE [traceId] = @p1 AND [spanId] = @p2 ORDER BY [createdAt] DESC OFFSET @p4 ROWS FETCH NEXT @p3 ROWS ONLY`
2692
+ );
2693
+ return {
2694
+ pagination: {
2695
+ total: Number(total),
2696
+ page,
2697
+ perPage: perPageForResponse,
2698
+ hasMore: end < total
2699
+ },
2700
+ scores: result.recordset.map((row) => transformScoreRow(row))
2701
+ };
2702
+ } catch (error$1) {
2703
+ throw new error.MastraError(
2704
+ {
2705
+ id: storage.createStorageErrorId("MSSQL", "LIST_SCORES_BY_SPAN", "FAILED"),
2706
+ domain: error.ErrorDomain.STORAGE,
2707
+ category: error.ErrorCategory.THIRD_PARTY,
2708
+ details: { traceId, spanId }
2709
+ },
2710
+ error$1
2711
+ );
2712
+ }
2713
+ }
2714
+ };
2715
+ var WorkflowsMSSQL = class extends storage.WorkflowsStorage {
2716
+ pool;
2717
+ db;
2718
+ schema;
2719
+ needsConnect;
2720
+ constructor(config) {
2721
+ super();
2722
+ const { pool, db, schema, needsConnect } = resolveMssqlConfig(config);
2723
+ this.pool = pool;
2724
+ this.db = db;
2725
+ this.schema = schema;
2726
+ this.needsConnect = needsConnect;
2727
+ }
2728
+ async init() {
2729
+ if (this.needsConnect) {
2730
+ await this.pool.connect();
2731
+ this.needsConnect = false;
2732
+ }
2733
+ await this.db.createTable({ tableName: storage.TABLE_WORKFLOW_SNAPSHOT, schema: storage.TABLE_SCHEMAS[storage.TABLE_WORKFLOW_SNAPSHOT] });
2734
+ }
2735
+ async dangerouslyClearAll() {
2736
+ await this.db.clearTable({ tableName: storage.TABLE_WORKFLOW_SNAPSHOT });
2737
+ }
2738
+ parseWorkflowRun(row) {
2739
+ let parsedSnapshot = row.snapshot;
2740
+ if (typeof parsedSnapshot === "string") {
2741
+ try {
2742
+ parsedSnapshot = JSON.parse(row.snapshot);
2743
+ } catch (e) {
2744
+ this.logger?.warn?.(`Failed to parse snapshot for workflow ${row.workflow_name}:`, e);
2745
+ }
2746
+ }
2747
+ return {
2748
+ workflowName: row.workflow_name,
2749
+ runId: row.run_id,
2750
+ snapshot: parsedSnapshot,
2751
+ createdAt: row.createdAt,
2752
+ updatedAt: row.updatedAt,
2753
+ resourceId: row.resourceId
2754
+ };
2755
+ }
2756
+ async updateWorkflowResults({
2757
+ workflowName,
2758
+ runId,
2759
+ stepId,
2760
+ result,
2761
+ requestContext
2762
+ }) {
2763
+ const table = getTableName2({ indexName: storage.TABLE_WORKFLOW_SNAPSHOT, schemaName: getSchemaName2(this.schema) });
1464
2764
  const transaction = this.pool.transaction();
1465
2765
  try {
1466
2766
  await transaction.begin();
1467
- for (const existingMessage of existingMessages) {
1468
- const updatePayload = messages.find((m) => m.id === existingMessage.id);
1469
- if (!updatePayload) continue;
1470
- const { id, ...fieldsToUpdate } = updatePayload;
1471
- if (Object.keys(fieldsToUpdate).length === 0) continue;
1472
- threadIdsToUpdate.add(existingMessage.threadId);
1473
- if (updatePayload.threadId && updatePayload.threadId !== existingMessage.threadId) {
1474
- threadIdsToUpdate.add(updatePayload.threadId);
1475
- }
1476
- const setClauses = [];
1477
- const req = transaction.request();
1478
- req.input("id", id);
1479
- const columnMapping = { threadId: "thread_id" };
1480
- const updatableFields = { ...fieldsToUpdate };
1481
- if (updatableFields.content) {
1482
- const newContent = {
1483
- ...existingMessage.content,
1484
- ...updatableFields.content,
1485
- ...existingMessage.content?.metadata && updatableFields.content.metadata ? { metadata: { ...existingMessage.content.metadata, ...updatableFields.content.metadata } } : {}
1486
- };
1487
- setClauses.push(`content = @content`);
1488
- req.input("content", JSON.stringify(newContent));
1489
- delete updatableFields.content;
1490
- }
1491
- for (const key in updatableFields) {
1492
- if (Object.prototype.hasOwnProperty.call(updatableFields, key)) {
1493
- const dbColumn = columnMapping[key] || key;
1494
- setClauses.push(`[${dbColumn}] = @${dbColumn}`);
1495
- req.input(dbColumn, updatableFields[key]);
1496
- }
1497
- }
1498
- if (setClauses.length > 0) {
1499
- const updateSql = `UPDATE ${this.getTableName(storage.TABLE_MESSAGES)} SET ${setClauses.join(", ")} WHERE id = @id`;
1500
- await req.query(updateSql);
1501
- }
1502
- }
1503
- if (threadIdsToUpdate.size > 0) {
1504
- const threadIdParams = Array.from(threadIdsToUpdate).map((_, i) => `@tid${i}`).join(", ");
1505
- const threadReq = transaction.request();
1506
- Array.from(threadIdsToUpdate).forEach((tid, i) => threadReq.input(`tid${i}`, tid));
1507
- threadReq.input("updatedAt", (/* @__PURE__ */ new Date()).toISOString());
1508
- const threadSql = `UPDATE ${this.getTableName(storage.TABLE_THREADS)} SET updatedAt = @updatedAt WHERE id IN (${threadIdParams})`;
1509
- await threadReq.query(threadSql);
2767
+ const selectRequest = new sql__default.default.Request(transaction);
2768
+ selectRequest.input("workflow_name", workflowName);
2769
+ selectRequest.input("run_id", runId);
2770
+ const existingSnapshotResult = await selectRequest.query(
2771
+ `SELECT snapshot FROM ${table} WITH (UPDLOCK, HOLDLOCK) WHERE workflow_name = @workflow_name AND run_id = @run_id`
2772
+ );
2773
+ let snapshot;
2774
+ if (!existingSnapshotResult.recordset || existingSnapshotResult.recordset.length === 0) {
2775
+ snapshot = {
2776
+ context: {},
2777
+ activePaths: [],
2778
+ activeStepsPath: {},
2779
+ timestamp: Date.now(),
2780
+ suspendedPaths: {},
2781
+ resumeLabels: {},
2782
+ serializedStepGraph: [],
2783
+ status: "pending",
2784
+ value: {},
2785
+ waitingPaths: {},
2786
+ runId,
2787
+ requestContext: {}
2788
+ };
2789
+ } else {
2790
+ const existingSnapshot = existingSnapshotResult.recordset[0].snapshot;
2791
+ snapshot = typeof existingSnapshot === "string" ? JSON.parse(existingSnapshot) : existingSnapshot;
1510
2792
  }
2793
+ snapshot.context[stepId] = result;
2794
+ snapshot.requestContext = { ...snapshot.requestContext, ...requestContext };
2795
+ const upsertReq = new sql__default.default.Request(transaction);
2796
+ upsertReq.input("workflow_name", workflowName);
2797
+ upsertReq.input("run_id", runId);
2798
+ upsertReq.input("snapshot", JSON.stringify(snapshot));
2799
+ upsertReq.input("createdAt", sql__default.default.DateTime2, /* @__PURE__ */ new Date());
2800
+ upsertReq.input("updatedAt", sql__default.default.DateTime2, /* @__PURE__ */ new Date());
2801
+ await upsertReq.query(
2802
+ `MERGE ${table} AS target
2803
+ USING (SELECT @workflow_name AS workflow_name, @run_id AS run_id) AS src
2804
+ ON target.workflow_name = src.workflow_name AND target.run_id = src.run_id
2805
+ WHEN MATCHED THEN UPDATE SET snapshot = @snapshot, [updatedAt] = @updatedAt
2806
+ WHEN NOT MATCHED THEN INSERT (workflow_name, run_id, snapshot, [createdAt], [updatedAt])
2807
+ VALUES (@workflow_name, @run_id, @snapshot, @createdAt, @updatedAt);`
2808
+ );
1511
2809
  await transaction.commit();
2810
+ return snapshot.context;
1512
2811
  } catch (error$1) {
1513
- await transaction.rollback();
2812
+ try {
2813
+ await transaction.rollback();
2814
+ } catch {
2815
+ }
1514
2816
  throw new error.MastraError(
1515
2817
  {
1516
- id: "MASTRA_STORAGE_MSSQL_UPDATE_MESSAGES_FAILED",
2818
+ id: storage.createStorageErrorId("MSSQL", "UPDATE_WORKFLOW_RESULTS", "FAILED"),
1517
2819
  domain: error.ErrorDomain.STORAGE,
1518
- category: error.ErrorCategory.THIRD_PARTY
2820
+ category: error.ErrorCategory.THIRD_PARTY,
2821
+ details: {
2822
+ workflowName,
2823
+ runId,
2824
+ stepId
2825
+ }
1519
2826
  },
1520
2827
  error$1
1521
2828
  );
1522
2829
  }
1523
- const refetchReq = this.pool.request();
1524
- messageIds.forEach((id, i) => refetchReq.input(`id${i}`, id));
1525
- const updatedMessages = (await refetchReq.query(selectQuery)).recordset;
1526
- return (updatedMessages || []).map((message) => {
1527
- if (typeof message.content === "string") {
1528
- try {
1529
- message.content = JSON.parse(message.content);
1530
- } catch {
1531
- }
1532
- }
1533
- return message;
1534
- });
1535
2830
  }
1536
- async close() {
1537
- if (this.pool) {
2831
+ async updateWorkflowState({
2832
+ workflowName,
2833
+ runId,
2834
+ opts
2835
+ }) {
2836
+ const table = getTableName2({ indexName: storage.TABLE_WORKFLOW_SNAPSHOT, schemaName: getSchemaName2(this.schema) });
2837
+ const transaction = this.pool.transaction();
2838
+ try {
2839
+ await transaction.begin();
2840
+ const selectRequest = new sql__default.default.Request(transaction);
2841
+ selectRequest.input("workflow_name", workflowName);
2842
+ selectRequest.input("run_id", runId);
2843
+ const existingSnapshotResult = await selectRequest.query(
2844
+ `SELECT snapshot FROM ${table} WITH (UPDLOCK, HOLDLOCK) WHERE workflow_name = @workflow_name AND run_id = @run_id`
2845
+ );
2846
+ if (!existingSnapshotResult.recordset || existingSnapshotResult.recordset.length === 0) {
2847
+ await transaction.rollback();
2848
+ return void 0;
2849
+ }
2850
+ const existingSnapshot = existingSnapshotResult.recordset[0].snapshot;
2851
+ const snapshot = typeof existingSnapshot === "string" ? JSON.parse(existingSnapshot) : existingSnapshot;
2852
+ if (!snapshot || !snapshot?.context) {
2853
+ await transaction.rollback();
2854
+ throw new error.MastraError(
2855
+ {
2856
+ id: storage.createStorageErrorId("MSSQL", "UPDATE_WORKFLOW_STATE", "SNAPSHOT_NOT_FOUND"),
2857
+ domain: error.ErrorDomain.STORAGE,
2858
+ category: error.ErrorCategory.SYSTEM,
2859
+ details: {
2860
+ workflowName,
2861
+ runId
2862
+ }
2863
+ },
2864
+ new Error(`Snapshot not found for runId ${runId}`)
2865
+ );
2866
+ }
2867
+ const updatedSnapshot = { ...snapshot, ...opts };
2868
+ const updateRequest = new sql__default.default.Request(transaction);
2869
+ updateRequest.input("snapshot", JSON.stringify(updatedSnapshot));
2870
+ updateRequest.input("workflow_name", workflowName);
2871
+ updateRequest.input("run_id", runId);
2872
+ updateRequest.input("updatedAt", sql__default.default.DateTime2, /* @__PURE__ */ new Date());
2873
+ await updateRequest.query(
2874
+ `UPDATE ${table} SET snapshot = @snapshot, [updatedAt] = @updatedAt WHERE workflow_name = @workflow_name AND run_id = @run_id`
2875
+ );
2876
+ await transaction.commit();
2877
+ return updatedSnapshot;
2878
+ } catch (error$1) {
1538
2879
  try {
1539
- if (this.pool.connected) {
1540
- await this.pool.close();
1541
- } else if (this.pool.connecting) {
1542
- await this.pool.connect();
1543
- await this.pool.close();
1544
- }
1545
- } catch (err) {
1546
- if (err.message && err.message.includes("Cannot close a pool while it is connecting")) ; else {
1547
- throw err;
1548
- }
2880
+ await transaction.rollback();
2881
+ } catch {
1549
2882
  }
2883
+ if (error$1 instanceof error.MastraError) throw error$1;
2884
+ throw new error.MastraError(
2885
+ {
2886
+ id: storage.createStorageErrorId("MSSQL", "UPDATE_WORKFLOW_STATE", "FAILED"),
2887
+ domain: error.ErrorDomain.STORAGE,
2888
+ category: error.ErrorCategory.THIRD_PARTY,
2889
+ details: {
2890
+ workflowName,
2891
+ runId
2892
+ }
2893
+ },
2894
+ error$1
2895
+ );
1550
2896
  }
1551
2897
  }
1552
- async getEvals(options = {}) {
1553
- const { agentName, type, page = 0, perPage = 100, dateRange } = options;
1554
- const fromDate = dateRange?.start;
1555
- const toDate = dateRange?.end;
1556
- const where = [];
1557
- const params = {};
1558
- if (agentName) {
1559
- where.push("agent_name = @agentName");
1560
- params["agentName"] = agentName;
2898
+ async persistWorkflowSnapshot({
2899
+ workflowName,
2900
+ runId,
2901
+ resourceId,
2902
+ snapshot
2903
+ }) {
2904
+ const table = getTableName2({ indexName: storage.TABLE_WORKFLOW_SNAPSHOT, schemaName: getSchemaName2(this.schema) });
2905
+ const now = (/* @__PURE__ */ new Date()).toISOString();
2906
+ try {
2907
+ const request = this.pool.request();
2908
+ request.input("workflow_name", workflowName);
2909
+ request.input("run_id", runId);
2910
+ request.input("resourceId", resourceId);
2911
+ request.input("snapshot", JSON.stringify(snapshot));
2912
+ request.input("createdAt", sql__default.default.DateTime2, new Date(now));
2913
+ request.input("updatedAt", sql__default.default.DateTime2, new Date(now));
2914
+ const mergeSql = `MERGE INTO ${table} AS target
2915
+ USING (SELECT @workflow_name AS workflow_name, @run_id AS run_id) AS src
2916
+ ON target.workflow_name = src.workflow_name AND target.run_id = src.run_id
2917
+ WHEN MATCHED THEN UPDATE SET
2918
+ resourceId = @resourceId,
2919
+ snapshot = @snapshot,
2920
+ [updatedAt] = @updatedAt
2921
+ WHEN NOT MATCHED THEN INSERT (workflow_name, run_id, resourceId, snapshot, [createdAt], [updatedAt])
2922
+ VALUES (@workflow_name, @run_id, @resourceId, @snapshot, @createdAt, @updatedAt);`;
2923
+ await request.query(mergeSql);
2924
+ } catch (error$1) {
2925
+ throw new error.MastraError(
2926
+ {
2927
+ id: storage.createStorageErrorId("MSSQL", "PERSIST_WORKFLOW_SNAPSHOT", "FAILED"),
2928
+ domain: error.ErrorDomain.STORAGE,
2929
+ category: error.ErrorCategory.THIRD_PARTY,
2930
+ details: {
2931
+ workflowName,
2932
+ runId
2933
+ }
2934
+ },
2935
+ error$1
2936
+ );
1561
2937
  }
1562
- if (type === "test") {
1563
- where.push("test_info IS NOT NULL AND JSON_VALUE(test_info, '$.testPath') IS NOT NULL");
1564
- } else if (type === "live") {
1565
- where.push("(test_info IS NULL OR JSON_VALUE(test_info, '$.testPath') IS NULL)");
2938
+ }
2939
+ async loadWorkflowSnapshot({
2940
+ workflowName,
2941
+ runId
2942
+ }) {
2943
+ try {
2944
+ const result = await this.db.load({
2945
+ tableName: storage.TABLE_WORKFLOW_SNAPSHOT,
2946
+ keys: {
2947
+ workflow_name: workflowName,
2948
+ run_id: runId
2949
+ }
2950
+ });
2951
+ if (!result) {
2952
+ return null;
2953
+ }
2954
+ return result.snapshot;
2955
+ } catch (error$1) {
2956
+ throw new error.MastraError(
2957
+ {
2958
+ id: storage.createStorageErrorId("MSSQL", "LOAD_WORKFLOW_SNAPSHOT", "FAILED"),
2959
+ domain: error.ErrorDomain.STORAGE,
2960
+ category: error.ErrorCategory.THIRD_PARTY,
2961
+ details: {
2962
+ workflowName,
2963
+ runId
2964
+ }
2965
+ },
2966
+ error$1
2967
+ );
1566
2968
  }
1567
- if (fromDate instanceof Date && !isNaN(fromDate.getTime())) {
1568
- where.push(`[created_at] >= @fromDate`);
1569
- params[`fromDate`] = fromDate.toISOString();
2969
+ }
2970
+ async getWorkflowRunById({
2971
+ runId,
2972
+ workflowName
2973
+ }) {
2974
+ try {
2975
+ const conditions = [];
2976
+ const paramMap = {};
2977
+ if (runId) {
2978
+ conditions.push(`[run_id] = @runId`);
2979
+ paramMap["runId"] = runId;
2980
+ }
2981
+ if (workflowName) {
2982
+ conditions.push(`[workflow_name] = @workflowName`);
2983
+ paramMap["workflowName"] = workflowName;
2984
+ }
2985
+ const whereClause = conditions.length > 0 ? `WHERE ${conditions.join(" AND ")}` : "";
2986
+ const tableName = getTableName2({ indexName: storage.TABLE_WORKFLOW_SNAPSHOT, schemaName: getSchemaName2(this.schema) });
2987
+ const query = `SELECT * FROM ${tableName} ${whereClause}`;
2988
+ const request = this.pool.request();
2989
+ Object.entries(paramMap).forEach(([key, value]) => request.input(key, value));
2990
+ const result = await request.query(query);
2991
+ if (!result.recordset || result.recordset.length === 0) {
2992
+ return null;
2993
+ }
2994
+ return this.parseWorkflowRun(result.recordset[0]);
2995
+ } catch (error$1) {
2996
+ throw new error.MastraError(
2997
+ {
2998
+ id: storage.createStorageErrorId("MSSQL", "GET_WORKFLOW_RUN_BY_ID", "FAILED"),
2999
+ domain: error.ErrorDomain.STORAGE,
3000
+ category: error.ErrorCategory.THIRD_PARTY,
3001
+ details: {
3002
+ runId,
3003
+ workflowName: workflowName || ""
3004
+ }
3005
+ },
3006
+ error$1
3007
+ );
1570
3008
  }
1571
- if (toDate instanceof Date && !isNaN(toDate.getTime())) {
1572
- where.push(`[created_at] <= @toDate`);
1573
- params[`toDate`] = toDate.toISOString();
3009
+ }
3010
+ async deleteWorkflowRunById({ runId, workflowName }) {
3011
+ const table = getTableName2({ indexName: storage.TABLE_WORKFLOW_SNAPSHOT, schemaName: getSchemaName2(this.schema) });
3012
+ const transaction = this.pool.transaction();
3013
+ try {
3014
+ await transaction.begin();
3015
+ const deleteRequest = new sql__default.default.Request(transaction);
3016
+ deleteRequest.input("workflow_name", workflowName);
3017
+ deleteRequest.input("run_id", runId);
3018
+ await deleteRequest.query(`DELETE FROM ${table} WHERE workflow_name = @workflow_name AND run_id = @run_id`);
3019
+ await transaction.commit();
3020
+ } catch (error$1) {
3021
+ try {
3022
+ await transaction.rollback();
3023
+ } catch {
3024
+ }
3025
+ throw new error.MastraError(
3026
+ {
3027
+ id: storage.createStorageErrorId("MSSQL", "DELETE_WORKFLOW_RUN_BY_ID", "FAILED"),
3028
+ domain: error.ErrorDomain.STORAGE,
3029
+ category: error.ErrorCategory.THIRD_PARTY,
3030
+ details: {
3031
+ runId,
3032
+ workflowName
3033
+ }
3034
+ },
3035
+ error$1
3036
+ );
1574
3037
  }
1575
- const whereClause = where.length > 0 ? `WHERE ${where.join(" AND ")}` : "";
1576
- const tableName = this.getTableName(storage.TABLE_EVALS);
1577
- const offset = page * perPage;
1578
- const countQuery = `SELECT COUNT(*) as total FROM ${tableName} ${whereClause}`;
1579
- const dataQuery = `SELECT * FROM ${tableName} ${whereClause} ORDER BY seq_id DESC OFFSET @offset ROWS FETCH NEXT @perPage ROWS ONLY`;
3038
+ }
3039
+ async listWorkflowRuns({
3040
+ workflowName,
3041
+ fromDate,
3042
+ toDate,
3043
+ page,
3044
+ perPage,
3045
+ resourceId,
3046
+ status
3047
+ } = {}) {
1580
3048
  try {
1581
- const countReq = this.pool.request();
1582
- Object.entries(params).forEach(([key, value]) => {
1583
- if (value instanceof Date) {
1584
- countReq.input(key, sql__default.default.DateTime, value);
3049
+ const conditions = [];
3050
+ const paramMap = {};
3051
+ if (workflowName) {
3052
+ conditions.push(`[workflow_name] = @workflowName`);
3053
+ paramMap["workflowName"] = workflowName;
3054
+ }
3055
+ if (status) {
3056
+ conditions.push(`JSON_VALUE([snapshot], '$.status') = @status`);
3057
+ paramMap["status"] = status;
3058
+ }
3059
+ if (resourceId) {
3060
+ const hasResourceId = await this.db.hasColumn(storage.TABLE_WORKFLOW_SNAPSHOT, "resourceId");
3061
+ if (hasResourceId) {
3062
+ conditions.push(`[resourceId] = @resourceId`);
3063
+ paramMap["resourceId"] = resourceId;
1585
3064
  } else {
1586
- countReq.input(key, value);
3065
+ this.logger?.warn?.(`[${storage.TABLE_WORKFLOW_SNAPSHOT}] resourceId column not found. Skipping resourceId filter.`);
1587
3066
  }
1588
- });
1589
- const countResult = await countReq.query(countQuery);
1590
- const total = countResult.recordset[0]?.total || 0;
1591
- if (total === 0) {
1592
- return {
1593
- evals: [],
1594
- total: 0,
1595
- page,
1596
- perPage,
1597
- hasMore: false
1598
- };
1599
3067
  }
1600
- const req = this.pool.request();
1601
- Object.entries(params).forEach(([key, value]) => {
3068
+ if (fromDate instanceof Date && !isNaN(fromDate.getTime())) {
3069
+ conditions.push(`[createdAt] >= @fromDate`);
3070
+ paramMap[`fromDate`] = fromDate.toISOString();
3071
+ }
3072
+ if (toDate instanceof Date && !isNaN(toDate.getTime())) {
3073
+ conditions.push(`[createdAt] <= @toDate`);
3074
+ paramMap[`toDate`] = toDate.toISOString();
3075
+ }
3076
+ const whereClause = conditions.length > 0 ? `WHERE ${conditions.join(" AND ")}` : "";
3077
+ let total = 0;
3078
+ const tableName = getTableName2({ indexName: storage.TABLE_WORKFLOW_SNAPSHOT, schemaName: getSchemaName2(this.schema) });
3079
+ const request = this.pool.request();
3080
+ Object.entries(paramMap).forEach(([key, value]) => {
1602
3081
  if (value instanceof Date) {
1603
- req.input(key, sql__default.default.DateTime, value);
3082
+ request.input(key, sql__default.default.DateTime, value);
1604
3083
  } else {
1605
- req.input(key, value);
3084
+ request.input(key, value);
1606
3085
  }
1607
3086
  });
1608
- req.input("offset", offset);
1609
- req.input("perPage", perPage);
1610
- const result = await req.query(dataQuery);
1611
- const rows = result.recordset;
1612
- return {
1613
- evals: rows?.map((row) => this.transformEvalRow(row)) ?? [],
1614
- total,
1615
- page,
1616
- perPage,
1617
- hasMore: offset + (rows?.length ?? 0) < total
1618
- };
3087
+ const usePagination = typeof perPage === "number" && typeof page === "number";
3088
+ if (usePagination) {
3089
+ const countQuery = `SELECT COUNT(*) as count FROM ${tableName} ${whereClause}`;
3090
+ const countResult = await request.query(countQuery);
3091
+ total = Number(countResult.recordset[0]?.count || 0);
3092
+ }
3093
+ let query = `SELECT * FROM ${tableName} ${whereClause} ORDER BY [seq_id] DESC`;
3094
+ if (usePagination) {
3095
+ const normalizedPerPage = storage.normalizePerPage(perPage, Number.MAX_SAFE_INTEGER);
3096
+ const offset = page * normalizedPerPage;
3097
+ query += ` OFFSET @offset ROWS FETCH NEXT @perPage ROWS ONLY`;
3098
+ request.input("perPage", normalizedPerPage);
3099
+ request.input("offset", offset);
3100
+ }
3101
+ const result = await request.query(query);
3102
+ const runs = (result.recordset || []).map((row) => this.parseWorkflowRun(row));
3103
+ return { runs, total: total || runs.length };
1619
3104
  } catch (error$1) {
1620
- const mastraError = new error.MastraError(
3105
+ throw new error.MastraError(
1621
3106
  {
1622
- id: "MASTRA_STORAGE_MSSQL_STORE_GET_EVALS_FAILED",
3107
+ id: storage.createStorageErrorId("MSSQL", "LIST_WORKFLOW_RUNS", "FAILED"),
1623
3108
  domain: error.ErrorDomain.STORAGE,
1624
3109
  category: error.ErrorCategory.THIRD_PARTY,
1625
3110
  details: {
1626
- agentName: agentName || "all",
1627
- type: type || "all",
1628
- page,
1629
- perPage
3111
+ workflowName: workflowName || "all"
1630
3112
  }
1631
3113
  },
1632
3114
  error$1
1633
3115
  );
1634
- this.logger?.error?.(mastraError.toString());
1635
- this.logger?.trackException(mastraError);
1636
- throw mastraError;
1637
3116
  }
1638
3117
  }
1639
- async saveResource({ resource }) {
1640
- const tableName = this.getTableName(storage.TABLE_RESOURCES);
1641
- try {
1642
- const req = this.pool.request();
1643
- req.input("id", resource.id);
1644
- req.input("workingMemory", resource.workingMemory);
1645
- req.input("metadata", JSON.stringify(resource.metadata));
1646
- req.input("createdAt", resource.createdAt.toISOString());
1647
- req.input("updatedAt", resource.updatedAt.toISOString());
1648
- await req.query(
1649
- `INSERT INTO ${tableName} (id, workingMemory, metadata, createdAt, updatedAt) VALUES (@id, @workingMemory, @metadata, @createdAt, @updatedAt)`
1650
- );
1651
- return resource;
1652
- } catch (error$1) {
1653
- const mastraError = new error.MastraError(
1654
- {
1655
- id: "MASTRA_STORAGE_MSSQL_SAVE_RESOURCE_FAILED",
1656
- domain: error.ErrorDomain.STORAGE,
1657
- category: error.ErrorCategory.THIRD_PARTY,
1658
- details: { resourceId: resource.id }
1659
- },
1660
- error$1
1661
- );
1662
- this.logger?.error?.(mastraError.toString());
1663
- this.logger?.trackException(mastraError);
1664
- throw mastraError;
3118
+ };
3119
+
3120
+ // src/storage/index.ts
3121
+ var MSSQLStore = class extends storage.MastraStorage {
3122
+ pool;
3123
+ schema;
3124
+ isConnected = null;
3125
+ #db;
3126
+ stores;
3127
+ constructor(config) {
3128
+ if (!config.id || typeof config.id !== "string" || config.id.trim() === "") {
3129
+ throw new Error("MSSQLStore: id must be provided and cannot be empty.");
1665
3130
  }
1666
- }
1667
- async updateResource({
1668
- resourceId,
1669
- workingMemory,
1670
- metadata
1671
- }) {
3131
+ super({ id: config.id, name: "MSSQLStore", disableInit: config.disableInit });
1672
3132
  try {
1673
- const existingResource = await this.getResourceById({ resourceId });
1674
- if (!existingResource) {
1675
- const newResource = {
1676
- id: resourceId,
1677
- workingMemory,
1678
- metadata: metadata || {},
1679
- createdAt: /* @__PURE__ */ new Date(),
1680
- updatedAt: /* @__PURE__ */ new Date()
1681
- };
1682
- return this.saveResource({ resource: newResource });
3133
+ if ("connectionString" in config) {
3134
+ if (!config.connectionString || typeof config.connectionString !== "string" || config.connectionString.trim() === "") {
3135
+ throw new Error("MSSQLStore: connectionString must be provided and cannot be empty.");
3136
+ }
3137
+ } else {
3138
+ const required = ["server", "database", "user", "password"];
3139
+ for (const key of required) {
3140
+ if (!(key in config) || typeof config[key] !== "string" || config[key].trim() === "") {
3141
+ throw new Error(`MSSQLStore: ${key} must be provided and cannot be empty.`);
3142
+ }
3143
+ }
1683
3144
  }
1684
- const updatedResource = {
1685
- ...existingResource,
1686
- workingMemory: workingMemory !== void 0 ? workingMemory : existingResource.workingMemory,
1687
- metadata: {
1688
- ...existingResource.metadata,
1689
- ...metadata
1690
- },
1691
- updatedAt: /* @__PURE__ */ new Date()
3145
+ this.schema = config.schemaName || "dbo";
3146
+ this.pool = "connectionString" in config ? new sql__default.default.ConnectionPool(config.connectionString) : new sql__default.default.ConnectionPool({
3147
+ server: config.server,
3148
+ database: config.database,
3149
+ user: config.user,
3150
+ password: config.password,
3151
+ port: config.port,
3152
+ options: config.options || { encrypt: true, trustServerCertificate: true }
3153
+ });
3154
+ this.#db = new MssqlDB({ pool: this.pool, schemaName: this.schema });
3155
+ const domainConfig = { pool: this.pool, db: this.#db, schema: this.schema };
3156
+ const scores = new ScoresMSSQL(domainConfig);
3157
+ const workflows = new WorkflowsMSSQL(domainConfig);
3158
+ const memory = new MemoryMSSQL(domainConfig);
3159
+ const observability = new ObservabilityMSSQL(domainConfig);
3160
+ this.stores = {
3161
+ scores,
3162
+ workflows,
3163
+ memory,
3164
+ observability
1692
3165
  };
1693
- const tableName = this.getTableName(storage.TABLE_RESOURCES);
1694
- const updates = [];
1695
- const req = this.pool.request();
1696
- if (workingMemory !== void 0) {
1697
- updates.push("workingMemory = @workingMemory");
1698
- req.input("workingMemory", workingMemory);
1699
- }
1700
- if (metadata) {
1701
- updates.push("metadata = @metadata");
1702
- req.input("metadata", JSON.stringify(updatedResource.metadata));
1703
- }
1704
- updates.push("updatedAt = @updatedAt");
1705
- req.input("updatedAt", updatedResource.updatedAt.toISOString());
1706
- req.input("id", resourceId);
1707
- await req.query(`UPDATE ${tableName} SET ${updates.join(", ")} WHERE id = @id`);
1708
- return updatedResource;
1709
- } catch (error$1) {
1710
- const mastraError = new error.MastraError(
3166
+ } catch (e) {
3167
+ throw new error.MastraError(
1711
3168
  {
1712
- id: "MASTRA_STORAGE_MSSQL_UPDATE_RESOURCE_FAILED",
3169
+ id: storage.createStorageErrorId("MSSQL", "INITIALIZATION", "FAILED"),
1713
3170
  domain: error.ErrorDomain.STORAGE,
1714
- category: error.ErrorCategory.THIRD_PARTY,
1715
- details: { resourceId }
3171
+ category: error.ErrorCategory.USER
1716
3172
  },
1717
- error$1
3173
+ e
1718
3174
  );
1719
- this.logger?.error?.(mastraError.toString());
1720
- this.logger?.trackException(mastraError);
1721
- throw mastraError;
1722
3175
  }
1723
3176
  }
1724
- async getResourceById({ resourceId }) {
1725
- const tableName = this.getTableName(storage.TABLE_RESOURCES);
3177
+ async init() {
3178
+ if (this.isConnected === null) {
3179
+ this.isConnected = this._performInitializationAndStore();
3180
+ }
1726
3181
  try {
1727
- const req = this.pool.request();
1728
- req.input("resourceId", resourceId);
1729
- const result = (await req.query(`SELECT * FROM ${tableName} WHERE id = @resourceId`)).recordset[0];
1730
- if (!result) {
1731
- return null;
3182
+ await this.isConnected;
3183
+ await super.init();
3184
+ try {
3185
+ await this.#db.createAutomaticIndexes();
3186
+ } catch (indexError) {
3187
+ this.logger?.warn?.("Failed to create indexes:", indexError);
1732
3188
  }
1733
- return {
1734
- ...result,
1735
- workingMemory: typeof result.workingMemory === "object" ? JSON.stringify(result.workingMemory) : result.workingMemory,
1736
- metadata: typeof result.metadata === "string" ? JSON.parse(result.metadata) : result.metadata
1737
- };
1738
3189
  } catch (error$1) {
1739
- const mastraError = new error.MastraError(
3190
+ this.isConnected = null;
3191
+ throw new error.MastraError(
1740
3192
  {
1741
- id: "MASTRA_STORAGE_MSSQL_GET_RESOURCE_BY_ID_FAILED",
3193
+ id: storage.createStorageErrorId("MSSQL", "INIT", "FAILED"),
1742
3194
  domain: error.ErrorDomain.STORAGE,
1743
- category: error.ErrorCategory.THIRD_PARTY,
1744
- details: { resourceId }
3195
+ category: error.ErrorCategory.THIRD_PARTY
1745
3196
  },
1746
3197
  error$1
1747
3198
  );
1748
- this.logger?.error?.(mastraError.toString());
1749
- this.logger?.trackException(mastraError);
1750
- throw mastraError;
1751
3199
  }
1752
3200
  }
1753
- async getScoreById({ id }) {
1754
- throw new error.MastraError({
1755
- id: "STORAGE_MONGODB_STORE_GET_SCORE_BY_ID_FAILED",
1756
- domain: error.ErrorDomain.STORAGE,
1757
- category: error.ErrorCategory.THIRD_PARTY,
1758
- details: { id },
1759
- text: "getScoreById is not implemented yet in MongoDBStore"
1760
- });
3201
+ async _performInitializationAndStore() {
3202
+ try {
3203
+ await this.pool.connect();
3204
+ return true;
3205
+ } catch (err) {
3206
+ throw err;
3207
+ }
1761
3208
  }
1762
- async saveScore(_score) {
1763
- throw new error.MastraError({
1764
- id: "STORAGE_MONGODB_STORE_SAVE_SCORE_FAILED",
1765
- domain: error.ErrorDomain.STORAGE,
1766
- category: error.ErrorCategory.THIRD_PARTY,
1767
- details: {},
1768
- text: "saveScore is not implemented yet in MongoDBStore"
1769
- });
3209
+ get supports() {
3210
+ return {
3211
+ selectByIncludeResourceScope: true,
3212
+ resourceWorkingMemory: true,
3213
+ hasColumn: true,
3214
+ createTable: true,
3215
+ deleteMessages: true,
3216
+ listScoresBySpan: true,
3217
+ observabilityInstance: true,
3218
+ indexManagement: true
3219
+ };
1770
3220
  }
1771
- async getScoresByScorerId({
1772
- scorerId,
3221
+ /**
3222
+ * Memory
3223
+ */
3224
+ async getThreadById({ threadId }) {
3225
+ return this.stores.memory.getThreadById({ threadId });
3226
+ }
3227
+ async saveThread({ thread }) {
3228
+ return this.stores.memory.saveThread({ thread });
3229
+ }
3230
+ async updateThread({
3231
+ id,
3232
+ title,
3233
+ metadata
3234
+ }) {
3235
+ return this.stores.memory.updateThread({ id, title, metadata });
3236
+ }
3237
+ async deleteThread({ threadId }) {
3238
+ return this.stores.memory.deleteThread({ threadId });
3239
+ }
3240
+ async listMessagesById({ messageIds }) {
3241
+ return this.stores.memory.listMessagesById({ messageIds });
3242
+ }
3243
+ async saveMessages(args) {
3244
+ return this.stores.memory.saveMessages(args);
3245
+ }
3246
+ async updateMessages({
3247
+ messages
3248
+ }) {
3249
+ return this.stores.memory.updateMessages({ messages });
3250
+ }
3251
+ async deleteMessages(messageIds) {
3252
+ return this.stores.memory.deleteMessages(messageIds);
3253
+ }
3254
+ async getResourceById({ resourceId }) {
3255
+ return this.stores.memory.getResourceById({ resourceId });
3256
+ }
3257
+ async saveResource({ resource }) {
3258
+ return this.stores.memory.saveResource({ resource });
3259
+ }
3260
+ async updateResource({
3261
+ resourceId,
3262
+ workingMemory,
3263
+ metadata
3264
+ }) {
3265
+ return this.stores.memory.updateResource({ resourceId, workingMemory, metadata });
3266
+ }
3267
+ /**
3268
+ * Workflows
3269
+ */
3270
+ async updateWorkflowResults({
3271
+ workflowName,
3272
+ runId,
3273
+ stepId,
3274
+ result,
3275
+ requestContext
3276
+ }) {
3277
+ return this.stores.workflows.updateWorkflowResults({ workflowName, runId, stepId, result, requestContext });
3278
+ }
3279
+ async updateWorkflowState({
3280
+ workflowName,
3281
+ runId,
3282
+ opts
3283
+ }) {
3284
+ return this.stores.workflows.updateWorkflowState({ workflowName, runId, opts });
3285
+ }
3286
+ async persistWorkflowSnapshot({
3287
+ workflowName,
3288
+ runId,
3289
+ resourceId,
3290
+ snapshot
3291
+ }) {
3292
+ return this.stores.workflows.persistWorkflowSnapshot({ workflowName, runId, resourceId, snapshot });
3293
+ }
3294
+ async loadWorkflowSnapshot({
3295
+ workflowName,
3296
+ runId
3297
+ }) {
3298
+ return this.stores.workflows.loadWorkflowSnapshot({ workflowName, runId });
3299
+ }
3300
+ async listWorkflowRuns(args = {}) {
3301
+ return this.stores.workflows.listWorkflowRuns(args);
3302
+ }
3303
+ async getWorkflowRunById({
3304
+ runId,
3305
+ workflowName
3306
+ }) {
3307
+ return this.stores.workflows.getWorkflowRunById({ runId, workflowName });
3308
+ }
3309
+ async deleteWorkflowRunById({ runId, workflowName }) {
3310
+ return this.stores.workflows.deleteWorkflowRunById({ runId, workflowName });
3311
+ }
3312
+ async close() {
3313
+ await this.pool.close();
3314
+ }
3315
+ /**
3316
+ * Index Management
3317
+ */
3318
+ async createIndex(options) {
3319
+ return this.#db.createIndex(options);
3320
+ }
3321
+ async listIndexes(tableName) {
3322
+ return this.#db.listIndexes(tableName);
3323
+ }
3324
+ async describeIndex(indexName) {
3325
+ return this.#db.describeIndex(indexName);
3326
+ }
3327
+ async dropIndex(indexName) {
3328
+ return this.#db.dropIndex(indexName);
3329
+ }
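A hedged sketch of pairing the supports.indexManagement flag with the index-management delegation above. Only the method names and the capability flag come from this diff; the createIndex options object, table name, and index name are assumptions.

// Hypothetical sketch (not from the package): feature-detect before using the new index API.
async function manageIndexes(store) {
  if (!store.supports.indexManagement) return;
  // The exact shape of the options argument is an assumption.
  await store.createIndex({ name: 'idx_example', table: 'mastra_messages', columns: ['thread_id'] });
  console.log(await store.listIndexes('mastra_messages')); // table name is illustrative
  console.log(await store.describeIndex('idx_example'));
  await store.dropIndex('idx_example');
}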
3330
+ /**
3331
+ * Tracing / Observability
3332
+ */
3333
+ getObservabilityStore() {
3334
+ if (!this.stores.observability) {
3335
+ throw new error.MastraError({
3336
+ id: storage.createStorageErrorId("MSSQL", "OBSERVABILITY", "NOT_INITIALIZED"),
3337
+ domain: error.ErrorDomain.STORAGE,
3338
+ category: error.ErrorCategory.SYSTEM,
3339
+ text: "Observability storage is not initialized"
3340
+ });
3341
+ }
3342
+ return this.stores.observability;
3343
+ }
3344
+ async createSpan(span) {
3345
+ return this.getObservabilityStore().createSpan(span);
3346
+ }
3347
+ async updateSpan({
3348
+ spanId,
3349
+ traceId,
3350
+ updates
3351
+ }) {
3352
+ return this.getObservabilityStore().updateSpan({ spanId, traceId, updates });
3353
+ }
3354
+ async getTrace(traceId) {
3355
+ return this.getObservabilityStore().getTrace(traceId);
3356
+ }
3357
+ async getTracesPaginated(args) {
3358
+ return this.getObservabilityStore().getTracesPaginated(args);
3359
+ }
3360
+ async batchCreateSpans(args) {
3361
+ return this.getObservabilityStore().batchCreateSpans(args);
3362
+ }
3363
+ async batchUpdateSpans(args) {
3364
+ return this.getObservabilityStore().batchUpdateSpans(args);
3365
+ }
3366
+ async batchDeleteTraces(args) {
3367
+ return this.getObservabilityStore().batchDeleteTraces(args);
3368
+ }
3369
+ /**
3370
+ * Scorers
3371
+ */
3372
+ async getScoreById({ id: _id }) {
3373
+ return this.stores.scores.getScoreById({ id: _id });
3374
+ }
3375
+ async listScoresByScorerId({
3376
+ scorerId: _scorerId,
1773
3377
  pagination: _pagination,
1774
- entityId,
1775
- entityType
3378
+ entityId: _entityId,
3379
+ entityType: _entityType,
3380
+ source: _source
1776
3381
  }) {
1777
- throw new error.MastraError({
1778
- id: "STORAGE_MONGODB_STORE_GET_SCORES_BY_SCORER_ID_FAILED",
1779
- domain: error.ErrorDomain.STORAGE,
1780
- category: error.ErrorCategory.THIRD_PARTY,
1781
- details: { scorerId, entityId: entityId || "", entityType: entityType || "" },
1782
- text: "getScoresByScorerId is not implemented yet in MongoDBStore"
3382
+ return this.stores.scores.listScoresByScorerId({
3383
+ scorerId: _scorerId,
3384
+ pagination: _pagination,
3385
+ entityId: _entityId,
3386
+ entityType: _entityType,
3387
+ source: _source
1783
3388
  });
1784
3389
  }
1785
- async getScoresByRunId({
1786
- runId,
3390
+ async saveScore(score) {
3391
+ return this.stores.scores.saveScore(score);
3392
+ }
3393
+ async listScoresByRunId({
3394
+ runId: _runId,
1787
3395
  pagination: _pagination
1788
3396
  }) {
1789
- throw new error.MastraError({
1790
- id: "STORAGE_MONGODB_STORE_GET_SCORES_BY_RUN_ID_FAILED",
1791
- domain: error.ErrorDomain.STORAGE,
1792
- category: error.ErrorCategory.THIRD_PARTY,
1793
- details: { runId },
1794
- text: "getScoresByRunId is not implemented yet in MongoDBStore"
1795
- });
3397
+ return this.stores.scores.listScoresByRunId({ runId: _runId, pagination: _pagination });
1796
3398
  }
1797
- async getScoresByEntityId({
1798
- entityId,
1799
- entityType,
3399
+ async listScoresByEntityId({
3400
+ entityId: _entityId,
3401
+ entityType: _entityType,
1800
3402
  pagination: _pagination
1801
3403
  }) {
1802
- throw new error.MastraError({
1803
- id: "STORAGE_MONGODB_STORE_GET_SCORES_BY_ENTITY_ID_FAILED",
1804
- domain: error.ErrorDomain.STORAGE,
1805
- category: error.ErrorCategory.THIRD_PARTY,
1806
- details: { entityId, entityType },
1807
- text: "getScoresByEntityId is not implemented yet in MongoDBStore"
3404
+ return this.stores.scores.listScoresByEntityId({
3405
+ entityId: _entityId,
3406
+ entityType: _entityType,
3407
+ pagination: _pagination
1808
3408
  });
1809
3409
  }
1810
- async dropTable({ tableName }) {
1811
- throw new error.MastraError({
1812
- id: "STORAGE_MONGODB_STORE_DROP_TABLE_FAILED",
1813
- domain: error.ErrorDomain.STORAGE,
1814
- category: error.ErrorCategory.THIRD_PARTY,
1815
- details: { tableName },
1816
- text: "dropTable is not implemented yet in MongoDBStore"
1817
- });
3410
+ async listScoresBySpan({
3411
+ traceId,
3412
+ spanId,
3413
+ pagination: _pagination
3414
+ }) {
3415
+ return this.stores.scores.listScoresBySpan({ traceId, spanId, pagination: _pagination });
1818
3416
  }
1819
3417
  };
1820
3418