@mastra/mssql 0.0.0-vector-query-tool-provider-options-20250828222356 → 0.0.0-vnext-20251104230439

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (43)
  1. package/CHANGELOG.md +399 -3
  2. package/README.md +315 -36
  3. package/dist/index.cjs +1687 -710
  4. package/dist/index.cjs.map +1 -1
  5. package/dist/index.d.ts +1 -0
  6. package/dist/index.d.ts.map +1 -1
  7. package/dist/index.js +1689 -712
  8. package/dist/index.js.map +1 -1
  9. package/dist/storage/domains/memory/index.d.ts +18 -41
  10. package/dist/storage/domains/memory/index.d.ts.map +1 -1
  11. package/dist/storage/domains/observability/index.d.ts +44 -0
  12. package/dist/storage/domains/observability/index.d.ts.map +1 -0
  13. package/dist/storage/domains/operations/index.d.ts +67 -4
  14. package/dist/storage/domains/operations/index.d.ts.map +1 -1
  15. package/dist/storage/domains/scores/index.d.ts +13 -4
  16. package/dist/storage/domains/scores/index.d.ts.map +1 -1
  17. package/dist/storage/domains/utils.d.ts +19 -0
  18. package/dist/storage/domains/utils.d.ts.map +1 -1
  19. package/dist/storage/domains/workflows/index.d.ts +9 -13
  20. package/dist/storage/domains/workflows/index.d.ts.map +1 -1
  21. package/dist/storage/index.d.ts +76 -79
  22. package/dist/storage/index.d.ts.map +1 -1
  23. package/package.json +23 -10
  24. package/dist/storage/domains/legacy-evals/index.d.ts +0 -20
  25. package/dist/storage/domains/legacy-evals/index.d.ts.map +0 -1
  26. package/dist/storage/domains/traces/index.d.ts +0 -37
  27. package/dist/storage/domains/traces/index.d.ts.map +0 -1
  28. package/docker-compose.yaml +0 -14
  29. package/eslint.config.js +0 -6
  30. package/src/index.ts +0 -2
  31. package/src/storage/domains/legacy-evals/index.ts +0 -175
  32. package/src/storage/domains/memory/index.ts +0 -1084
  33. package/src/storage/domains/operations/index.ts +0 -401
  34. package/src/storage/domains/scores/index.ts +0 -316
  35. package/src/storage/domains/traces/index.ts +0 -212
  36. package/src/storage/domains/utils.ts +0 -12
  37. package/src/storage/domains/workflows/index.ts +0 -296
  38. package/src/storage/index.test.ts +0 -2228
  39. package/src/storage/index.ts +0 -494
  40. package/tsconfig.build.json +0 -9
  41. package/tsconfig.json +0 -5
  42. package/tsup.config.ts +0 -17
  43. package/vitest.config.ts +0 -12
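
The bulk of the change is in the compiled storage bundle below, where the memory storage surface moves from the paginated getters (`getThreadsByResourceIdPaginated`, `getMessagesPaginated`, `getMessagesById`) to list-style methods that return result envelopes. A minimal usage sketch follows; the method and option shapes are read off the compiled diff rather than the typed public API, and `store` stands in for an initialized @mastra/mssql storage instance.

```ts
// Sketch only: shapes inferred from the compiled diff below, not from documentation.
declare const store: {
  listThreadsByResourceId(args: {
    resourceId: string;
    page?: number;
    perPage?: number | false; // per the diff, `false` removes the row limit
  }): Promise<{ threads: unknown[]; total: number; hasMore: boolean }>;
  listMessages(args: {
    threadId: string;
    page?: number;
    perPage?: number | false;
    filter?: { dateRange?: { start?: Date; end?: Date } };
  }): Promise<{ messages: unknown[]; total: number; hasMore: boolean }>;
};

const { threads, hasMore } = await store.listThreadsByResourceId({
  resourceId: 'resource-1',
  page: 0,
  perPage: 20,
});

const { messages } = await store.listMessages({
  threadId: 'thread-1',
  page: 0,
  perPage: 40,
  filter: { dateRange: { start: new Date('2025-01-01') } },
});
```
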
package/dist/index.js CHANGED
@@ -1,8 +1,10 @@
1
1
  import { MastraError, ErrorCategory, ErrorDomain } from '@mastra/core/error';
2
- import { MastraStorage, LegacyEvalsStorage, StoreOperations, TABLE_WORKFLOW_SNAPSHOT, ScoresStorage, TABLE_SCORERS, TracesStorage, TABLE_TRACES, WorkflowsStorage, MemoryStorage, resolveMessageLimit, TABLE_RESOURCES, TABLE_EVALS, TABLE_THREADS, TABLE_MESSAGES } from '@mastra/core/storage';
2
+ import { MastraStorage, StoreOperations, TABLE_WORKFLOW_SNAPSHOT, TABLE_SCHEMAS, TABLE_THREADS, TABLE_MESSAGES, TABLE_TRACES, TABLE_SCORERS, TABLE_AI_SPANS, ScoresStorage, normalizePerPage, calculatePagination, WorkflowsStorage, MemoryStorage, resolveMessageLimit, TABLE_RESOURCES, ObservabilityStorage, safelyParseJSON } from '@mastra/core/storage';
3
3
  import sql2 from 'mssql';
4
- import { parseSqlIdentifier, parseFieldKey } from '@mastra/core/utils';
5
4
  import { MessageList } from '@mastra/core/agent';
5
+ import { parseSqlIdentifier } from '@mastra/core/utils';
6
+ import { randomUUID } from 'crypto';
7
+ import { saveScorePayloadSchema } from '@mastra/core/evals';
6
8
 
7
9
  // src/storage/index.ts
8
10
  function getSchemaName(schema) {
@@ -14,154 +16,71 @@ function getTableName({ indexName, schemaName }) {
14
16
  const quotedSchemaName = schemaName;
15
17
  return quotedSchemaName ? `${quotedSchemaName}.${quotedIndexName}` : quotedIndexName;
16
18
  }
17
-
18
- // src/storage/domains/legacy-evals/index.ts
19
- function transformEvalRow(row) {
20
- let testInfoValue = null, resultValue = null;
21
- if (row.test_info) {
22
- try {
23
- testInfoValue = typeof row.test_info === "string" ? JSON.parse(row.test_info) : row.test_info;
24
- } catch {
25
- }
19
+ function buildDateRangeFilter(dateRange, fieldName) {
20
+ const filters = {};
21
+ if (dateRange?.start) {
22
+ filters[`${fieldName}_gte`] = dateRange.start;
26
23
  }
27
- if (row.test_info) {
28
- try {
29
- resultValue = typeof row.result === "string" ? JSON.parse(row.result) : row.result;
30
- } catch {
31
- }
24
+ if (dateRange?.end) {
25
+ filters[`${fieldName}_lte`] = dateRange.end;
32
26
  }
27
+ return filters;
28
+ }
29
+ function prepareWhereClause(filters, _schema) {
30
+ const conditions = [];
31
+ const params = {};
32
+ let paramIndex = 1;
33
+ Object.entries(filters).forEach(([key, value]) => {
34
+ if (value === void 0) return;
35
+ const paramName = `p${paramIndex++}`;
36
+ if (key.endsWith("_gte")) {
37
+ const fieldName = key.slice(0, -4);
38
+ conditions.push(`[${parseSqlIdentifier(fieldName, "field name")}] >= @${paramName}`);
39
+ params[paramName] = value instanceof Date ? value.toISOString() : value;
40
+ } else if (key.endsWith("_lte")) {
41
+ const fieldName = key.slice(0, -4);
42
+ conditions.push(`[${parseSqlIdentifier(fieldName, "field name")}] <= @${paramName}`);
43
+ params[paramName] = value instanceof Date ? value.toISOString() : value;
44
+ } else if (value === null) {
45
+ conditions.push(`[${parseSqlIdentifier(key, "field name")}] IS NULL`);
46
+ } else {
47
+ conditions.push(`[${parseSqlIdentifier(key, "field name")}] = @${paramName}`);
48
+ params[paramName] = value instanceof Date ? value.toISOString() : value;
49
+ }
50
+ });
33
51
  return {
34
- agentName: row.agent_name,
35
- input: row.input,
36
- output: row.output,
37
- result: resultValue,
38
- metricName: row.metric_name,
39
- instructions: row.instructions,
40
- testInfo: testInfoValue,
41
- globalRunId: row.global_run_id,
42
- runId: row.run_id,
43
- createdAt: row.created_at
52
+ sql: conditions.length > 0 ? ` WHERE ${conditions.join(" AND ")}` : "",
53
+ params
44
54
  };
45
55
  }
46
- var LegacyEvalsMSSQL = class extends LegacyEvalsStorage {
47
- pool;
48
- schema;
49
- constructor({ pool, schema }) {
50
- super();
51
- this.pool = pool;
52
- this.schema = schema;
53
- }
54
- /** @deprecated use getEvals instead */
55
- async getEvalsByAgentName(agentName, type) {
56
- try {
57
- let query = `SELECT * FROM ${getTableName({ indexName: TABLE_EVALS, schemaName: getSchemaName(this.schema) })} WHERE agent_name = @p1`;
58
- if (type === "test") {
59
- query += " AND test_info IS NOT NULL AND JSON_VALUE(test_info, '$.testPath') IS NOT NULL";
60
- } else if (type === "live") {
61
- query += " AND (test_info IS NULL OR JSON_VALUE(test_info, '$.testPath') IS NULL)";
62
- }
63
- query += " ORDER BY created_at DESC";
64
- const request = this.pool.request();
65
- request.input("p1", agentName);
66
- const result = await request.query(query);
67
- const rows = result.recordset;
68
- return typeof transformEvalRow === "function" ? rows?.map((row) => transformEvalRow(row)) ?? [] : rows ?? [];
69
- } catch (error) {
70
- if (error && error.number === 208 && error.message && error.message.includes("Invalid object name")) {
71
- return [];
72
- }
73
- console.error("Failed to get evals for the specified agent: " + error?.message);
74
- throw error;
75
- }
76
- }
77
- async getEvals(options = {}) {
78
- const { agentName, type, page = 0, perPage = 100, dateRange } = options;
79
- const fromDate = dateRange?.start;
80
- const toDate = dateRange?.end;
81
- const where = [];
82
- const params = {};
83
- if (agentName) {
84
- where.push("agent_name = @agentName");
85
- params["agentName"] = agentName;
86
- }
87
- if (type === "test") {
88
- where.push("test_info IS NOT NULL AND JSON_VALUE(test_info, '$.testPath') IS NOT NULL");
89
- } else if (type === "live") {
90
- where.push("(test_info IS NULL OR JSON_VALUE(test_info, '$.testPath') IS NULL)");
91
- }
92
- if (fromDate instanceof Date && !isNaN(fromDate.getTime())) {
93
- where.push(`[created_at] >= @fromDate`);
94
- params[`fromDate`] = fromDate.toISOString();
95
- }
96
- if (toDate instanceof Date && !isNaN(toDate.getTime())) {
97
- where.push(`[created_at] <= @toDate`);
98
- params[`toDate`] = toDate.toISOString();
99
- }
100
- const whereClause = where.length > 0 ? `WHERE ${where.join(" AND ")}` : "";
101
- const tableName = getTableName({ indexName: TABLE_EVALS, schemaName: getSchemaName(this.schema) });
102
- const offset = page * perPage;
103
- const countQuery = `SELECT COUNT(*) as total FROM ${tableName} ${whereClause}`;
104
- const dataQuery = `SELECT * FROM ${tableName} ${whereClause} ORDER BY seq_id DESC OFFSET @offset ROWS FETCH NEXT @perPage ROWS ONLY`;
105
- try {
106
- const countReq = this.pool.request();
107
- Object.entries(params).forEach(([key, value]) => {
108
- if (value instanceof Date) {
109
- countReq.input(key, sql2.DateTime, value);
110
- } else {
111
- countReq.input(key, value);
112
- }
113
- });
114
- const countResult = await countReq.query(countQuery);
115
- const total = countResult.recordset[0]?.total || 0;
116
- if (total === 0) {
117
- return {
118
- evals: [],
119
- total: 0,
120
- page,
121
- perPage,
122
- hasMore: false
123
- };
56
+ function transformFromSqlRow({
57
+ tableName,
58
+ sqlRow
59
+ }) {
60
+ const schema = TABLE_SCHEMAS[tableName];
61
+ const result = {};
62
+ Object.entries(sqlRow).forEach(([key, value]) => {
63
+ const columnSchema = schema?.[key];
64
+ if (columnSchema?.type === "jsonb" && typeof value === "string") {
65
+ try {
66
+ result[key] = JSON.parse(value);
67
+ } catch {
68
+ result[key] = value;
124
69
  }
125
- const req = this.pool.request();
126
- Object.entries(params).forEach(([key, value]) => {
127
- if (value instanceof Date) {
128
- req.input(key, sql2.DateTime, value);
129
- } else {
130
- req.input(key, value);
131
- }
132
- });
133
- req.input("offset", offset);
134
- req.input("perPage", perPage);
135
- const result = await req.query(dataQuery);
136
- const rows = result.recordset;
137
- return {
138
- evals: rows?.map((row) => transformEvalRow(row)) ?? [],
139
- total,
140
- page,
141
- perPage,
142
- hasMore: offset + (rows?.length ?? 0) < total
143
- };
144
- } catch (error) {
145
- const mastraError = new MastraError(
146
- {
147
- id: "MASTRA_STORAGE_MSSQL_STORE_GET_EVALS_FAILED",
148
- domain: ErrorDomain.STORAGE,
149
- category: ErrorCategory.THIRD_PARTY,
150
- details: {
151
- agentName: agentName || "all",
152
- type: type || "all",
153
- page,
154
- perPage
155
- }
156
- },
157
- error
158
- );
159
- this.logger?.error?.(mastraError.toString());
160
- this.logger?.trackException(mastraError);
161
- throw mastraError;
70
+ } else if (columnSchema?.type === "timestamp" && value && typeof value === "string") {
71
+ result[key] = new Date(value);
72
+ } else if (columnSchema?.type === "timestamp" && value instanceof Date) {
73
+ result[key] = value;
74
+ } else if (columnSchema?.type === "boolean") {
75
+ result[key] = Boolean(value);
76
+ } else {
77
+ result[key] = value;
162
78
  }
163
- }
164
- };
79
+ });
80
+ return result;
81
+ }
82
+
83
+ // src/storage/domains/memory/index.ts
165
84
  var MemoryMSSQL = class extends MemoryStorage {
166
85
  pool;
167
86
  schema;
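
For reference, the query helpers introduced in the hunk above (`buildDateRangeFilter`, `prepareWhereClause`) compose date-range and equality filters into a parameterised WHERE clause. They are internal to `dist/index.js`, so the snippet below only traces their behaviour with hypothetical values; the results follow directly from the compiled code shown.

```ts
// Declared stand-ins for the bundle-internal helpers shown in the hunk above.
declare function buildDateRangeFilter(
  range: { start?: Date; end?: Date } | undefined,
  field: string,
): Record<string, unknown>;
declare function prepareWhereClause(
  filters: Record<string, unknown>,
): { sql: string; params: Record<string, unknown> };

// Hypothetical filter values; output traced from the compiled helpers.
const filters = {
  ...buildDateRangeFilter({ start: new Date('2025-01-01T00:00:00.000Z') }, 'startedAt'),
  parentSpanId: null, // emits an IS NULL condition with no bound parameter
  name: "agent run: 'my-agent'",
};
const where = prepareWhereClause(filters);
// where.sql    -> " WHERE [startedAt] >= @p1 AND [parentSpanId] IS NULL AND [name] = @p3"
// where.params -> { p1: '2025-01-01T00:00:00.000Z', p3: "agent run: 'my-agent'" }
// @p2 is skipped: a parameter index is consumed for the NULL check but never bound.
```
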
@@ -179,7 +98,7 @@ var MemoryMSSQL = class extends MemoryStorage {
179
98
  });
180
99
  const cleanMessages = messagesWithParsedContent.map(({ seq_id, ...rest }) => rest);
181
100
  const list = new MessageList().add(cleanMessages, "memory");
182
- return format === "v2" ? list.get.all.v2() : list.get.all.v1();
101
+ return format === "v2" ? list.get.all.db() : list.get.all.v1();
183
102
  }
184
103
  constructor({
185
104
  pool,
@@ -193,7 +112,7 @@ var MemoryMSSQL = class extends MemoryStorage {
193
112
  }
194
113
  async getThreadById({ threadId }) {
195
114
  try {
196
- const sql7 = `SELECT
115
+ const sql5 = `SELECT
197
116
  id,
198
117
  [resourceId],
199
118
  title,
@@ -204,7 +123,7 @@ var MemoryMSSQL = class extends MemoryStorage {
204
123
  WHERE id = @threadId`;
205
124
  const request = this.pool.request();
206
125
  request.input("threadId", threadId);
207
- const resultSet = await request.query(sql7);
126
+ const resultSet = await request.query(sql5);
208
127
  const thread = resultSet.recordset[0] || null;
209
128
  if (!thread) {
210
129
  return null;
@@ -229,11 +148,12 @@ var MemoryMSSQL = class extends MemoryStorage {
229
148
  );
230
149
  }
231
150
  }
232
- async getThreadsByResourceIdPaginated(args) {
233
- const { resourceId, page = 0, perPage: perPageInput, orderBy = "createdAt", sortDirection = "DESC" } = args;
151
+ async listThreadsByResourceId(args) {
152
+ const { resourceId, page = 0, perPage: perPageInput, orderBy } = args;
153
+ const perPage = normalizePerPage(perPageInput, 100);
154
+ const { offset, perPage: perPageForResponse } = calculatePagination(page, perPageInput, perPage);
155
+ const { field, direction } = this.parseOrderBy(orderBy);
234
156
  try {
235
- const perPage = perPageInput !== void 0 ? perPageInput : 100;
236
- const currentOffset = page * perPage;
237
157
  const baseQuery = `FROM ${getTableName({ indexName: TABLE_THREADS, schemaName: getSchemaName(this.schema) })} WHERE [resourceId] = @resourceId`;
238
158
  const countQuery = `SELECT COUNT(*) as count ${baseQuery}`;
239
159
  const countRequest = this.pool.request();
@@ -245,16 +165,22 @@ var MemoryMSSQL = class extends MemoryStorage {
245
165
  threads: [],
246
166
  total: 0,
247
167
  page,
248
- perPage,
168
+ perPage: perPageForResponse,
249
169
  hasMore: false
250
170
  };
251
171
  }
252
- const orderByField = orderBy === "createdAt" ? "[createdAt]" : "[updatedAt]";
253
- const dataQuery = `SELECT id, [resourceId], title, metadata, [createdAt], [updatedAt] ${baseQuery} ORDER BY ${orderByField} ${sortDirection} OFFSET @offset ROWS FETCH NEXT @perPage ROWS ONLY`;
172
+ const orderByField = field === "createdAt" ? "[createdAt]" : "[updatedAt]";
173
+ const dir = (direction || "DESC").toUpperCase() === "ASC" ? "ASC" : "DESC";
174
+ const limitValue = perPageInput === false ? total : perPage;
175
+ const dataQuery = `SELECT id, [resourceId], title, metadata, [createdAt], [updatedAt] ${baseQuery} ORDER BY ${orderByField} ${dir} OFFSET @offset ROWS FETCH NEXT @perPage ROWS ONLY`;
254
176
  const dataRequest = this.pool.request();
255
177
  dataRequest.input("resourceId", resourceId);
256
- dataRequest.input("perPage", perPage);
257
- dataRequest.input("offset", currentOffset);
178
+ dataRequest.input("offset", offset);
179
+ if (limitValue > 2147483647) {
180
+ dataRequest.input("perPage", sql2.BigInt, limitValue);
181
+ } else {
182
+ dataRequest.input("perPage", limitValue);
183
+ }
258
184
  const rowsResult = await dataRequest.query(dataQuery);
259
185
  const rows = rowsResult.recordset || [];
260
186
  const threads = rows.map((thread) => ({
@@ -267,13 +193,13 @@ var MemoryMSSQL = class extends MemoryStorage {
267
193
  threads,
268
194
  total,
269
195
  page,
270
- perPage,
271
- hasMore: currentOffset + threads.length < total
196
+ perPage: perPageForResponse,
197
+ hasMore: perPageInput === false ? false : offset + perPage < total
272
198
  };
273
199
  } catch (error) {
274
200
  const mastraError = new MastraError(
275
201
  {
276
- id: "MASTRA_STORAGE_MSSQL_STORE_GET_THREADS_BY_RESOURCE_ID_PAGINATED_FAILED",
202
+ id: "MASTRA_STORAGE_MSSQL_STORE_LIST_THREADS_BY_RESOURCE_ID_FAILED",
277
203
  domain: ErrorDomain.STORAGE,
278
204
  category: ErrorCategory.THIRD_PARTY,
279
205
  details: {
@@ -285,7 +211,13 @@ var MemoryMSSQL = class extends MemoryStorage {
285
211
  );
286
212
  this.logger?.error?.(mastraError.toString());
287
213
  this.logger?.trackException?.(mastraError);
288
- return { threads: [], total: 0, page, perPage: perPageInput || 100, hasMore: false };
214
+ return {
215
+ threads: [],
216
+ total: 0,
217
+ page,
218
+ perPage: perPageForResponse,
219
+ hasMore: false
220
+ };
289
221
  }
290
222
  }
291
223
  async saveThread({ thread }) {
@@ -307,7 +239,12 @@ var MemoryMSSQL = class extends MemoryStorage {
307
239
  req.input("id", thread.id);
308
240
  req.input("resourceId", thread.resourceId);
309
241
  req.input("title", thread.title);
310
- req.input("metadata", thread.metadata ? JSON.stringify(thread.metadata) : null);
242
+ const metadata = thread.metadata ? JSON.stringify(thread.metadata) : null;
243
+ if (metadata === null) {
244
+ req.input("metadata", sql2.NVarChar, null);
245
+ } else {
246
+ req.input("metadata", metadata);
247
+ }
311
248
  req.input("createdAt", sql2.DateTime2, thread.createdAt);
312
249
  req.input("updatedAt", sql2.DateTime2, thread.updatedAt);
313
250
  await req.query(mergeSql);
@@ -326,30 +263,6 @@ var MemoryMSSQL = class extends MemoryStorage {
326
263
  );
327
264
  }
328
265
  }
329
- /**
330
- * @deprecated use getThreadsByResourceIdPaginated instead
331
- */
332
- async getThreadsByResourceId(args) {
333
- const { resourceId, orderBy = "createdAt", sortDirection = "DESC" } = args;
334
- try {
335
- const baseQuery = `FROM ${getTableName({ indexName: TABLE_THREADS, schemaName: getSchemaName(this.schema) })} WHERE [resourceId] = @resourceId`;
336
- const orderByField = orderBy === "createdAt" ? "[createdAt]" : "[updatedAt]";
337
- const dataQuery = `SELECT id, [resourceId], title, metadata, [createdAt], [updatedAt] ${baseQuery} ORDER BY ${orderByField} ${sortDirection}`;
338
- const request = this.pool.request();
339
- request.input("resourceId", resourceId);
340
- const resultSet = await request.query(dataQuery);
341
- const rows = resultSet.recordset || [];
342
- return rows.map((thread) => ({
343
- ...thread,
344
- metadata: typeof thread.metadata === "string" ? JSON.parse(thread.metadata) : thread.metadata,
345
- createdAt: thread.createdAt,
346
- updatedAt: thread.updatedAt
347
- }));
348
- } catch (error) {
349
- this.logger?.error?.(`Error getting threads for resource ${resourceId}:`, error);
350
- return [];
351
- }
352
- }
353
266
  /**
354
267
  * Updates a thread's title and metadata, merging with existing metadata. Returns the updated thread.
355
268
  */
@@ -377,7 +290,7 @@ var MemoryMSSQL = class extends MemoryStorage {
377
290
  };
378
291
  try {
379
292
  const table = getTableName({ indexName: TABLE_THREADS, schemaName: getSchemaName(this.schema) });
380
- const sql7 = `UPDATE ${table}
293
+ const sql5 = `UPDATE ${table}
381
294
  SET title = @title,
382
295
  metadata = @metadata,
383
296
  [updatedAt] = @updatedAt
@@ -388,7 +301,7 @@ var MemoryMSSQL = class extends MemoryStorage {
388
301
  req.input("title", title);
389
302
  req.input("metadata", JSON.stringify(mergedMetadata));
390
303
  req.input("updatedAt", /* @__PURE__ */ new Date());
391
- const result = await req.query(sql7);
304
+ const result = await req.query(sql5);
392
305
  let thread = result.recordset && result.recordset[0];
393
306
  if (thread && "seq_id" in thread) {
394
307
  const { seq_id, ...rest } = thread;
@@ -458,9 +371,9 @@ var MemoryMSSQL = class extends MemoryStorage {
458
371
  }
459
372
  async _getIncludedMessages({
460
373
  threadId,
461
- selectBy,
462
- orderByStatement
374
+ selectBy
463
375
  }) {
376
+ if (!threadId.trim()) throw new Error("threadId must be a non-empty string");
464
377
  const include = selectBy?.include;
465
378
  if (!include) return null;
466
379
  const unionQueries = [];
@@ -486,7 +399,7 @@ var MemoryMSSQL = class extends MemoryStorage {
486
399
  m.[resourceId],
487
400
  m.seq_id
488
401
  FROM (
489
- SELECT *, ROW_NUMBER() OVER (${orderByStatement}) as row_num
402
+ SELECT *, ROW_NUMBER() OVER (ORDER BY [createdAt] ASC) as row_num
490
403
  FROM ${getTableName({ indexName: TABLE_MESSAGES, schemaName: getSchemaName(this.schema) })}
491
404
  WHERE [thread_id] = ${pThreadId}
492
405
  ) AS m
@@ -494,15 +407,17 @@ var MemoryMSSQL = class extends MemoryStorage {
494
407
  OR EXISTS (
495
408
  SELECT 1
496
409
  FROM (
497
- SELECT *, ROW_NUMBER() OVER (${orderByStatement}) as row_num
410
+ SELECT *, ROW_NUMBER() OVER (ORDER BY [createdAt] ASC) as row_num
498
411
  FROM ${getTableName({ indexName: TABLE_MESSAGES, schemaName: getSchemaName(this.schema) })}
499
412
  WHERE [thread_id] = ${pThreadId}
500
413
  ) AS target
501
414
  WHERE target.id = ${pId}
502
415
  AND (
503
- (m.row_num <= target.row_num + ${pPrev} AND m.row_num > target.row_num)
416
+ -- Get previous messages (messages that come BEFORE the target)
417
+ (m.row_num < target.row_num AND m.row_num >= target.row_num - ${pPrev})
504
418
  OR
505
- (m.row_num >= target.row_num - ${pNext} AND m.row_num < target.row_num)
419
+ -- Get next messages (messages that come AFTER the target)
420
+ (m.row_num > target.row_num AND m.row_num <= target.row_num + ${pNext})
506
421
  )
507
422
  )
508
423
  `
@@ -531,16 +446,20 @@ var MemoryMSSQL = class extends MemoryStorage {
531
446
  });
532
447
  return dedupedRows;
533
448
  }
449
+ /**
450
+ * @deprecated use listMessages instead
451
+ */
534
452
  async getMessages(args) {
535
- const { threadId, format, selectBy } = args;
453
+ const { threadId, resourceId, selectBy } = args;
536
454
  const selectStatement = `SELECT seq_id, id, content, role, type, [createdAt], thread_id AS threadId, resourceId`;
537
455
  const orderByStatement = `ORDER BY [seq_id] DESC`;
538
456
  const limit = resolveMessageLimit({ last: selectBy?.last, defaultLimit: 40 });
539
457
  try {
458
+ if (!threadId.trim()) throw new Error("threadId must be a non-empty string");
540
459
  let rows = [];
541
460
  const include = selectBy?.include || [];
542
461
  if (include?.length) {
543
- const includeMessages = await this._getIncludedMessages({ threadId, selectBy, orderByStatement });
462
+ const includeMessages = await this._getIncludedMessages({ threadId, selectBy });
544
463
  if (includeMessages) {
545
464
  rows.push(...includeMessages);
546
465
  }
@@ -565,8 +484,19 @@ var MemoryMSSQL = class extends MemoryStorage {
565
484
  const timeDiff = a.seq_id - b.seq_id;
566
485
  return timeDiff;
567
486
  });
568
- rows = rows.map(({ seq_id, ...rest }) => rest);
569
- return this._parseAndFormatMessages(rows, format);
487
+ const messagesWithParsedContent = rows.map((row) => {
488
+ if (typeof row.content === "string") {
489
+ try {
490
+ return { ...row, content: JSON.parse(row.content) };
491
+ } catch {
492
+ return row;
493
+ }
494
+ }
495
+ return row;
496
+ });
497
+ const cleanMessages = messagesWithParsedContent.map(({ seq_id, ...rest }) => rest);
498
+ const list = new MessageList().add(cleanMessages, "memory");
499
+ return { messages: list.get.all.db() };
570
500
  } catch (error) {
571
501
  const mastraError = new MastraError(
572
502
  {
@@ -574,21 +504,19 @@ var MemoryMSSQL = class extends MemoryStorage {
574
504
  domain: ErrorDomain.STORAGE,
575
505
  category: ErrorCategory.THIRD_PARTY,
576
506
  details: {
577
- threadId
507
+ threadId,
508
+ resourceId: resourceId ?? ""
578
509
  }
579
510
  },
580
511
  error
581
512
  );
582
513
  this.logger?.error?.(mastraError.toString());
583
- this.logger?.trackException(mastraError);
584
- return [];
514
+ this.logger?.trackException?.(mastraError);
515
+ return { messages: [] };
585
516
  }
586
517
  }
587
- async getMessagesById({
588
- messageIds,
589
- format
590
- }) {
591
- if (messageIds.length === 0) return [];
518
+ async listMessagesById({ messageIds }) {
519
+ if (messageIds.length === 0) return { messages: [] };
592
520
  const selectStatement = `SELECT seq_id, id, content, role, type, [createdAt], thread_id AS threadId, resourceId`;
593
521
  const orderByStatement = `ORDER BY [seq_id] DESC`;
594
522
  try {
@@ -604,13 +532,23 @@ var MemoryMSSQL = class extends MemoryStorage {
604
532
  const timeDiff = a.seq_id - b.seq_id;
605
533
  return timeDiff;
606
534
  });
607
- rows = rows.map(({ seq_id, ...rest }) => rest);
608
- if (format === `v1`) return this._parseAndFormatMessages(rows, format);
609
- return this._parseAndFormatMessages(rows, `v2`);
535
+ const messagesWithParsedContent = rows.map((row) => {
536
+ if (typeof row.content === "string") {
537
+ try {
538
+ return { ...row, content: JSON.parse(row.content) };
539
+ } catch {
540
+ return row;
541
+ }
542
+ }
543
+ return row;
544
+ });
545
+ const cleanMessages = messagesWithParsedContent.map(({ seq_id, ...rest }) => rest);
546
+ const list = new MessageList().add(cleanMessages, "memory");
547
+ return { messages: list.get.all.db() };
610
548
  } catch (error) {
611
549
  const mastraError = new MastraError(
612
550
  {
613
- id: "MASTRA_STORAGE_MSSQL_STORE_GET_MESSAGES_BY_ID_FAILED",
551
+ id: "MASTRA_STORAGE_MSSQL_STORE_LIST_MESSAGES_BY_ID_FAILED",
614
552
  domain: ErrorDomain.STORAGE,
615
553
  category: ErrorCategory.THIRD_PARTY,
616
554
  details: {
@@ -620,101 +558,125 @@ var MemoryMSSQL = class extends MemoryStorage {
620
558
  error
621
559
  );
622
560
  this.logger?.error?.(mastraError.toString());
623
- this.logger?.trackException(mastraError);
624
- return [];
561
+ this.logger?.trackException?.(mastraError);
562
+ return { messages: [] };
625
563
  }
626
564
  }
627
- async getMessagesPaginated(args) {
628
- const { threadId, selectBy } = args;
629
- const { page = 0, perPage: perPageInput } = selectBy?.pagination || {};
630
- const orderByStatement = `ORDER BY [seq_id] DESC`;
631
- if (selectBy?.include?.length) {
632
- await this._getIncludedMessages({ threadId, selectBy, orderByStatement });
565
+ async listMessages(args) {
566
+ const { threadId, resourceId, include, filter, perPage: perPageInput, page = 0, orderBy } = args;
567
+ if (!threadId.trim()) {
568
+ throw new MastraError(
569
+ {
570
+ id: "STORAGE_MSSQL_LIST_MESSAGES_INVALID_THREAD_ID",
571
+ domain: ErrorDomain.STORAGE,
572
+ category: ErrorCategory.THIRD_PARTY,
573
+ details: { threadId }
574
+ },
575
+ new Error("threadId must be a non-empty string")
576
+ );
633
577
  }
578
+ const perPage = normalizePerPage(perPageInput, 40);
579
+ const { offset, perPage: perPageForResponse } = calculatePagination(page, perPageInput, perPage);
634
580
  try {
635
- const { threadId: threadId2, format, selectBy: selectBy2 } = args;
636
- const { page: page2 = 0, perPage: perPageInput2, dateRange } = selectBy2?.pagination || {};
637
- const fromDate = dateRange?.start;
638
- const toDate = dateRange?.end;
581
+ const { field, direction } = this.parseOrderBy(orderBy);
582
+ const orderByStatement = `ORDER BY [${field}] ${direction}`;
639
583
  const selectStatement = `SELECT seq_id, id, content, role, type, [createdAt], thread_id AS threadId, resourceId`;
640
- const orderByStatement2 = `ORDER BY [seq_id] DESC`;
641
- let messages2 = [];
642
- if (selectBy2?.include?.length) {
643
- const includeMessages = await this._getIncludedMessages({ threadId: threadId2, selectBy: selectBy2, orderByStatement: orderByStatement2 });
644
- if (includeMessages) messages2.push(...includeMessages);
645
- }
646
- const perPage = perPageInput2 !== void 0 ? perPageInput2 : resolveMessageLimit({ last: selectBy2?.last, defaultLimit: 40 });
647
- const currentOffset = page2 * perPage;
584
+ const tableName = getTableName({ indexName: TABLE_MESSAGES, schemaName: getSchemaName(this.schema) });
648
585
  const conditions = ["[thread_id] = @threadId"];
649
586
  const request = this.pool.request();
650
- request.input("threadId", threadId2);
651
- if (fromDate instanceof Date && !isNaN(fromDate.getTime())) {
587
+ request.input("threadId", threadId);
588
+ if (resourceId) {
589
+ conditions.push("[resourceId] = @resourceId");
590
+ request.input("resourceId", resourceId);
591
+ }
592
+ if (filter?.dateRange?.start) {
652
593
  conditions.push("[createdAt] >= @fromDate");
653
- request.input("fromDate", fromDate.toISOString());
594
+ request.input("fromDate", filter.dateRange.start);
654
595
  }
655
- if (toDate instanceof Date && !isNaN(toDate.getTime())) {
596
+ if (filter?.dateRange?.end) {
656
597
  conditions.push("[createdAt] <= @toDate");
657
- request.input("toDate", toDate.toISOString());
598
+ request.input("toDate", filter.dateRange.end);
658
599
  }
659
600
  const whereClause = `WHERE ${conditions.join(" AND ")}`;
660
- const countQuery = `SELECT COUNT(*) as total FROM ${getTableName({ indexName: TABLE_MESSAGES, schemaName: getSchemaName(this.schema) })} ${whereClause}`;
601
+ const countQuery = `SELECT COUNT(*) as total FROM ${tableName} ${whereClause}`;
661
602
  const countResult = await request.query(countQuery);
662
603
  const total = parseInt(countResult.recordset[0]?.total, 10) || 0;
663
- if (total === 0 && messages2.length > 0) {
664
- const parsedIncluded = this._parseAndFormatMessages(messages2, format);
604
+ const limitValue = perPageInput === false ? total : perPage;
605
+ const dataQuery = `${selectStatement} FROM ${tableName} ${whereClause} ${orderByStatement} OFFSET @offset ROWS FETCH NEXT @limit ROWS ONLY`;
606
+ request.input("offset", offset);
607
+ if (limitValue > 2147483647) {
608
+ request.input("limit", sql2.BigInt, limitValue);
609
+ } else {
610
+ request.input("limit", limitValue);
611
+ }
612
+ const rowsResult = await request.query(dataQuery);
613
+ const rows = rowsResult.recordset || [];
614
+ const messages = [...rows];
615
+ if (total === 0 && messages.length === 0 && (!include || include.length === 0)) {
665
616
  return {
666
- messages: parsedIncluded,
667
- total: parsedIncluded.length,
668
- page: page2,
669
- perPage,
617
+ messages: [],
618
+ total: 0,
619
+ page,
620
+ perPage: perPageForResponse,
670
621
  hasMore: false
671
622
  };
672
623
  }
673
- const excludeIds = messages2.map((m) => m.id);
674
- if (excludeIds.length > 0) {
675
- const excludeParams = excludeIds.map((_, idx) => `@id${idx}`);
676
- conditions.push(`id NOT IN (${excludeParams.join(", ")})`);
677
- excludeIds.forEach((id, idx) => request.input(`id${idx}`, id));
624
+ const messageIds = new Set(messages.map((m) => m.id));
625
+ if (include && include.length > 0) {
626
+ const selectBy = { include };
627
+ const includeMessages = await this._getIncludedMessages({ threadId, selectBy });
628
+ if (includeMessages) {
629
+ for (const includeMsg of includeMessages) {
630
+ if (!messageIds.has(includeMsg.id)) {
631
+ messages.push(includeMsg);
632
+ messageIds.add(includeMsg.id);
633
+ }
634
+ }
635
+ }
678
636
  }
679
- const finalWhereClause = `WHERE ${conditions.join(" AND ")}`;
680
- const dataQuery = `${selectStatement} FROM ${getTableName({ indexName: TABLE_MESSAGES, schemaName: getSchemaName(this.schema) })} ${finalWhereClause} ${orderByStatement2} OFFSET @offset ROWS FETCH NEXT @limit ROWS ONLY`;
681
- request.input("offset", currentOffset);
682
- request.input("limit", perPage);
683
- const rowsResult = await request.query(dataQuery);
684
- const rows = rowsResult.recordset || [];
685
- rows.sort((a, b) => a.seq_id - b.seq_id);
686
- messages2.push(...rows);
687
- const parsed = this._parseAndFormatMessages(messages2, format);
637
+ const parsed = this._parseAndFormatMessages(messages, "v2");
638
+ let finalMessages = parsed;
639
+ finalMessages = finalMessages.sort((a, b) => {
640
+ const aValue = field === "createdAt" ? new Date(a.createdAt).getTime() : a[field];
641
+ const bValue = field === "createdAt" ? new Date(b.createdAt).getTime() : b[field];
642
+ return direction === "ASC" ? aValue - bValue : bValue - aValue;
643
+ });
644
+ const returnedThreadMessageIds = new Set(finalMessages.filter((m) => m.threadId === threadId).map((m) => m.id));
645
+ const allThreadMessagesReturned = returnedThreadMessageIds.size >= total;
646
+ const hasMore = perPageInput !== false && !allThreadMessagesReturned && offset + perPage < total;
688
647
  return {
689
- messages: parsed,
690
- total: total + excludeIds.length,
691
- page: page2,
692
- perPage,
693
- hasMore: currentOffset + rows.length < total
648
+ messages: finalMessages,
649
+ total,
650
+ page,
651
+ perPage: perPageForResponse,
652
+ hasMore
694
653
  };
695
654
  } catch (error) {
696
655
  const mastraError = new MastraError(
697
656
  {
698
- id: "MASTRA_STORAGE_MSSQL_STORE_GET_MESSAGES_PAGINATED_FAILED",
657
+ id: "MASTRA_STORAGE_MSSQL_STORE_LIST_MESSAGES_FAILED",
699
658
  domain: ErrorDomain.STORAGE,
700
659
  category: ErrorCategory.THIRD_PARTY,
701
660
  details: {
702
661
  threadId,
703
- page
662
+ resourceId: resourceId ?? ""
704
663
  }
705
664
  },
706
665
  error
707
666
  );
708
667
  this.logger?.error?.(mastraError.toString());
709
- this.logger?.trackException(mastraError);
710
- return { messages: [], total: 0, page, perPage: perPageInput || 40, hasMore: false };
668
+ this.logger?.trackException?.(mastraError);
669
+ return {
670
+ messages: [],
671
+ total: 0,
672
+ page,
673
+ perPage: perPageForResponse,
674
+ hasMore: false
675
+ };
711
676
  }
712
677
  }
713
- async saveMessages({
714
- messages,
715
- format
716
- }) {
717
- if (messages.length === 0) return messages;
678
+ async saveMessages({ messages }) {
679
+ if (messages.length === 0) return { messages: [] };
718
680
  const threadId = messages[0]?.threadId;
719
681
  if (!threadId) {
720
682
  throw new MastraError({
@@ -796,8 +758,7 @@ var MemoryMSSQL = class extends MemoryStorage {
796
758
  return message;
797
759
  });
798
760
  const list = new MessageList().add(messagesWithParsedContent, "memory");
799
- if (format === "v2") return list.get.all.v2();
800
- return list.get.all.v1();
761
+ return { messages: list.get.all.db() };
801
762
  } catch (error) {
802
763
  throw new MastraError(
803
764
  {
@@ -973,8 +934,10 @@ var MemoryMSSQL = class extends MemoryStorage {
973
934
  return null;
974
935
  }
975
936
  return {
976
- ...result,
977
- workingMemory: typeof result.workingMemory === "object" ? JSON.stringify(result.workingMemory) : result.workingMemory,
937
+ id: result.id,
938
+ createdAt: result.createdAt,
939
+ updatedAt: result.updatedAt,
940
+ workingMemory: result.workingMemory,
978
941
  metadata: typeof result.metadata === "string" ? JSON.parse(result.metadata) : result.metadata
979
942
  };
980
943
  } catch (error) {
@@ -988,7 +951,7 @@ var MemoryMSSQL = class extends MemoryStorage {
988
951
  error
989
952
  );
990
953
  this.logger?.error?.(mastraError.toString());
991
- this.logger?.trackException(mastraError);
954
+ this.logger?.trackException?.(mastraError);
992
955
  throw mastraError;
993
956
  }
994
957
  }
@@ -997,7 +960,7 @@ var MemoryMSSQL = class extends MemoryStorage {
997
960
  tableName: TABLE_RESOURCES,
998
961
  record: {
999
962
  ...resource,
1000
- metadata: JSON.stringify(resource.metadata)
963
+ metadata: resource.metadata
1001
964
  }
1002
965
  });
1003
966
  return resource;
@@ -1055,138 +1018,463 @@ var MemoryMSSQL = class extends MemoryStorage {
1055
1018
  error
1056
1019
  );
1057
1020
  this.logger?.error?.(mastraError.toString());
1058
- this.logger?.trackException(mastraError);
1021
+ this.logger?.trackException?.(mastraError);
1059
1022
  throw mastraError;
1060
1023
  }
1061
1024
  }
1062
1025
  };
1063
- var StoreOperationsMSSQL = class extends StoreOperations {
1026
+ var ObservabilityMSSQL = class extends ObservabilityStorage {
1064
1027
  pool;
1065
- schemaName;
1066
- setupSchemaPromise = null;
1067
- schemaSetupComplete = void 0;
1068
- getSqlType(type, isPrimaryKey = false) {
1069
- switch (type) {
1070
- case "text":
1071
- return isPrimaryKey ? "NVARCHAR(255)" : "NVARCHAR(MAX)";
1072
- case "timestamp":
1073
- return "DATETIME2(7)";
1074
- case "uuid":
1075
- return "UNIQUEIDENTIFIER";
1076
- case "jsonb":
1077
- return "NVARCHAR(MAX)";
1078
- case "integer":
1079
- return "INT";
1080
- case "bigint":
1081
- return "BIGINT";
1082
- case "float":
1083
- return "FLOAT";
1084
- default:
1085
- throw new MastraError({
1086
- id: "MASTRA_STORAGE_MSSQL_STORE_TYPE_NOT_SUPPORTED",
1087
- domain: ErrorDomain.STORAGE,
1088
- category: ErrorCategory.THIRD_PARTY
1089
- });
1090
- }
1091
- }
1092
- constructor({ pool, schemaName }) {
1028
+ operations;
1029
+ schema;
1030
+ constructor({
1031
+ pool,
1032
+ operations,
1033
+ schema
1034
+ }) {
1093
1035
  super();
1094
1036
  this.pool = pool;
1095
- this.schemaName = schemaName;
1096
- }
1097
- async hasColumn(table, column) {
1098
- const schema = this.schemaName || "dbo";
1099
- const request = this.pool.request();
1100
- request.input("schema", schema);
1101
- request.input("table", table);
1102
- request.input("column", column);
1103
- request.input("columnLower", column.toLowerCase());
1104
- const result = await request.query(
1105
- `SELECT 1 FROM INFORMATION_SCHEMA.COLUMNS WHERE TABLE_SCHEMA = @schema AND TABLE_NAME = @table AND (COLUMN_NAME = @column OR COLUMN_NAME = @columnLower)`
1106
- );
1107
- return result.recordset.length > 0;
1037
+ this.operations = operations;
1038
+ this.schema = schema;
1108
1039
  }
1109
- async setupSchema() {
1110
- if (!this.schemaName || this.schemaSetupComplete) {
1111
- return;
1112
- }
1113
- if (!this.setupSchemaPromise) {
1114
- this.setupSchemaPromise = (async () => {
1115
- try {
1116
- const checkRequest = this.pool.request();
1117
- checkRequest.input("schemaName", this.schemaName);
1118
- const checkResult = await checkRequest.query(`
1119
- SELECT 1 AS found FROM INFORMATION_SCHEMA.SCHEMATA WHERE SCHEMA_NAME = @schemaName
1120
- `);
1121
- const schemaExists = Array.isArray(checkResult.recordset) && checkResult.recordset.length > 0;
1122
- if (!schemaExists) {
1123
- try {
1124
- await this.pool.request().query(`CREATE SCHEMA [${this.schemaName}]`);
1125
- this.logger?.info?.(`Schema "${this.schemaName}" created successfully`);
1126
- } catch (error) {
1127
- this.logger?.error?.(`Failed to create schema "${this.schemaName}"`, { error });
1128
- throw new Error(
1129
- `Unable to create schema "${this.schemaName}". This requires CREATE privilege on the database. Either create the schema manually or grant CREATE privilege to the user.`
1130
- );
1131
- }
1132
- }
1133
- this.schemaSetupComplete = true;
1134
- this.logger?.debug?.(`Schema "${this.schemaName}" is ready for use`);
1135
- } catch (error) {
1136
- this.schemaSetupComplete = void 0;
1137
- this.setupSchemaPromise = null;
1138
- throw error;
1139
- } finally {
1140
- this.setupSchemaPromise = null;
1141
- }
1142
- })();
1143
- }
1144
- await this.setupSchemaPromise;
1040
+ get aiTracingStrategy() {
1041
+ return {
1042
+ preferred: "batch-with-updates",
1043
+ supported: ["batch-with-updates", "insert-only"]
1044
+ };
1145
1045
  }
1146
- async insert({ tableName, record }) {
1046
+ async createAISpan(span) {
1147
1047
  try {
1148
- const columns = Object.keys(record).map((col) => parseSqlIdentifier(col, "column name"));
1149
- const values = Object.values(record);
1150
- const paramNames = values.map((_, i) => `@param${i}`);
1151
- const insertSql = `INSERT INTO ${getTableName({ indexName: tableName, schemaName: getSchemaName(this.schemaName) })} (${columns.map((c) => `[${c}]`).join(", ")}) VALUES (${paramNames.join(", ")})`;
1152
- const request = this.pool.request();
1153
- values.forEach((value, i) => {
1154
- if (value instanceof Date) {
1155
- request.input(`param${i}`, sql2.DateTime2, value);
1156
- } else if (typeof value === "object" && value !== null) {
1157
- request.input(`param${i}`, JSON.stringify(value));
1158
- } else {
1159
- request.input(`param${i}`, value);
1160
- }
1161
- });
1162
- await request.query(insertSql);
1048
+ const startedAt = span.startedAt instanceof Date ? span.startedAt.toISOString() : span.startedAt;
1049
+ const endedAt = span.endedAt instanceof Date ? span.endedAt.toISOString() : span.endedAt;
1050
+ const record = {
1051
+ ...span,
1052
+ startedAt,
1053
+ endedAt
1054
+ // Note: createdAt/updatedAt will be set by default values
1055
+ };
1056
+ return this.operations.insert({ tableName: TABLE_AI_SPANS, record });
1163
1057
  } catch (error) {
1164
1058
  throw new MastraError(
1165
1059
  {
1166
- id: "MASTRA_STORAGE_MSSQL_STORE_INSERT_FAILED",
1060
+ id: "MSSQL_STORE_CREATE_AI_SPAN_FAILED",
1167
1061
  domain: ErrorDomain.STORAGE,
1168
- category: ErrorCategory.THIRD_PARTY,
1062
+ category: ErrorCategory.USER,
1169
1063
  details: {
1170
- tableName
1064
+ spanId: span.spanId,
1065
+ traceId: span.traceId,
1066
+ spanType: span.spanType,
1067
+ spanName: span.name
1171
1068
  }
1172
1069
  },
1173
1070
  error
1174
1071
  );
1175
1072
  }
1176
1073
  }
1177
- async clearTable({ tableName }) {
1178
- const fullTableName = getTableName({ indexName: tableName, schemaName: getSchemaName(this.schemaName) });
1074
+ async getAITrace(traceId) {
1179
1075
  try {
1180
- try {
1181
- await this.pool.request().query(`TRUNCATE TABLE ${fullTableName}`);
1182
- } catch (truncateError) {
1183
- if (truncateError.message && truncateError.message.includes("foreign key")) {
1184
- await this.pool.request().query(`DELETE FROM ${fullTableName}`);
1185
- } else {
1186
- throw truncateError;
1187
- }
1188
- }
1189
- } catch (error) {
1076
+ const tableName = getTableName({
1077
+ indexName: TABLE_AI_SPANS,
1078
+ schemaName: getSchemaName(this.schema)
1079
+ });
1080
+ const request = this.pool.request();
1081
+ request.input("traceId", traceId);
1082
+ const result = await request.query(
1083
+ `SELECT
1084
+ [traceId], [spanId], [parentSpanId], [name], [scope], [spanType],
1085
+ [attributes], [metadata], [links], [input], [output], [error], [isEvent],
1086
+ [startedAt], [endedAt], [createdAt], [updatedAt]
1087
+ FROM ${tableName}
1088
+ WHERE [traceId] = @traceId
1089
+ ORDER BY [startedAt] DESC`
1090
+ );
1091
+ if (!result.recordset || result.recordset.length === 0) {
1092
+ return null;
1093
+ }
1094
+ return {
1095
+ traceId,
1096
+ spans: result.recordset.map(
1097
+ (span) => transformFromSqlRow({
1098
+ tableName: TABLE_AI_SPANS,
1099
+ sqlRow: span
1100
+ })
1101
+ )
1102
+ };
1103
+ } catch (error) {
1104
+ throw new MastraError(
1105
+ {
1106
+ id: "MSSQL_STORE_GET_AI_TRACE_FAILED",
1107
+ domain: ErrorDomain.STORAGE,
1108
+ category: ErrorCategory.USER,
1109
+ details: {
1110
+ traceId
1111
+ }
1112
+ },
1113
+ error
1114
+ );
1115
+ }
1116
+ }
1117
+ async updateAISpan({
1118
+ spanId,
1119
+ traceId,
1120
+ updates
1121
+ }) {
1122
+ try {
1123
+ const data = { ...updates };
1124
+ if (data.endedAt instanceof Date) {
1125
+ data.endedAt = data.endedAt.toISOString();
1126
+ }
1127
+ if (data.startedAt instanceof Date) {
1128
+ data.startedAt = data.startedAt.toISOString();
1129
+ }
1130
+ await this.operations.update({
1131
+ tableName: TABLE_AI_SPANS,
1132
+ keys: { spanId, traceId },
1133
+ data
1134
+ });
1135
+ } catch (error) {
1136
+ throw new MastraError(
1137
+ {
1138
+ id: "MSSQL_STORE_UPDATE_AI_SPAN_FAILED",
1139
+ domain: ErrorDomain.STORAGE,
1140
+ category: ErrorCategory.USER,
1141
+ details: {
1142
+ spanId,
1143
+ traceId
1144
+ }
1145
+ },
1146
+ error
1147
+ );
1148
+ }
1149
+ }
1150
+ async getAITracesPaginated({
1151
+ filters,
1152
+ pagination
1153
+ }) {
1154
+ const page = pagination?.page ?? 0;
1155
+ const perPage = pagination?.perPage ?? 10;
1156
+ const { entityId, entityType, ...actualFilters } = filters || {};
1157
+ const filtersWithDateRange = {
1158
+ ...actualFilters,
1159
+ ...buildDateRangeFilter(pagination?.dateRange, "startedAt"),
1160
+ parentSpanId: null
1161
+ // Only get root spans for traces
1162
+ };
1163
+ const whereClause = prepareWhereClause(filtersWithDateRange);
1164
+ let actualWhereClause = whereClause.sql;
1165
+ const params = { ...whereClause.params };
1166
+ let currentParamIndex = Object.keys(params).length + 1;
1167
+ if (entityId && entityType) {
1168
+ let name = "";
1169
+ if (entityType === "workflow") {
1170
+ name = `workflow run: '${entityId}'`;
1171
+ } else if (entityType === "agent") {
1172
+ name = `agent run: '${entityId}'`;
1173
+ } else {
1174
+ const error = new MastraError({
1175
+ id: "MSSQL_STORE_GET_AI_TRACES_PAGINATED_FAILED",
1176
+ domain: ErrorDomain.STORAGE,
1177
+ category: ErrorCategory.USER,
1178
+ details: {
1179
+ entityType
1180
+ },
1181
+ text: `Cannot filter by entity type: ${entityType}`
1182
+ });
1183
+ throw error;
1184
+ }
1185
+ const entityParam = `p${currentParamIndex++}`;
1186
+ if (actualWhereClause) {
1187
+ actualWhereClause += ` AND [name] = @${entityParam}`;
1188
+ } else {
1189
+ actualWhereClause = ` WHERE [name] = @${entityParam}`;
1190
+ }
1191
+ params[entityParam] = name;
1192
+ }
1193
+ const tableName = getTableName({
1194
+ indexName: TABLE_AI_SPANS,
1195
+ schemaName: getSchemaName(this.schema)
1196
+ });
1197
+ try {
1198
+ const countRequest = this.pool.request();
1199
+ Object.entries(params).forEach(([key, value]) => {
1200
+ countRequest.input(key, value);
1201
+ });
1202
+ const countResult = await countRequest.query(
1203
+ `SELECT COUNT(*) as count FROM ${tableName}${actualWhereClause}`
1204
+ );
1205
+ const total = countResult.recordset[0]?.count ?? 0;
1206
+ if (total === 0) {
1207
+ return {
1208
+ pagination: {
1209
+ total: 0,
1210
+ page,
1211
+ perPage,
1212
+ hasMore: false
1213
+ },
1214
+ spans: []
1215
+ };
1216
+ }
1217
+ const dataRequest = this.pool.request();
1218
+ Object.entries(params).forEach(([key, value]) => {
1219
+ dataRequest.input(key, value);
1220
+ });
1221
+ dataRequest.input("offset", page * perPage);
1222
+ dataRequest.input("limit", perPage);
1223
+ const dataResult = await dataRequest.query(
1224
+ `SELECT * FROM ${tableName}${actualWhereClause} ORDER BY [startedAt] DESC OFFSET @offset ROWS FETCH NEXT @limit ROWS ONLY`
1225
+ );
1226
+ const spans = dataResult.recordset.map(
1227
+ (row) => transformFromSqlRow({
1228
+ tableName: TABLE_AI_SPANS,
1229
+ sqlRow: row
1230
+ })
1231
+ );
1232
+ return {
1233
+ pagination: {
1234
+ total,
1235
+ page,
1236
+ perPage,
1237
+ hasMore: (page + 1) * perPage < total
1238
+ },
1239
+ spans
1240
+ };
1241
+ } catch (error) {
1242
+ throw new MastraError(
1243
+ {
1244
+ id: "MSSQL_STORE_GET_AI_TRACES_PAGINATED_FAILED",
1245
+ domain: ErrorDomain.STORAGE,
1246
+ category: ErrorCategory.USER
1247
+ },
1248
+ error
1249
+ );
1250
+ }
1251
+ }
1252
+ async batchCreateAISpans(args) {
1253
+ if (!args.records || args.records.length === 0) {
1254
+ return;
1255
+ }
1256
+ try {
1257
+ await this.operations.batchInsert({
1258
+ tableName: TABLE_AI_SPANS,
1259
+ records: args.records.map((span) => ({
1260
+ ...span,
1261
+ startedAt: span.startedAt instanceof Date ? span.startedAt.toISOString() : span.startedAt,
1262
+ endedAt: span.endedAt instanceof Date ? span.endedAt.toISOString() : span.endedAt
1263
+ }))
1264
+ });
1265
+ } catch (error) {
1266
+ throw new MastraError(
1267
+ {
1268
+ id: "MSSQL_STORE_BATCH_CREATE_AI_SPANS_FAILED",
1269
+ domain: ErrorDomain.STORAGE,
1270
+ category: ErrorCategory.USER,
1271
+ details: {
1272
+ count: args.records.length
1273
+ }
1274
+ },
1275
+ error
1276
+ );
1277
+ }
1278
+ }
1279
+ async batchUpdateAISpans(args) {
1280
+ if (!args.records || args.records.length === 0) {
1281
+ return;
1282
+ }
1283
+ try {
1284
+ const updates = args.records.map(({ traceId, spanId, updates: data }) => {
1285
+ const processedData = { ...data };
1286
+ if (processedData.endedAt instanceof Date) {
1287
+ processedData.endedAt = processedData.endedAt.toISOString();
1288
+ }
1289
+ if (processedData.startedAt instanceof Date) {
1290
+ processedData.startedAt = processedData.startedAt.toISOString();
1291
+ }
1292
+ return {
1293
+ keys: { spanId, traceId },
1294
+ data: processedData
1295
+ };
1296
+ });
1297
+ await this.operations.batchUpdate({
1298
+ tableName: TABLE_AI_SPANS,
1299
+ updates
1300
+ });
1301
+ } catch (error) {
1302
+ throw new MastraError(
1303
+ {
1304
+ id: "MSSQL_STORE_BATCH_UPDATE_AI_SPANS_FAILED",
1305
+ domain: ErrorDomain.STORAGE,
1306
+ category: ErrorCategory.USER,
1307
+ details: {
1308
+ count: args.records.length
1309
+ }
1310
+ },
1311
+ error
1312
+ );
1313
+ }
1314
+ }
1315
+ async batchDeleteAITraces(args) {
1316
+ if (!args.traceIds || args.traceIds.length === 0) {
1317
+ return;
1318
+ }
1319
+ try {
1320
+ const keys = args.traceIds.map((traceId) => ({ traceId }));
1321
+ await this.operations.batchDelete({
1322
+ tableName: TABLE_AI_SPANS,
1323
+ keys
1324
+ });
1325
+ } catch (error) {
1326
+ throw new MastraError(
1327
+ {
1328
+ id: "MSSQL_STORE_BATCH_DELETE_AI_TRACES_FAILED",
1329
+ domain: ErrorDomain.STORAGE,
1330
+ category: ErrorCategory.USER,
1331
+ details: {
1332
+ count: args.traceIds.length
1333
+ }
1334
+ },
1335
+ error
1336
+ );
1337
+ }
1338
+ }
1339
+ };
1340
+ var StoreOperationsMSSQL = class extends StoreOperations {
1341
+ pool;
1342
+ schemaName;
1343
+ setupSchemaPromise = null;
1344
+ schemaSetupComplete = void 0;
1345
+ getSqlType(type, isPrimaryKey = false, useLargeStorage = false) {
1346
+ switch (type) {
1347
+ case "text":
1348
+ if (useLargeStorage) {
1349
+ return "NVARCHAR(MAX)";
1350
+ }
1351
+ return isPrimaryKey ? "NVARCHAR(255)" : "NVARCHAR(400)";
1352
+ case "timestamp":
1353
+ return "DATETIME2(7)";
1354
+ case "uuid":
1355
+ return "UNIQUEIDENTIFIER";
1356
+ case "jsonb":
1357
+ return "NVARCHAR(MAX)";
1358
+ case "integer":
1359
+ return "INT";
1360
+ case "bigint":
1361
+ return "BIGINT";
1362
+ case "float":
1363
+ return "FLOAT";
1364
+ case "boolean":
1365
+ return "BIT";
1366
+ default:
1367
+ throw new MastraError({
1368
+ id: "MASTRA_STORAGE_MSSQL_STORE_TYPE_NOT_SUPPORTED",
1369
+ domain: ErrorDomain.STORAGE,
1370
+ category: ErrorCategory.THIRD_PARTY
1371
+ });
1372
+ }
1373
+ }
1374
+ constructor({ pool, schemaName }) {
1375
+ super();
1376
+ this.pool = pool;
1377
+ this.schemaName = schemaName;
1378
+ }
1379
+ async hasColumn(table, column) {
1380
+ const schema = this.schemaName || "dbo";
1381
+ const request = this.pool.request();
1382
+ request.input("schema", schema);
1383
+ request.input("table", table);
1384
+ request.input("column", column);
1385
+ request.input("columnLower", column.toLowerCase());
1386
+ const result = await request.query(
1387
+ `SELECT 1 FROM INFORMATION_SCHEMA.COLUMNS WHERE TABLE_SCHEMA = @schema AND TABLE_NAME = @table AND (COLUMN_NAME = @column OR COLUMN_NAME = @columnLower)`
1388
+ );
1389
+ return result.recordset.length > 0;
1390
+ }
1391
+ async setupSchema() {
1392
+ if (!this.schemaName || this.schemaSetupComplete) {
1393
+ return;
1394
+ }
1395
+ if (!this.setupSchemaPromise) {
1396
+ this.setupSchemaPromise = (async () => {
1397
+ try {
1398
+ const checkRequest = this.pool.request();
1399
+ checkRequest.input("schemaName", this.schemaName);
1400
+ const checkResult = await checkRequest.query(`
1401
+ SELECT 1 AS found FROM INFORMATION_SCHEMA.SCHEMATA WHERE SCHEMA_NAME = @schemaName
1402
+ `);
1403
+ const schemaExists = Array.isArray(checkResult.recordset) && checkResult.recordset.length > 0;
1404
+ if (!schemaExists) {
1405
+ try {
1406
+ await this.pool.request().query(`CREATE SCHEMA [${this.schemaName}]`);
1407
+ this.logger?.info?.(`Schema "${this.schemaName}" created successfully`);
1408
+ } catch (error) {
1409
+ this.logger?.error?.(`Failed to create schema "${this.schemaName}"`, { error });
1410
+ throw new Error(
1411
+ `Unable to create schema "${this.schemaName}". This requires CREATE privilege on the database. Either create the schema manually or grant CREATE privilege to the user.`
1412
+ );
1413
+ }
1414
+ }
1415
+ this.schemaSetupComplete = true;
1416
+ this.logger?.debug?.(`Schema "${this.schemaName}" is ready for use`);
1417
+ } catch (error) {
1418
+ this.schemaSetupComplete = void 0;
1419
+ this.setupSchemaPromise = null;
1420
+ throw error;
1421
+ } finally {
1422
+ this.setupSchemaPromise = null;
1423
+ }
1424
+ })();
1425
+ }
1426
+ await this.setupSchemaPromise;
1427
+ }
1428
+ async insert({
1429
+ tableName,
1430
+ record,
1431
+ transaction
1432
+ }) {
1433
+ try {
1434
+ const columns = Object.keys(record);
1435
+ const parsedColumns = columns.map((col) => parseSqlIdentifier(col, "column name"));
1436
+ const paramNames = columns.map((_, i) => `@param${i}`);
1437
+ const insertSql = `INSERT INTO ${getTableName({ indexName: tableName, schemaName: getSchemaName(this.schemaName) })} (${parsedColumns.map((c) => `[${c}]`).join(", ")}) VALUES (${paramNames.join(", ")})`;
1438
+ const request = transaction ? transaction.request() : this.pool.request();
1439
+ columns.forEach((col, i) => {
1440
+ const value = record[col];
1441
+ const preparedValue = this.prepareValue(value, col, tableName);
1442
+ if (preparedValue instanceof Date) {
1443
+ request.input(`param${i}`, sql2.DateTime2, preparedValue);
1444
+ } else if (preparedValue === null || preparedValue === void 0) {
1445
+ request.input(`param${i}`, this.getMssqlType(tableName, col), null);
1446
+ } else {
1447
+ request.input(`param${i}`, preparedValue);
1448
+ }
1449
+ });
1450
+ await request.query(insertSql);
1451
+ } catch (error) {
1452
+ throw new MastraError(
1453
+ {
1454
+ id: "MASTRA_STORAGE_MSSQL_STORE_INSERT_FAILED",
1455
+ domain: ErrorDomain.STORAGE,
1456
+ category: ErrorCategory.THIRD_PARTY,
1457
+ details: {
1458
+ tableName
1459
+ }
1460
+ },
1461
+ error
1462
+ );
1463
+ }
1464
+ }
1465
+ async clearTable({ tableName }) {
1466
+ const fullTableName = getTableName({ indexName: tableName, schemaName: getSchemaName(this.schemaName) });
1467
+ try {
1468
+ try {
1469
+ await this.pool.request().query(`TRUNCATE TABLE ${fullTableName}`);
1470
+ } catch (truncateError) {
1471
+ if (truncateError?.number === 4712) {
1472
+ await this.pool.request().query(`DELETE FROM ${fullTableName}`);
1473
+ } else {
1474
+ throw truncateError;
1475
+ }
1476
+ }
1477
+ } catch (error) {
1190
1478
  throw new MastraError(
1191
1479
  {
1192
1480
  id: "MASTRA_STORAGE_MSSQL_STORE_CLEAR_TABLE_FAILED",
@@ -1203,9 +1491,11 @@ var StoreOperationsMSSQL = class extends StoreOperations {
1203
1491
  getDefaultValue(type) {
1204
1492
  switch (type) {
1205
1493
  case "timestamp":
1206
- return "DEFAULT SYSDATETIMEOFFSET()";
1494
+ return "DEFAULT SYSUTCDATETIME()";
1207
1495
  case "jsonb":
1208
1496
  return "DEFAULT N'{}'";
1497
+ case "boolean":
1498
+ return "DEFAULT 0";
1209
1499
  default:
1210
1500
  return super.getDefaultValue(type);
1211
1501
  }
@@ -1216,13 +1506,29 @@ var StoreOperationsMSSQL = class extends StoreOperations {
1216
1506
  }) {
1217
1507
  try {
1218
1508
  const uniqueConstraintColumns = tableName === TABLE_WORKFLOW_SNAPSHOT ? ["workflow_name", "run_id"] : [];
1509
+ const largeDataColumns = [
1510
+ "workingMemory",
1511
+ "snapshot",
1512
+ "metadata",
1513
+ "content",
1514
+ // messages.content - can be very long conversation content
1515
+ "input",
1516
+ // evals.input - test input data
1517
+ "output",
1518
+ // evals.output - test output data
1519
+ "instructions",
1520
+ // evals.instructions - evaluation instructions
1521
+ "other"
1522
+ // traces.other - additional trace data
1523
+ ];
1219
1524
  const columns = Object.entries(schema).map(([name, def]) => {
1220
1525
  const parsedName = parseSqlIdentifier(name, "column name");
1221
1526
  const constraints = [];
1222
1527
  if (def.primaryKey) constraints.push("PRIMARY KEY");
1223
1528
  if (!def.nullable) constraints.push("NOT NULL");
1224
1529
  const isIndexed = !!def.primaryKey || uniqueConstraintColumns.includes(name);
1225
- return `[${parsedName}] ${this.getSqlType(def.type, isIndexed)} ${constraints.join(" ")}`.trim();
1530
+ const useLargeStorage = largeDataColumns.includes(name);
1531
+ return `[${parsedName}] ${this.getSqlType(def.type, isIndexed, useLargeStorage)} ${constraints.join(" ")}`.trim();
1226
1532
  }).join(",\n");
1227
1533
  if (this.schemaName) {
1228
1534
  await this.setupSchema();
@@ -1309,7 +1615,19 @@ ${columns}
1309
1615
  const columnExists = Array.isArray(checkResult.recordset) && checkResult.recordset.length > 0;
1310
1616
  if (!columnExists) {
1311
1617
  const columnDef = schema[columnName];
1312
- const sqlType = this.getSqlType(columnDef.type);
1618
+ const largeDataColumns = [
1619
+ "workingMemory",
1620
+ "snapshot",
1621
+ "metadata",
1622
+ "content",
1623
+ "input",
1624
+ "output",
1625
+ "instructions",
1626
+ "other"
1627
+ ];
1628
+ const useLargeStorage = largeDataColumns.includes(columnName);
1629
+ const isIndexed = !!columnDef.primaryKey;
1630
+ const sqlType = this.getSqlType(columnDef.type, isIndexed, useLargeStorage);
1313
1631
  const nullable = columnDef.nullable === false ? "NOT NULL" : "";
1314
1632
  const defaultValue = columnDef.nullable === false ? this.getDefaultValue(columnDef.type) : "";
1315
1633
  const parsedColumnName = parseSqlIdentifier(columnName, "column name");
@@ -1337,13 +1655,17 @@ ${columns}
1337
1655
  try {
1338
1656
  const keyEntries = Object.entries(keys).map(([key, value]) => [parseSqlIdentifier(key, "column name"), value]);
1339
1657
  const conditions = keyEntries.map(([key], i) => `[${key}] = @param${i}`).join(" AND ");
1340
- const values = keyEntries.map(([_, value]) => value);
1341
- const sql7 = `SELECT * FROM ${getTableName({ indexName: tableName, schemaName: getSchemaName(this.schemaName) })} WHERE ${conditions}`;
1658
+ const sql5 = `SELECT * FROM ${getTableName({ indexName: tableName, schemaName: getSchemaName(this.schemaName) })} WHERE ${conditions}`;
1342
1659
  const request = this.pool.request();
1343
- values.forEach((value, i) => {
1344
- request.input(`param${i}`, value);
1660
+ keyEntries.forEach(([key, value], i) => {
1661
+ const preparedValue = this.prepareValue(value, key, tableName);
1662
+ if (preparedValue === null || preparedValue === void 0) {
1663
+ request.input(`param${i}`, this.getMssqlType(tableName, key), null);
1664
+ } else {
1665
+ request.input(`param${i}`, preparedValue);
1666
+ }
1345
1667
  });
1346
- const resultSet = await request.query(sql7);
1668
+ const resultSet = await request.query(sql5);
1347
1669
  const result = resultSet.recordset[0] || null;
1348
1670
  if (!result) {
1349
1671
  return null;
@@ -1375,63 +1697,599 @@ ${columns}
1375
1697
  try {
1376
1698
  await transaction.begin();
1377
1699
  for (const record of records) {
1378
- await this.insert({ tableName, record });
1700
+ await this.insert({ tableName, record, transaction });
1701
+ }
1702
+ await transaction.commit();
1703
+ } catch (error) {
1704
+ await transaction.rollback();
1705
+ throw new MastraError(
1706
+ {
1707
+ id: "MASTRA_STORAGE_MSSQL_STORE_BATCH_INSERT_FAILED",
1708
+ domain: ErrorDomain.STORAGE,
1709
+ category: ErrorCategory.THIRD_PARTY,
1710
+ details: {
1711
+ tableName,
1712
+ numberOfRecords: records.length
1713
+ }
1714
+ },
1715
+ error
1716
+ );
1717
+ }
1718
+ }
1719
+ async dropTable({ tableName }) {
1720
+ try {
1721
+ const tableNameWithSchema = getTableName({ indexName: tableName, schemaName: getSchemaName(this.schemaName) });
1722
+ await this.pool.request().query(`DROP TABLE IF EXISTS ${tableNameWithSchema}`);
1723
+ } catch (error) {
1724
+ throw new MastraError(
1725
+ {
1726
+ id: "MASTRA_STORAGE_MSSQL_STORE_DROP_TABLE_FAILED",
1727
+ domain: ErrorDomain.STORAGE,
1728
+ category: ErrorCategory.THIRD_PARTY,
1729
+ details: {
1730
+ tableName
1731
+ }
1732
+ },
1733
+ error
1734
+ );
1735
+ }
1736
+ }
1737
+ /**
1738
+ * Prepares a value for database operations, handling Date objects and JSON serialization
1739
+ */
1740
+ prepareValue(value, columnName, tableName) {
1741
+ if (value === null || value === void 0) {
1742
+ return value;
1743
+ }
1744
+ if (value instanceof Date) {
1745
+ return value;
1746
+ }
1747
+ const schema = TABLE_SCHEMAS[tableName];
1748
+ const columnSchema = schema?.[columnName];
1749
+ if (columnSchema?.type === "boolean") {
1750
+ return value ? 1 : 0;
1751
+ }
1752
+ if (columnSchema?.type === "jsonb") {
1753
+ return JSON.stringify(value);
1754
+ }
1755
+ if (typeof value === "object") {
1756
+ return JSON.stringify(value);
1757
+ }
1758
+ return value;
1759
+ }
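To make the conversions in `prepareValue` concrete, here is a rough illustration of its input/output behavior, with a simplified column lookup standing in for the real TABLE_SCHEMAS metadata:

```ts
// Simplified stand-in for TABLE_SCHEMAS[tableName][columnName].
type ColumnDef = { type: 'text' | 'timestamp' | 'jsonb' | 'boolean' | 'integer' };

function prepareValueSketch(value: unknown, column?: ColumnDef): unknown {
  if (value === null || value === undefined) return value;      // bound later with an explicit mssql type
  if (value instanceof Date) return value;                      // dates pass through untouched
  if (column?.type === 'boolean') return value ? 1 : 0;         // BIT columns expect 0/1
  if (column?.type === 'jsonb') return JSON.stringify(value);
  if (typeof value === 'object') return JSON.stringify(value);  // defensive serialization for untyped objects
  return value;
}

prepareValueSketch(true, { type: 'boolean' });             // -> 1
prepareValueSketch({ archived: true }, { type: 'jsonb' }); // -> '{"archived":true}'
prepareValueSketch(new Date('2025-01-01T00:00:00Z'));      // -> the same Date instance
```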
1760
+ /**
1761
+ * Maps TABLE_SCHEMAS types to mssql param types (used when value is null)
1762
+ */
1763
+ getMssqlType(tableName, columnName) {
1764
+ const col = TABLE_SCHEMAS[tableName]?.[columnName];
1765
+ switch (col?.type) {
1766
+ case "text":
1767
+ return sql2.NVarChar;
1768
+ case "timestamp":
1769
+ return sql2.DateTime2;
1770
+ case "uuid":
1771
+ return sql2.UniqueIdentifier;
1772
+ case "jsonb":
1773
+ return sql2.NVarChar;
1774
+ case "integer":
1775
+ return sql2.Int;
1776
+ case "bigint":
1777
+ return sql2.BigInt;
1778
+ case "float":
1779
+ return sql2.Float;
1780
+ case "boolean":
1781
+ return sql2.Bit;
1782
+ default:
1783
+ return sql2.NVarChar;
1784
+ }
1785
+ }
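The type map above exists because the driver cannot infer an SQL type from a bare `null`; callers therefore use the three-argument `request.input(name, type, value)` form for NULLs and the two-argument form otherwise. A small sketch of that binding pattern (table and column names illustrative):

```ts
import sql from 'mssql';

async function selectByResourceId(pool: sql.ConnectionPool, table: string, resourceId: string | null) {
  const request = pool.request();
  if (resourceId === null || resourceId === undefined) {
    // NULL needs an explicit type so the parameter is declared correctly.
    request.input('p0', sql.NVarChar, null);
  } else {
    request.input('p0', resourceId); // type inferred from the JS value
  }
  return request.query(`SELECT * FROM ${table} WHERE [resourceId] = @p0`);
}
```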
1786
+ /**
1787
+ * Update a single record in the database
1788
+ */
1789
+ async update({
1790
+ tableName,
1791
+ keys,
1792
+ data,
1793
+ transaction
1794
+ }) {
1795
+ try {
1796
+ if (!data || Object.keys(data).length === 0) {
1797
+ throw new MastraError({
1798
+ id: "MASTRA_STORAGE_MSSQL_UPDATE_EMPTY_DATA",
1799
+ domain: ErrorDomain.STORAGE,
1800
+ category: ErrorCategory.USER,
1801
+ text: "Cannot update with empty data payload"
1802
+ });
1803
+ }
1804
+ if (!keys || Object.keys(keys).length === 0) {
1805
+ throw new MastraError({
1806
+ id: "MASTRA_STORAGE_MSSQL_UPDATE_EMPTY_KEYS",
1807
+ domain: ErrorDomain.STORAGE,
1808
+ category: ErrorCategory.USER,
1809
+ text: "Cannot update without keys to identify records"
1810
+ });
1811
+ }
1812
+ const setClauses = [];
1813
+ const request = transaction ? transaction.request() : this.pool.request();
1814
+ let paramIndex = 0;
1815
+ Object.entries(data).forEach(([key, value]) => {
1816
+ const parsedKey = parseSqlIdentifier(key, "column name");
1817
+ const paramName = `set${paramIndex++}`;
1818
+ setClauses.push(`[${parsedKey}] = @${paramName}`);
1819
+ const preparedValue = this.prepareValue(value, key, tableName);
1820
+ if (preparedValue === null || preparedValue === void 0) {
1821
+ request.input(paramName, this.getMssqlType(tableName, key), null);
1822
+ } else {
1823
+ request.input(paramName, preparedValue);
1824
+ }
1825
+ });
1826
+ const whereConditions = [];
1827
+ Object.entries(keys).forEach(([key, value]) => {
1828
+ const parsedKey = parseSqlIdentifier(key, "column name");
1829
+ const paramName = `where${paramIndex++}`;
1830
+ whereConditions.push(`[${parsedKey}] = @${paramName}`);
1831
+ const preparedValue = this.prepareValue(value, key, tableName);
1832
+ if (preparedValue === null || preparedValue === void 0) {
1833
+ request.input(paramName, this.getMssqlType(tableName, key), null);
1834
+ } else {
1835
+ request.input(paramName, preparedValue);
1836
+ }
1837
+ });
1838
+ const tableName_ = getTableName({
1839
+ indexName: tableName,
1840
+ schemaName: getSchemaName(this.schemaName)
1841
+ });
1842
+ const updateSql = `UPDATE ${tableName_} SET ${setClauses.join(", ")} WHERE ${whereConditions.join(" AND ")}`;
1843
+ await request.query(updateSql);
1844
+ } catch (error) {
1845
+ throw new MastraError(
1846
+ {
1847
+ id: "MASTRA_STORAGE_MSSQL_STORE_UPDATE_FAILED",
1848
+ domain: ErrorDomain.STORAGE,
1849
+ category: ErrorCategory.THIRD_PARTY,
1850
+ details: {
1851
+ tableName
1852
+ }
1853
+ },
1854
+ error
1855
+ );
1856
+ }
1857
+ }
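A sketch of how the new `update` operation might be called; `data` drives the parameterized SET clause and `keys` the WHERE clause, and values flow through `prepareValue` first (the instance name `operations` and the column names are illustrative):

```ts
import { TABLE_THREADS } from '@mastra/core/storage';

// Rename a thread and refresh its metadata in one parameterized UPDATE.
await operations.update({
  tableName: TABLE_THREADS,
  keys: { id: 'thread-123' },        // -> WHERE [id] = @whereN
  data: {
    title: 'Renamed thread',         // -> SET [title] = @set0, ...
    metadata: { archived: true },    // serialized by prepareValue for jsonb columns
    updatedAt: new Date(),
  },
});
```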
1858
+ /**
1859
+ * Update multiple records in a single batch transaction
1860
+ */
1861
+ async batchUpdate({
1862
+ tableName,
1863
+ updates
1864
+ }) {
1865
+ const transaction = this.pool.transaction();
1866
+ try {
1867
+ await transaction.begin();
1868
+ for (const { keys, data } of updates) {
1869
+ await this.update({ tableName, keys, data, transaction });
1379
1870
  }
1380
1871
  await transaction.commit();
1381
1872
  } catch (error) {
1382
- await transaction.rollback();
1873
+ await transaction.rollback();
1874
+ throw new MastraError(
1875
+ {
1876
+ id: "MASTRA_STORAGE_MSSQL_STORE_BATCH_UPDATE_FAILED",
1877
+ domain: ErrorDomain.STORAGE,
1878
+ category: ErrorCategory.THIRD_PARTY,
1879
+ details: {
1880
+ tableName,
1881
+ numberOfRecords: updates.length
1882
+ }
1883
+ },
1884
+ error
1885
+ );
1886
+ }
1887
+ }
1888
+ /**
1889
+ * Delete multiple records by keys
1890
+ */
1891
+ async batchDelete({ tableName, keys }) {
1892
+ if (keys.length === 0) {
1893
+ return;
1894
+ }
1895
+ const tableName_ = getTableName({
1896
+ indexName: tableName,
1897
+ schemaName: getSchemaName(this.schemaName)
1898
+ });
1899
+ const transaction = this.pool.transaction();
1900
+ try {
1901
+ await transaction.begin();
1902
+ for (const keySet of keys) {
1903
+ const conditions = [];
1904
+ const request = transaction.request();
1905
+ let paramIndex = 0;
1906
+ Object.entries(keySet).forEach(([key, value]) => {
1907
+ const parsedKey = parseSqlIdentifier(key, "column name");
1908
+ const paramName = `p${paramIndex++}`;
1909
+ conditions.push(`[${parsedKey}] = @${paramName}`);
1910
+ const preparedValue = this.prepareValue(value, key, tableName);
1911
+ if (preparedValue === null || preparedValue === void 0) {
1912
+ request.input(paramName, this.getMssqlType(tableName, key), null);
1913
+ } else {
1914
+ request.input(paramName, preparedValue);
1915
+ }
1916
+ });
1917
+ const deleteSql = `DELETE FROM ${tableName_} WHERE ${conditions.join(" AND ")}`;
1918
+ await request.query(deleteSql);
1919
+ }
1920
+ await transaction.commit();
1921
+ } catch (error) {
1922
+ await transaction.rollback();
1923
+ throw new MastraError(
1924
+ {
1925
+ id: "MASTRA_STORAGE_MSSQL_STORE_BATCH_DELETE_FAILED",
1926
+ domain: ErrorDomain.STORAGE,
1927
+ category: ErrorCategory.THIRD_PARTY,
1928
+ details: {
1929
+ tableName,
1930
+ numberOfRecords: keys.length
1931
+ }
1932
+ },
1933
+ error
1934
+ );
1935
+ }
1936
+ }
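Both batch helpers above run their per-record statements inside a single transaction and roll everything back on the first failure. Illustrative calls (record ids are examples):

```ts
import { TABLE_MESSAGES } from '@mastra/core/storage';

// Either every update commits or none do.
await operations.batchUpdate({
  tableName: TABLE_MESSAGES,
  updates: [
    { keys: { id: 'msg-1' }, data: { resourceId: 'user-7' } },
    { keys: { id: 'msg-2' }, data: { resourceId: 'user-7' } },
  ],
});

// Deletes are keyed by arbitrary column sets and share the same transactional behavior.
await operations.batchDelete({
  tableName: TABLE_MESSAGES,
  keys: [{ id: 'msg-1' }, { id: 'msg-2' }],
});
```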
1937
+ /**
1938
+ * Create a new index on a table
1939
+ */
1940
+ async createIndex(options) {
1941
+ try {
1942
+ const { name, table, columns, unique = false, where } = options;
1943
+ const schemaName = this.schemaName || "dbo";
1944
+ const fullTableName = getTableName({
1945
+ indexName: table,
1946
+ schemaName: getSchemaName(this.schemaName)
1947
+ });
1948
+ const indexNameSafe = parseSqlIdentifier(name, "index name");
1949
+ const checkRequest = this.pool.request();
1950
+ checkRequest.input("indexName", indexNameSafe);
1951
+ checkRequest.input("schemaName", schemaName);
1952
+ checkRequest.input("tableName", table);
1953
+ const indexExists = await checkRequest.query(`
1954
+ SELECT 1 as found
1955
+ FROM sys.indexes i
1956
+ INNER JOIN sys.tables t ON i.object_id = t.object_id
1957
+ INNER JOIN sys.schemas s ON t.schema_id = s.schema_id
1958
+ WHERE i.name = @indexName
1959
+ AND s.name = @schemaName
1960
+ AND t.name = @tableName
1961
+ `);
1962
+ if (indexExists.recordset && indexExists.recordset.length > 0) {
1963
+ return;
1964
+ }
1965
+ const uniqueStr = unique ? "UNIQUE " : "";
1966
+ const columnsStr = columns.map((col) => {
1967
+ if (col.includes(" DESC") || col.includes(" ASC")) {
1968
+ const [colName, ...modifiers] = col.split(" ");
1969
+ if (!colName) {
1970
+ throw new Error(`Invalid column specification: ${col}`);
1971
+ }
1972
+ return `[${parseSqlIdentifier(colName, "column name")}] ${modifiers.join(" ")}`;
1973
+ }
1974
+ return `[${parseSqlIdentifier(col, "column name")}]`;
1975
+ }).join(", ");
1976
+ const whereStr = where ? ` WHERE ${where}` : "";
1977
+ const createIndexSql = `CREATE ${uniqueStr}INDEX [${indexNameSafe}] ON ${fullTableName} (${columnsStr})${whereStr}`;
1978
+ await this.pool.request().query(createIndexSql);
1979
+ } catch (error) {
1980
+ throw new MastraError(
1981
+ {
1982
+ id: "MASTRA_STORAGE_MSSQL_INDEX_CREATE_FAILED",
1983
+ domain: ErrorDomain.STORAGE,
1984
+ category: ErrorCategory.THIRD_PARTY,
1985
+ details: {
1986
+ indexName: options.name,
1987
+ tableName: options.table
1988
+ }
1989
+ },
1990
+ error
1991
+ );
1992
+ }
1993
+ }
1994
+ /**
1995
+ * Drop an existing index
1996
+ */
1997
+ async dropIndex(indexName) {
1998
+ try {
1999
+ const schemaName = this.schemaName || "dbo";
2000
+ const indexNameSafe = parseSqlIdentifier(indexName, "index name");
2001
+ const checkRequest = this.pool.request();
2002
+ checkRequest.input("indexName", indexNameSafe);
2003
+ checkRequest.input("schemaName", schemaName);
2004
+ const result = await checkRequest.query(`
2005
+ SELECT t.name as table_name
2006
+ FROM sys.indexes i
2007
+ INNER JOIN sys.tables t ON i.object_id = t.object_id
2008
+ INNER JOIN sys.schemas s ON t.schema_id = s.schema_id
2009
+ WHERE i.name = @indexName
2010
+ AND s.name = @schemaName
2011
+ `);
2012
+ if (!result.recordset || result.recordset.length === 0) {
2013
+ return;
2014
+ }
2015
+ if (result.recordset.length > 1) {
2016
+ const tables = result.recordset.map((r) => r.table_name).join(", ");
2017
+ throw new MastraError({
2018
+ id: "MASTRA_STORAGE_MSSQL_INDEX_AMBIGUOUS",
2019
+ domain: ErrorDomain.STORAGE,
2020
+ category: ErrorCategory.USER,
2021
+ text: `Index "${indexNameSafe}" exists on multiple tables (${tables}) in schema "${schemaName}". Please drop indexes manually or ensure unique index names.`
2022
+ });
2023
+ }
2024
+ const tableName = result.recordset[0].table_name;
2025
+ const fullTableName = getTableName({
2026
+ indexName: tableName,
2027
+ schemaName: getSchemaName(this.schemaName)
2028
+ });
2029
+ const dropSql = `DROP INDEX [${indexNameSafe}] ON ${fullTableName}`;
2030
+ await this.pool.request().query(dropSql);
2031
+ } catch (error) {
2032
+ throw new MastraError(
2033
+ {
2034
+ id: "MASTRA_STORAGE_MSSQL_INDEX_DROP_FAILED",
2035
+ domain: ErrorDomain.STORAGE,
2036
+ category: ErrorCategory.THIRD_PARTY,
2037
+ details: {
2038
+ indexName
2039
+ }
2040
+ },
2041
+ error
2042
+ );
2043
+ }
2044
+ }
2045
+ /**
2046
+ * List indexes for a specific table or all tables
2047
+ */
2048
+ async listIndexes(tableName) {
2049
+ try {
2050
+ const schemaName = this.schemaName || "dbo";
2051
+ let query;
2052
+ const request = this.pool.request();
2053
+ request.input("schemaName", schemaName);
2054
+ if (tableName) {
2055
+ query = `
2056
+ SELECT
2057
+ i.name as name,
2058
+ o.name as [table],
2059
+ i.is_unique as is_unique,
2060
+ CAST(SUM(s.used_page_count) * 8 / 1024.0 AS VARCHAR(50)) + ' MB' as size
2061
+ FROM sys.indexes i
2062
+ INNER JOIN sys.objects o ON i.object_id = o.object_id
2063
+ INNER JOIN sys.schemas sch ON o.schema_id = sch.schema_id
2064
+ LEFT JOIN sys.dm_db_partition_stats s ON i.object_id = s.object_id AND i.index_id = s.index_id
2065
+ WHERE sch.name = @schemaName
2066
+ AND o.name = @tableName
2067
+ AND i.name IS NOT NULL
2068
+ GROUP BY i.name, o.name, i.is_unique
2069
+ `;
2070
+ request.input("tableName", tableName);
2071
+ } else {
2072
+ query = `
2073
+ SELECT
2074
+ i.name as name,
2075
+ o.name as [table],
2076
+ i.is_unique as is_unique,
2077
+ CAST(SUM(s.used_page_count) * 8 / 1024.0 AS VARCHAR(50)) + ' MB' as size
2078
+ FROM sys.indexes i
2079
+ INNER JOIN sys.objects o ON i.object_id = o.object_id
2080
+ INNER JOIN sys.schemas sch ON o.schema_id = sch.schema_id
2081
+ LEFT JOIN sys.dm_db_partition_stats s ON i.object_id = s.object_id AND i.index_id = s.index_id
2082
+ WHERE sch.name = @schemaName
2083
+ AND i.name IS NOT NULL
2084
+ GROUP BY i.name, o.name, i.is_unique
2085
+ `;
2086
+ }
2087
+ const result = await request.query(query);
2088
+ const indexes = [];
2089
+ for (const row of result.recordset) {
2090
+ const colRequest = this.pool.request();
2091
+ colRequest.input("indexName", row.name);
2092
+ colRequest.input("schemaName", schemaName);
2093
+ const colResult = await colRequest.query(`
2094
+ SELECT c.name as column_name
2095
+ FROM sys.indexes i
2096
+ INNER JOIN sys.index_columns ic ON i.object_id = ic.object_id AND i.index_id = ic.index_id
2097
+ INNER JOIN sys.columns c ON ic.object_id = c.object_id AND ic.column_id = c.column_id
2098
+ INNER JOIN sys.objects o ON i.object_id = o.object_id
2099
+ INNER JOIN sys.schemas s ON o.schema_id = s.schema_id
2100
+ WHERE i.name = @indexName
2101
+ AND s.name = @schemaName
2102
+ ORDER BY ic.key_ordinal
2103
+ `);
2104
+ indexes.push({
2105
+ name: row.name,
2106
+ table: row.table,
2107
+ columns: colResult.recordset.map((c) => c.column_name),
2108
+ unique: row.is_unique || false,
2109
+ size: row.size || "0 MB",
2110
+ definition: ""
2111
+ // MSSQL doesn't store definition like PG
2112
+ });
2113
+ }
2114
+ return indexes;
2115
+ } catch (error) {
2116
+ throw new MastraError(
2117
+ {
2118
+ id: "MASTRA_STORAGE_MSSQL_INDEX_LIST_FAILED",
2119
+ domain: ErrorDomain.STORAGE,
2120
+ category: ErrorCategory.THIRD_PARTY,
2121
+ details: tableName ? {
2122
+ tableName
2123
+ } : {}
2124
+ },
2125
+ error
2126
+ );
2127
+ }
2128
+ }
2129
+ /**
2130
+ * Get detailed statistics for a specific index
2131
+ */
2132
+ async describeIndex(indexName) {
2133
+ try {
2134
+ const schemaName = this.schemaName || "dbo";
2135
+ const request = this.pool.request();
2136
+ request.input("indexName", indexName);
2137
+ request.input("schemaName", schemaName);
2138
+ const query = `
2139
+ SELECT
2140
+ i.name as name,
2141
+ o.name as [table],
2142
+ i.is_unique as is_unique,
2143
+ CAST(SUM(s.used_page_count) * 8 / 1024.0 AS VARCHAR(50)) + ' MB' as size,
2144
+ i.type_desc as method,
2145
+ ISNULL(us.user_scans, 0) as scans,
2146
+ ISNULL(us.user_seeks + us.user_scans, 0) as tuples_read,
2147
+ ISNULL(us.user_lookups, 0) as tuples_fetched
2148
+ FROM sys.indexes i
2149
+ INNER JOIN sys.objects o ON i.object_id = o.object_id
2150
+ INNER JOIN sys.schemas sch ON o.schema_id = sch.schema_id
2151
+ LEFT JOIN sys.dm_db_partition_stats s ON i.object_id = s.object_id AND i.index_id = s.index_id
2152
+ LEFT JOIN sys.dm_db_index_usage_stats us ON i.object_id = us.object_id AND i.index_id = us.index_id
2153
+ WHERE i.name = @indexName
2154
+ AND sch.name = @schemaName
2155
+ GROUP BY i.name, o.name, i.is_unique, i.type_desc, us.user_seeks, us.user_scans, us.user_lookups
2156
+ `;
2157
+ const result = await request.query(query);
2158
+ if (!result.recordset || result.recordset.length === 0) {
2159
+ throw new Error(`Index "${indexName}" not found in schema "${schemaName}"`);
2160
+ }
2161
+ const row = result.recordset[0];
2162
+ const colRequest = this.pool.request();
2163
+ colRequest.input("indexName", indexName);
2164
+ colRequest.input("schemaName", schemaName);
2165
+ const colResult = await colRequest.query(`
2166
+ SELECT c.name as column_name
2167
+ FROM sys.indexes i
2168
+ INNER JOIN sys.index_columns ic ON i.object_id = ic.object_id AND i.index_id = ic.index_id
2169
+ INNER JOIN sys.columns c ON ic.object_id = c.object_id AND ic.column_id = c.column_id
2170
+ INNER JOIN sys.objects o ON i.object_id = o.object_id
2171
+ INNER JOIN sys.schemas s ON o.schema_id = s.schema_id
2172
+ WHERE i.name = @indexName
2173
+ AND s.name = @schemaName
2174
+ ORDER BY ic.key_ordinal
2175
+ `);
2176
+ return {
2177
+ name: row.name,
2178
+ table: row.table,
2179
+ columns: colResult.recordset.map((c) => c.column_name),
2180
+ unique: row.is_unique || false,
2181
+ size: row.size || "0 MB",
2182
+ definition: "",
2183
+ method: row.method?.toLowerCase() || "nonclustered",
2184
+ scans: Number(row.scans) || 0,
2185
+ tuples_read: Number(row.tuples_read) || 0,
2186
+ tuples_fetched: Number(row.tuples_fetched) || 0
2187
+ };
2188
+ } catch (error) {
1383
2189
  throw new MastraError(
1384
2190
  {
1385
- id: "MASTRA_STORAGE_MSSQL_STORE_BATCH_INSERT_FAILED",
2191
+ id: "MASTRA_STORAGE_MSSQL_INDEX_DESCRIBE_FAILED",
1386
2192
  domain: ErrorDomain.STORAGE,
1387
2193
  category: ErrorCategory.THIRD_PARTY,
1388
2194
  details: {
1389
- tableName,
1390
- numberOfRecords: records.length
2195
+ indexName
1391
2196
  }
1392
2197
  },
1393
2198
  error
1394
2199
  );
1395
2200
  }
1396
2201
  }
1397
- async dropTable({ tableName }) {
2202
+ /**
2203
+ * Returns definitions for automatic performance indexes
2204
+ * IMPORTANT: Uses seq_id DESC instead of createdAt DESC for MSSQL due to millisecond accuracy limitations
2205
+ * NOTE: Using NVARCHAR(400) for text columns (800 bytes) leaves room for composite indexes
2206
+ */
2207
+ getAutomaticIndexDefinitions() {
2208
+ const schemaPrefix = this.schemaName ? `${this.schemaName}_` : "";
2209
+ return [
2210
+ // Composite indexes for optimal filtering + sorting performance
2211
+ // NVARCHAR(400) = 800 bytes, plus BIGINT (8 bytes) = 808 bytes total (under 900-byte limit)
2212
+ {
2213
+ name: `${schemaPrefix}mastra_threads_resourceid_seqid_idx`,
2214
+ table: TABLE_THREADS,
2215
+ columns: ["resourceId", "seq_id DESC"]
2216
+ },
2217
+ {
2218
+ name: `${schemaPrefix}mastra_messages_thread_id_seqid_idx`,
2219
+ table: TABLE_MESSAGES,
2220
+ columns: ["thread_id", "seq_id DESC"]
2221
+ },
2222
+ {
2223
+ name: `${schemaPrefix}mastra_traces_name_seqid_idx`,
2224
+ table: TABLE_TRACES,
2225
+ columns: ["name", "seq_id DESC"]
2226
+ },
2227
+ {
2228
+ name: `${schemaPrefix}mastra_scores_trace_id_span_id_seqid_idx`,
2229
+ table: TABLE_SCORERS,
2230
+ columns: ["traceId", "spanId", "seq_id DESC"]
2231
+ },
2232
+ // AI Spans indexes for optimal trace querying
2233
+ {
2234
+ name: `${schemaPrefix}mastra_ai_spans_traceid_startedat_idx`,
2235
+ table: TABLE_AI_SPANS,
2236
+ columns: ["traceId", "startedAt DESC"]
2237
+ },
2238
+ {
2239
+ name: `${schemaPrefix}mastra_ai_spans_parentspanid_startedat_idx`,
2240
+ table: TABLE_AI_SPANS,
2241
+ columns: ["parentSpanId", "startedAt DESC"]
2242
+ },
2243
+ {
2244
+ name: `${schemaPrefix}mastra_ai_spans_name_idx`,
2245
+ table: TABLE_AI_SPANS,
2246
+ columns: ["name"]
2247
+ },
2248
+ {
2249
+ name: `${schemaPrefix}mastra_ai_spans_spantype_startedat_idx`,
2250
+ table: TABLE_AI_SPANS,
2251
+ columns: ["spanType", "startedAt DESC"]
2252
+ }
2253
+ ];
2254
+ }
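Two details of these definitions worth spelling out: the configured schema name is baked into the index name so multiple schemas in one database do not collide, and the composite keys are sized so an NVARCHAR(400) column (2 bytes per character) plus the BIGINT `seq_id` stays inside the key-size budget noted in the comment above. A quick illustration of both, assuming a store configured with `schemaName: 'support'`:

```ts
// Index naming: the schema prefix keeps names unique per schema.
const schemaName = 'support';
const schemaPrefix = schemaName ? `${schemaName}_` : '';
const indexName = `${schemaPrefix}mastra_threads_resourceid_seqid_idx`;
// -> 'support_mastra_threads_resourceid_seqid_idx'

// Key-size estimate for ["resourceId", "seq_id DESC"]:
//   NVARCHAR(400) -> 400 chars * 2 bytes = 800 bytes
//   BIGINT seq_id ->                        8 bytes
const keyBytes = 400 * 2 + 8; // 808, under the 900-byte limit cited above
```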
2255
+ /**
2256
+ * Creates automatic indexes for optimal query performance
2257
+ * Uses getAutomaticIndexDefinitions() to determine which indexes to create
2258
+ */
2259
+ async createAutomaticIndexes() {
1398
2260
  try {
1399
- const tableNameWithSchema = getTableName({ indexName: tableName, schemaName: getSchemaName(this.schemaName) });
1400
- await this.pool.request().query(`DROP TABLE IF EXISTS ${tableNameWithSchema}`);
2261
+ const indexes = this.getAutomaticIndexDefinitions();
2262
+ for (const indexOptions of indexes) {
2263
+ try {
2264
+ await this.createIndex(indexOptions);
2265
+ } catch (error) {
2266
+ this.logger?.warn?.(`Failed to create index ${indexOptions.name}:`, error);
2267
+ }
2268
+ }
1401
2269
  } catch (error) {
1402
2270
  throw new MastraError(
1403
2271
  {
1404
- id: "MASTRA_STORAGE_MSSQL_STORE_DROP_TABLE_FAILED",
2272
+ id: "MASTRA_STORAGE_MSSQL_STORE_CREATE_PERFORMANCE_INDEXES_FAILED",
1405
2273
  domain: ErrorDomain.STORAGE,
1406
- category: ErrorCategory.THIRD_PARTY,
1407
- details: {
1408
- tableName
1409
- }
2274
+ category: ErrorCategory.THIRD_PARTY
1410
2275
  },
1411
2276
  error
1412
2277
  );
1413
2278
  }
1414
2279
  }
1415
2280
  };
1416
- function parseJSON(jsonString) {
1417
- try {
1418
- return JSON.parse(jsonString);
1419
- } catch {
1420
- return jsonString;
1421
- }
1422
- }
1423
2281
  function transformScoreRow(row) {
1424
2282
  return {
1425
2283
  ...row,
1426
- input: parseJSON(row.input),
1427
- scorer: parseJSON(row.scorer),
1428
- preprocessStepResult: parseJSON(row.preprocessStepResult),
1429
- analyzeStepResult: parseJSON(row.analyzeStepResult),
1430
- metadata: parseJSON(row.metadata),
1431
- output: parseJSON(row.output),
1432
- additionalContext: parseJSON(row.additionalContext),
1433
- runtimeContext: parseJSON(row.runtimeContext),
1434
- entity: parseJSON(row.entity),
2284
+ input: safelyParseJSON(row.input),
2285
+ scorer: safelyParseJSON(row.scorer),
2286
+ preprocessStepResult: safelyParseJSON(row.preprocessStepResult),
2287
+ analyzeStepResult: safelyParseJSON(row.analyzeStepResult),
2288
+ metadata: safelyParseJSON(row.metadata),
2289
+ output: safelyParseJSON(row.output),
2290
+ additionalContext: safelyParseJSON(row.additionalContext),
2291
+ requestContext: safelyParseJSON(row.requestContext),
2292
+ entity: safelyParseJSON(row.entity),
1435
2293
  createdAt: row.createdAt,
1436
2294
  updatedAt: row.updatedAt
1437
2295
  };
@@ -1474,8 +2332,21 @@ var ScoresMSSQL = class extends ScoresStorage {
1474
2332
  }
1475
2333
  }
1476
2334
  async saveScore(score) {
2335
+ let validatedScore;
2336
+ try {
2337
+ validatedScore = saveScorePayloadSchema.parse(score);
2338
+ } catch (error) {
2339
+ throw new MastraError(
2340
+ {
2341
+ id: "MASTRA_STORAGE_MSSQL_STORE_SAVE_SCORE_VALIDATION_FAILED",
2342
+ domain: ErrorDomain.STORAGE,
2343
+ category: ErrorCategory.THIRD_PARTY
2344
+ },
2345
+ error
2346
+ );
2347
+ }
1477
2348
  try {
1478
- const scoreId = crypto.randomUUID();
2349
+ const scoreId = randomUUID();
1479
2350
  const {
1480
2351
  scorer,
1481
2352
  preprocessStepResult,
@@ -1484,24 +2355,24 @@ var ScoresMSSQL = class extends ScoresStorage {
1484
2355
  input,
1485
2356
  output,
1486
2357
  additionalContext,
1487
- runtimeContext,
2358
+ requestContext,
1488
2359
  entity,
1489
2360
  ...rest
1490
- } = score;
2361
+ } = validatedScore;
1491
2362
  await this.operations.insert({
1492
2363
  tableName: TABLE_SCORERS,
1493
2364
  record: {
1494
2365
  id: scoreId,
1495
2366
  ...rest,
1496
- input: JSON.stringify(input) || "",
1497
- output: JSON.stringify(output) || "",
1498
- preprocessStepResult: preprocessStepResult ? JSON.stringify(preprocessStepResult) : null,
1499
- analyzeStepResult: analyzeStepResult ? JSON.stringify(analyzeStepResult) : null,
1500
- metadata: metadata ? JSON.stringify(metadata) : null,
1501
- additionalContext: additionalContext ? JSON.stringify(additionalContext) : null,
1502
- runtimeContext: runtimeContext ? JSON.stringify(runtimeContext) : null,
1503
- entity: entity ? JSON.stringify(entity) : null,
1504
- scorer: scorer ? JSON.stringify(scorer) : null,
2367
+ input: input || "",
2368
+ output: output || "",
2369
+ preprocessStepResult: preprocessStepResult || null,
2370
+ analyzeStepResult: analyzeStepResult || null,
2371
+ metadata: metadata || null,
2372
+ additionalContext: additionalContext || null,
2373
+ requestContext: requestContext || null,
2374
+ entity: entity || null,
2375
+ scorer: scorer || null,
1505
2376
  createdAt: (/* @__PURE__ */ new Date()).toISOString(),
1506
2377
  updatedAt: (/* @__PURE__ */ new Date()).toISOString()
1507
2378
  }
@@ -1519,41 +2390,70 @@ var ScoresMSSQL = class extends ScoresStorage {
1519
2390
  );
1520
2391
  }
1521
2392
  }
1522
- async getScoresByScorerId({
2393
+ async listScoresByScorerId({
1523
2394
  scorerId,
1524
- pagination
2395
+ pagination,
2396
+ entityId,
2397
+ entityType,
2398
+ source
1525
2399
  }) {
1526
2400
  try {
1527
- const request = this.pool.request();
1528
- request.input("p1", scorerId);
1529
- const totalResult = await request.query(
1530
- `SELECT COUNT(*) as count FROM ${getTableName({ indexName: TABLE_SCORERS, schemaName: getSchemaName(this.schema) })} WHERE [scorerId] = @p1`
1531
- );
2401
+ const conditions = ["[scorerId] = @p1"];
2402
+ const params = { p1: scorerId };
2403
+ let paramIndex = 2;
2404
+ if (entityId) {
2405
+ conditions.push(`[entityId] = @p${paramIndex}`);
2406
+ params[`p${paramIndex}`] = entityId;
2407
+ paramIndex++;
2408
+ }
2409
+ if (entityType) {
2410
+ conditions.push(`[entityType] = @p${paramIndex}`);
2411
+ params[`p${paramIndex}`] = entityType;
2412
+ paramIndex++;
2413
+ }
2414
+ if (source) {
2415
+ conditions.push(`[source] = @p${paramIndex}`);
2416
+ params[`p${paramIndex}`] = source;
2417
+ paramIndex++;
2418
+ }
2419
+ const whereClause = conditions.join(" AND ");
2420
+ const tableName = getTableName({ indexName: TABLE_SCORERS, schemaName: getSchemaName(this.schema) });
2421
+ const countRequest = this.pool.request();
2422
+ Object.entries(params).forEach(([key, value]) => {
2423
+ countRequest.input(key, value);
2424
+ });
2425
+ const totalResult = await countRequest.query(`SELECT COUNT(*) as count FROM ${tableName} WHERE ${whereClause}`);
1532
2426
  const total = totalResult.recordset[0]?.count || 0;
2427
+ const { page, perPage: perPageInput } = pagination;
1533
2428
  if (total === 0) {
1534
2429
  return {
1535
2430
  pagination: {
1536
2431
  total: 0,
1537
- page: pagination.page,
1538
- perPage: pagination.perPage,
2432
+ page,
2433
+ perPage: perPageInput,
1539
2434
  hasMore: false
1540
2435
  },
1541
2436
  scores: []
1542
2437
  };
1543
2438
  }
2439
+ const perPage = normalizePerPage(perPageInput, 100);
2440
+ const { offset: start, perPage: perPageForResponse } = calculatePagination(page, perPageInput, perPage);
2441
+ const limitValue = perPageInput === false ? total : perPage;
2442
+ const end = perPageInput === false ? total : start + perPage;
1544
2443
  const dataRequest = this.pool.request();
1545
- dataRequest.input("p1", scorerId);
1546
- dataRequest.input("p2", pagination.perPage);
1547
- dataRequest.input("p3", pagination.page * pagination.perPage);
1548
- const result = await dataRequest.query(
1549
- `SELECT * FROM ${getTableName({ indexName: TABLE_SCORERS, schemaName: getSchemaName(this.schema) })} WHERE [scorerId] = @p1 ORDER BY [createdAt] DESC OFFSET @p3 ROWS FETCH NEXT @p2 ROWS ONLY`
1550
- );
2444
+ Object.entries(params).forEach(([key, value]) => {
2445
+ dataRequest.input(key, value);
2446
+ });
2447
+ dataRequest.input("perPage", limitValue);
2448
+ dataRequest.input("offset", start);
2449
+ const dataQuery = `SELECT * FROM ${tableName} WHERE ${whereClause} ORDER BY [createdAt] DESC OFFSET @offset ROWS FETCH NEXT @perPage ROWS ONLY`;
2450
+ const result = await dataRequest.query(dataQuery);
1551
2451
  return {
1552
2452
  pagination: {
1553
2453
  total: Number(total),
1554
- page: pagination.page,
1555
- perPage: pagination.perPage,
1556
- hasMore: Number(total) > (pagination.page + 1) * pagination.perPage
2454
+ page,
2455
+ perPage: perPageForResponse,
2456
+ hasMore: end < total
1557
2457
  },
1558
2458
  scores: result.recordset.map((row) => transformScoreRow(row))
1559
2459
  };
@@ -1569,7 +2469,7 @@ var ScoresMSSQL = class extends ScoresStorage {
1569
2469
  );
1570
2470
  }
1571
2471
  }
1572
- async getScoresByRunId({
2472
+ async listScoresByRunId({
1573
2473
  runId,
1574
2474
  pagination
1575
2475
  }) {
@@ -1580,30 +2480,35 @@ var ScoresMSSQL = class extends ScoresStorage {
1580
2480
  `SELECT COUNT(*) as count FROM ${getTableName({ indexName: TABLE_SCORERS, schemaName: getSchemaName(this.schema) })} WHERE [runId] = @p1`
1581
2481
  );
1582
2482
  const total = totalResult.recordset[0]?.count || 0;
2483
+ const { page, perPage: perPageInput } = pagination;
1583
2484
  if (total === 0) {
1584
2485
  return {
1585
2486
  pagination: {
1586
2487
  total: 0,
1587
- page: pagination.page,
1588
- perPage: pagination.perPage,
2488
+ page,
2489
+ perPage: perPageInput,
1589
2490
  hasMore: false
1590
2491
  },
1591
2492
  scores: []
1592
2493
  };
1593
2494
  }
2495
+ const perPage = normalizePerPage(perPageInput, 100);
2496
+ const { offset: start, perPage: perPageForResponse } = calculatePagination(page, perPageInput, perPage);
2497
+ const limitValue = perPageInput === false ? total : perPage;
2498
+ const end = perPageInput === false ? total : start + perPage;
1594
2499
  const dataRequest = this.pool.request();
1595
2500
  dataRequest.input("p1", runId);
1596
- dataRequest.input("p2", pagination.perPage);
1597
- dataRequest.input("p3", pagination.page * pagination.perPage);
2501
+ dataRequest.input("p2", limitValue);
2502
+ dataRequest.input("p3", start);
1598
2503
  const result = await dataRequest.query(
1599
2504
  `SELECT * FROM ${getTableName({ indexName: TABLE_SCORERS, schemaName: getSchemaName(this.schema) })} WHERE [runId] = @p1 ORDER BY [createdAt] DESC OFFSET @p3 ROWS FETCH NEXT @p2 ROWS ONLY`
1600
2505
  );
1601
2506
  return {
1602
2507
  pagination: {
1603
2508
  total: Number(total),
1604
- page: pagination.page,
1605
- perPage: pagination.perPage,
1606
- hasMore: Number(total) > (pagination.page + 1) * pagination.perPage
2509
+ page,
2510
+ perPage: perPageForResponse,
2511
+ hasMore: end < total
1607
2512
  },
1608
2513
  scores: result.recordset.map((row) => transformScoreRow(row))
1609
2514
  };
@@ -1619,7 +2524,7 @@ var ScoresMSSQL = class extends ScoresStorage {
1619
2524
  );
1620
2525
  }
1621
2526
  }
1622
- async getScoresByEntityId({
2527
+ async listScoresByEntityId({
1623
2528
  entityId,
1624
2529
  entityType,
1625
2530
  pagination
@@ -1632,31 +2537,36 @@ var ScoresMSSQL = class extends ScoresStorage {
1632
2537
  `SELECT COUNT(*) as count FROM ${getTableName({ indexName: TABLE_SCORERS, schemaName: getSchemaName(this.schema) })} WHERE [entityId] = @p1 AND [entityType] = @p2`
1633
2538
  );
1634
2539
  const total = totalResult.recordset[0]?.count || 0;
2540
+ const { page, perPage: perPageInput } = pagination;
2541
+ const perPage = normalizePerPage(perPageInput, 100);
2542
+ const { offset: start, perPage: perPageForResponse } = calculatePagination(page, perPageInput, perPage);
1635
2543
  if (total === 0) {
1636
2544
  return {
1637
2545
  pagination: {
1638
2546
  total: 0,
1639
- page: pagination.page,
1640
- perPage: pagination.perPage,
2547
+ page,
2548
+ perPage: perPageForResponse,
1641
2549
  hasMore: false
1642
2550
  },
1643
2551
  scores: []
1644
2552
  };
1645
2553
  }
2554
+ const limitValue = perPageInput === false ? total : perPage;
2555
+ const end = perPageInput === false ? total : start + perPage;
1646
2556
  const dataRequest = this.pool.request();
1647
2557
  dataRequest.input("p1", entityId);
1648
2558
  dataRequest.input("p2", entityType);
1649
- dataRequest.input("p3", pagination.perPage);
1650
- dataRequest.input("p4", pagination.page * pagination.perPage);
2559
+ dataRequest.input("p3", limitValue);
2560
+ dataRequest.input("p4", start);
1651
2561
  const result = await dataRequest.query(
1652
2562
  `SELECT * FROM ${getTableName({ indexName: TABLE_SCORERS, schemaName: getSchemaName(this.schema) })} WHERE [entityId] = @p1 AND [entityType] = @p2 ORDER BY [createdAt] DESC OFFSET @p4 ROWS FETCH NEXT @p3 ROWS ONLY`
1653
2563
  );
1654
2564
  return {
1655
2565
  pagination: {
1656
2566
  total: Number(total),
1657
- page: pagination.page,
1658
- perPage: pagination.perPage,
1659
- hasMore: Number(total) > (pagination.page + 1) * pagination.perPage
2567
+ page,
2568
+ perPage: perPageForResponse,
2569
+ hasMore: end < total
1660
2570
  },
1661
2571
  scores: result.recordset.map((row) => transformScoreRow(row))
1662
2572
  };
@@ -1672,8 +2582,66 @@ var ScoresMSSQL = class extends ScoresStorage {
1672
2582
  );
1673
2583
  }
1674
2584
  }
2585
+ async listScoresBySpan({
2586
+ traceId,
2587
+ spanId,
2588
+ pagination
2589
+ }) {
2590
+ try {
2591
+ const request = this.pool.request();
2592
+ request.input("p1", traceId);
2593
+ request.input("p2", spanId);
2594
+ const totalResult = await request.query(
2595
+ `SELECT COUNT(*) as count FROM ${getTableName({ indexName: TABLE_SCORERS, schemaName: getSchemaName(this.schema) })} WHERE [traceId] = @p1 AND [spanId] = @p2`
2596
+ );
2597
+ const total = totalResult.recordset[0]?.count || 0;
2598
+ const { page, perPage: perPageInput } = pagination;
2599
+ const perPage = normalizePerPage(perPageInput, 100);
2600
+ const { offset: start, perPage: perPageForResponse } = calculatePagination(page, perPageInput, perPage);
2601
+ if (total === 0) {
2602
+ return {
2603
+ pagination: {
2604
+ total: 0,
2605
+ page,
2606
+ perPage: perPageForResponse,
2607
+ hasMore: false
2608
+ },
2609
+ scores: []
2610
+ };
2611
+ }
2612
+ const limitValue = perPageInput === false ? total : perPage;
2613
+ const end = perPageInput === false ? total : start + perPage;
2614
+ const dataRequest = this.pool.request();
2615
+ dataRequest.input("p1", traceId);
2616
+ dataRequest.input("p2", spanId);
2617
+ dataRequest.input("p3", limitValue);
2618
+ dataRequest.input("p4", start);
2619
+ const result = await dataRequest.query(
2620
+ `SELECT * FROM ${getTableName({ indexName: TABLE_SCORERS, schemaName: getSchemaName(this.schema) })} WHERE [traceId] = @p1 AND [spanId] = @p2 ORDER BY [createdAt] DESC OFFSET @p4 ROWS FETCH NEXT @p3 ROWS ONLY`
2621
+ );
2622
+ return {
2623
+ pagination: {
2624
+ total: Number(total),
2625
+ page,
2626
+ perPage: perPageForResponse,
2627
+ hasMore: end < total
2628
+ },
2629
+ scores: result.recordset.map((row) => transformScoreRow(row))
2630
+ };
2631
+ } catch (error) {
2632
+ throw new MastraError(
2633
+ {
2634
+ id: "MASTRA_STORAGE_MSSQL_STORE_GET_SCORES_BY_SPAN_FAILED",
2635
+ domain: ErrorDomain.STORAGE,
2636
+ category: ErrorCategory.THIRD_PARTY,
2637
+ details: { traceId, spanId }
2638
+ },
2639
+ error
2640
+ );
2641
+ }
2642
+ }
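All of the `listScores*` methods above share one pagination pattern: `perPage` is normalized (defaulting to 100), `page * perPage` becomes the OFFSET, and `perPage: false` disables the limit by fetching all `total` rows. A simplified sketch of that arithmetic without the `@mastra/core` helpers, whose exact behavior is assumed here:

```ts
// Rough equivalent of the normalizePerPage / calculatePagination usage above.
function toOffsetFetch(page: number, perPageInput: number | false | undefined, total: number) {
  const perPage = perPageInput === false ? total : (perPageInput ?? 100);
  const offset = perPageInput === false ? 0 : page * perPage;
  return { offset, fetch: perPage, hasMore: offset + perPage < total };
}

toOffsetFetch(2, 50, 180);    // -> { offset: 100, fetch: 50, hasMore: true }
toOffsetFetch(0, false, 180); // -> { offset: 0, fetch: 180, hasMore: false }  ("give me everything")
```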
1675
2643
  };
1676
- var TracesMSSQL = class extends TracesStorage {
2644
+ var WorkflowsMSSQL = class extends WorkflowsStorage {
1677
2645
  pool;
1678
2646
  operations;
1679
2647
  schema;
@@ -1687,210 +2655,168 @@ var TracesMSSQL = class extends TracesStorage {
1687
2655
  this.operations = operations;
1688
2656
  this.schema = schema;
1689
2657
  }
1690
- /** @deprecated use getTracesPaginated instead*/
1691
- async getTraces(args) {
1692
- if (args.fromDate || args.toDate) {
1693
- args.dateRange = {
1694
- start: args.fromDate,
1695
- end: args.toDate
1696
- };
1697
- }
1698
- const result = await this.getTracesPaginated(args);
1699
- return result.traces;
1700
- }
1701
- async getTracesPaginated(args) {
1702
- const { name, scope, page = 0, perPage: perPageInput, attributes, filters, dateRange } = args;
1703
- const fromDate = dateRange?.start;
1704
- const toDate = dateRange?.end;
1705
- const perPage = perPageInput !== void 0 ? perPageInput : 100;
1706
- const currentOffset = page * perPage;
1707
- const paramMap = {};
1708
- const conditions = [];
1709
- let paramIndex = 1;
1710
- if (name) {
1711
- const paramName = `p${paramIndex++}`;
1712
- conditions.push(`[name] LIKE @${paramName}`);
1713
- paramMap[paramName] = `${name}%`;
1714
- }
1715
- if (scope) {
1716
- const paramName = `p${paramIndex++}`;
1717
- conditions.push(`[scope] = @${paramName}`);
1718
- paramMap[paramName] = scope;
1719
- }
1720
- if (attributes) {
1721
- Object.entries(attributes).forEach(([key, value]) => {
1722
- const parsedKey = parseFieldKey(key);
1723
- const paramName = `p${paramIndex++}`;
1724
- conditions.push(`JSON_VALUE([attributes], '$.${parsedKey}') = @${paramName}`);
1725
- paramMap[paramName] = value;
1726
- });
1727
- }
1728
- if (filters) {
1729
- Object.entries(filters).forEach(([key, value]) => {
1730
- const parsedKey = parseFieldKey(key);
1731
- const paramName = `p${paramIndex++}`;
1732
- conditions.push(`[${parsedKey}] = @${paramName}`);
1733
- paramMap[paramName] = value;
1734
- });
1735
- }
1736
- if (fromDate instanceof Date && !isNaN(fromDate.getTime())) {
1737
- const paramName = `p${paramIndex++}`;
1738
- conditions.push(`[createdAt] >= @${paramName}`);
1739
- paramMap[paramName] = fromDate.toISOString();
1740
- }
1741
- if (toDate instanceof Date && !isNaN(toDate.getTime())) {
1742
- const paramName = `p${paramIndex++}`;
1743
- conditions.push(`[createdAt] <= @${paramName}`);
1744
- paramMap[paramName] = toDate.toISOString();
2658
+ parseWorkflowRun(row) {
2659
+ let parsedSnapshot = row.snapshot;
2660
+ if (typeof parsedSnapshot === "string") {
2661
+ try {
2662
+ parsedSnapshot = JSON.parse(row.snapshot);
2663
+ } catch (e) {
2664
+ this.logger?.warn?.(`Failed to parse snapshot for workflow ${row.workflow_name}:`, e);
2665
+ }
1745
2666
  }
1746
- const whereClause = conditions.length > 0 ? `WHERE ${conditions.join(" AND ")}` : "";
1747
- const countQuery = `SELECT COUNT(*) as total FROM ${getTableName({ indexName: TABLE_TRACES, schemaName: getSchemaName(this.schema) })} ${whereClause}`;
1748
- let total = 0;
2667
+ return {
2668
+ workflowName: row.workflow_name,
2669
+ runId: row.run_id,
2670
+ snapshot: parsedSnapshot,
2671
+ createdAt: row.createdAt,
2672
+ updatedAt: row.updatedAt,
2673
+ resourceId: row.resourceId
2674
+ };
2675
+ }
2676
+ async updateWorkflowResults({
2677
+ workflowName,
2678
+ runId,
2679
+ stepId,
2680
+ result,
2681
+ requestContext
2682
+ }) {
2683
+ const table = getTableName({ indexName: TABLE_WORKFLOW_SNAPSHOT, schemaName: getSchemaName(this.schema) });
2684
+ const transaction = this.pool.transaction();
1749
2685
  try {
1750
- const countRequest = this.pool.request();
1751
- Object.entries(paramMap).forEach(([key, value]) => {
1752
- if (value instanceof Date) {
1753
- countRequest.input(key, sql2.DateTime, value);
1754
- } else {
1755
- countRequest.input(key, value);
1756
- }
1757
- });
1758
- const countResult = await countRequest.query(countQuery);
1759
- total = parseInt(countResult.recordset[0].total, 10);
2686
+ await transaction.begin();
2687
+ const selectRequest = new sql2.Request(transaction);
2688
+ selectRequest.input("workflow_name", workflowName);
2689
+ selectRequest.input("run_id", runId);
2690
+ const existingSnapshotResult = await selectRequest.query(
2691
+ `SELECT snapshot FROM ${table} WITH (UPDLOCK, HOLDLOCK) WHERE workflow_name = @workflow_name AND run_id = @run_id`
2692
+ );
2693
+ let snapshot;
2694
+ if (!existingSnapshotResult.recordset || existingSnapshotResult.recordset.length === 0) {
2695
+ snapshot = {
2696
+ context: {},
2697
+ activePaths: [],
2698
+ timestamp: Date.now(),
2699
+ suspendedPaths: {},
2700
+ resumeLabels: {},
2701
+ serializedStepGraph: [],
2702
+ value: {},
2703
+ waitingPaths: {},
2704
+ status: "pending",
2705
+ runId,
2706
+ requestContext: {}
2707
+ };
2708
+ } else {
2709
+ const existingSnapshot = existingSnapshotResult.recordset[0].snapshot;
2710
+ snapshot = typeof existingSnapshot === "string" ? JSON.parse(existingSnapshot) : existingSnapshot;
2711
+ }
2712
+ snapshot.context[stepId] = result;
2713
+ snapshot.requestContext = { ...snapshot.requestContext, ...requestContext };
2714
+ const upsertReq = new sql2.Request(transaction);
2715
+ upsertReq.input("workflow_name", workflowName);
2716
+ upsertReq.input("run_id", runId);
2717
+ upsertReq.input("snapshot", JSON.stringify(snapshot));
2718
+ upsertReq.input("createdAt", sql2.DateTime2, /* @__PURE__ */ new Date());
2719
+ upsertReq.input("updatedAt", sql2.DateTime2, /* @__PURE__ */ new Date());
2720
+ await upsertReq.query(
2721
+ `MERGE ${table} AS target
2722
+ USING (SELECT @workflow_name AS workflow_name, @run_id AS run_id) AS src
2723
+ ON target.workflow_name = src.workflow_name AND target.run_id = src.run_id
2724
+ WHEN MATCHED THEN UPDATE SET snapshot = @snapshot, [updatedAt] = @updatedAt
2725
+ WHEN NOT MATCHED THEN INSERT (workflow_name, run_id, snapshot, [createdAt], [updatedAt])
2726
+ VALUES (@workflow_name, @run_id, @snapshot, @createdAt, @updatedAt);`
2727
+ );
2728
+ await transaction.commit();
2729
+ return snapshot.context;
1760
2730
  } catch (error) {
2731
+ try {
2732
+ await transaction.rollback();
2733
+ } catch {
2734
+ }
1761
2735
  throw new MastraError(
1762
2736
  {
1763
- id: "MASTRA_STORAGE_MSSQL_STORE_GET_TRACES_PAGINATED_FAILED_TO_RETRIEVE_TOTAL_COUNT",
2737
+ id: "MASTRA_STORAGE_MSSQL_STORE_UPDATE_WORKFLOW_RESULTS_FAILED",
1764
2738
  domain: ErrorDomain.STORAGE,
1765
2739
  category: ErrorCategory.THIRD_PARTY,
1766
2740
  details: {
1767
- name: args.name ?? "",
1768
- scope: args.scope ?? ""
2741
+ workflowName,
2742
+ runId,
2743
+ stepId
1769
2744
  }
1770
2745
  },
1771
2746
  error
1772
2747
  );
1773
2748
  }
1774
- if (total === 0) {
1775
- return {
1776
- traces: [],
1777
- total: 0,
1778
- page,
1779
- perPage,
1780
- hasMore: false
1781
- };
1782
- }
1783
- const dataQuery = `SELECT * FROM ${getTableName({ indexName: TABLE_TRACES, schemaName: getSchemaName(this.schema) })} ${whereClause} ORDER BY [seq_id] DESC OFFSET @offset ROWS FETCH NEXT @limit ROWS ONLY`;
1784
- const dataRequest = this.pool.request();
1785
- Object.entries(paramMap).forEach(([key, value]) => {
1786
- if (value instanceof Date) {
1787
- dataRequest.input(key, sql2.DateTime, value);
1788
- } else {
1789
- dataRequest.input(key, value);
1790
- }
1791
- });
1792
- dataRequest.input("offset", currentOffset);
1793
- dataRequest.input("limit", perPage);
2749
+ }
2750
+ async updateWorkflowState({
2751
+ workflowName,
2752
+ runId,
2753
+ opts
2754
+ }) {
2755
+ const table = getTableName({ indexName: TABLE_WORKFLOW_SNAPSHOT, schemaName: getSchemaName(this.schema) });
2756
+ const transaction = this.pool.transaction();
1794
2757
  try {
1795
- const rowsResult = await dataRequest.query(dataQuery);
1796
- const rows = rowsResult.recordset;
1797
- const traces = rows.map((row) => ({
1798
- id: row.id,
1799
- parentSpanId: row.parentSpanId,
1800
- traceId: row.traceId,
1801
- name: row.name,
1802
- scope: row.scope,
1803
- kind: row.kind,
1804
- status: JSON.parse(row.status),
1805
- events: JSON.parse(row.events),
1806
- links: JSON.parse(row.links),
1807
- attributes: JSON.parse(row.attributes),
1808
- startTime: row.startTime,
1809
- endTime: row.endTime,
1810
- other: row.other,
1811
- createdAt: row.createdAt
1812
- }));
1813
- return {
1814
- traces,
1815
- total,
1816
- page,
1817
- perPage,
1818
- hasMore: currentOffset + traces.length < total
1819
- };
2758
+ await transaction.begin();
2759
+ const selectRequest = new sql2.Request(transaction);
2760
+ selectRequest.input("workflow_name", workflowName);
2761
+ selectRequest.input("run_id", runId);
2762
+ const existingSnapshotResult = await selectRequest.query(
2763
+ `SELECT snapshot FROM ${table} WITH (UPDLOCK, HOLDLOCK) WHERE workflow_name = @workflow_name AND run_id = @run_id`
2764
+ );
2765
+ if (!existingSnapshotResult.recordset || existingSnapshotResult.recordset.length === 0) {
2766
+ await transaction.rollback();
2767
+ return void 0;
2768
+ }
2769
+ const existingSnapshot = existingSnapshotResult.recordset[0].snapshot;
2770
+ const snapshot = typeof existingSnapshot === "string" ? JSON.parse(existingSnapshot) : existingSnapshot;
2771
+ if (!snapshot || !snapshot?.context) {
2772
+ await transaction.rollback();
2773
+ throw new MastraError(
2774
+ {
2775
+ id: "MASTRA_STORAGE_MSSQL_STORE_UPDATE_WORKFLOW_STATE_SNAPSHOT_NOT_FOUND",
2776
+ domain: ErrorDomain.STORAGE,
2777
+ category: ErrorCategory.SYSTEM,
2778
+ details: {
2779
+ workflowName,
2780
+ runId
2781
+ }
2782
+ },
2783
+ new Error(`Snapshot not found for runId ${runId}`)
2784
+ );
2785
+ }
2786
+ const updatedSnapshot = { ...snapshot, ...opts };
2787
+ const updateRequest = new sql2.Request(transaction);
2788
+ updateRequest.input("snapshot", JSON.stringify(updatedSnapshot));
2789
+ updateRequest.input("workflow_name", workflowName);
2790
+ updateRequest.input("run_id", runId);
2791
+ updateRequest.input("updatedAt", sql2.DateTime2, /* @__PURE__ */ new Date());
2792
+ await updateRequest.query(
2793
+ `UPDATE ${table} SET snapshot = @snapshot, [updatedAt] = @updatedAt WHERE workflow_name = @workflow_name AND run_id = @run_id`
2794
+ );
2795
+ await transaction.commit();
2796
+ return updatedSnapshot;
1820
2797
  } catch (error) {
2798
+ try {
2799
+ await transaction.rollback();
2800
+ } catch {
2801
+ }
1821
2802
  throw new MastraError(
1822
2803
  {
1823
- id: "MASTRA_STORAGE_MSSQL_STORE_GET_TRACES_PAGINATED_FAILED_TO_RETRIEVE_TRACES",
2804
+ id: "MASTRA_STORAGE_MSSQL_STORE_UPDATE_WORKFLOW_STATE_FAILED",
1824
2805
  domain: ErrorDomain.STORAGE,
1825
2806
  category: ErrorCategory.THIRD_PARTY,
1826
2807
  details: {
1827
- name: args.name ?? "",
1828
- scope: args.scope ?? ""
2808
+ workflowName,
2809
+ runId
1829
2810
  }
1830
2811
  },
1831
2812
  error
1832
2813
  );
1833
2814
  }
1834
2815
  }
1835
- async batchTraceInsert({ records }) {
1836
- this.logger.debug("Batch inserting traces", { count: records.length });
1837
- await this.operations.batchInsert({
1838
- tableName: TABLE_TRACES,
1839
- records
1840
- });
1841
- }
1842
- };
1843
- function parseWorkflowRun(row) {
1844
- let parsedSnapshot = row.snapshot;
1845
- if (typeof parsedSnapshot === "string") {
1846
- try {
1847
- parsedSnapshot = JSON.parse(row.snapshot);
1848
- } catch (e) {
1849
- console.warn(`Failed to parse snapshot for workflow ${row.workflow_name}: ${e}`);
1850
- }
1851
- }
1852
- return {
1853
- workflowName: row.workflow_name,
1854
- runId: row.run_id,
1855
- snapshot: parsedSnapshot,
1856
- createdAt: row.createdAt,
1857
- updatedAt: row.updatedAt,
1858
- resourceId: row.resourceId
1859
- };
1860
- }
1861
- var WorkflowsMSSQL = class extends WorkflowsStorage {
1862
- pool;
1863
- operations;
1864
- schema;
1865
- constructor({
1866
- pool,
1867
- operations,
1868
- schema
1869
- }) {
1870
- super();
1871
- this.pool = pool;
1872
- this.operations = operations;
1873
- this.schema = schema;
1874
- }
1875
- updateWorkflowResults({
1876
- // workflowName,
1877
- // runId,
1878
- // stepId,
1879
- // result,
1880
- // runtimeContext,
1881
- }) {
1882
- throw new Error("Method not implemented.");
1883
- }
1884
- updateWorkflowState({
1885
- // workflowName,
1886
- // runId,
1887
- // opts,
1888
- }) {
1889
- throw new Error("Method not implemented.");
1890
- }
1891
2816
  async persistWorkflowSnapshot({
1892
2817
  workflowName,
1893
2818
  runId,
2819
+ resourceId,
1894
2820
  snapshot
1895
2821
  }) {
1896
2822
  const table = getTableName({ indexName: TABLE_WORKFLOW_SNAPSHOT, schemaName: getSchemaName(this.schema) });
@@ -1899,6 +2825,7 @@ var WorkflowsMSSQL = class extends WorkflowsStorage {
1899
2825
  const request = this.pool.request();
1900
2826
  request.input("workflow_name", workflowName);
1901
2827
  request.input("run_id", runId);
2828
+ request.input("resourceId", resourceId);
1902
2829
  request.input("snapshot", JSON.stringify(snapshot));
1903
2830
  request.input("createdAt", sql2.DateTime2, new Date(now));
1904
2831
  request.input("updatedAt", sql2.DateTime2, new Date(now));
@@ -1906,10 +2833,11 @@ var WorkflowsMSSQL = class extends WorkflowsStorage {
1906
2833
  USING (SELECT @workflow_name AS workflow_name, @run_id AS run_id) AS src
1907
2834
  ON target.workflow_name = src.workflow_name AND target.run_id = src.run_id
1908
2835
  WHEN MATCHED THEN UPDATE SET
2836
+ resourceId = @resourceId,
1909
2837
  snapshot = @snapshot,
1910
2838
  [updatedAt] = @updatedAt
1911
- WHEN NOT MATCHED THEN INSERT (workflow_name, run_id, snapshot, [createdAt], [updatedAt])
1912
- VALUES (@workflow_name, @run_id, @snapshot, @createdAt, @updatedAt);`;
2839
+ WHEN NOT MATCHED THEN INSERT (workflow_name, run_id, resourceId, snapshot, [createdAt], [updatedAt])
2840
+ VALUES (@workflow_name, @run_id, @resourceId, @snapshot, @createdAt, @updatedAt);`;
1913
2841
  await request.query(mergeSql);
1914
2842
  } catch (error) {
1915
2843
  throw new MastraError(
@@ -1981,7 +2909,7 @@ var WorkflowsMSSQL = class extends WorkflowsStorage {
1981
2909
  if (!result.recordset || result.recordset.length === 0) {
1982
2910
  return null;
1983
2911
  }
1984
- return parseWorkflowRun(result.recordset[0]);
2912
+ return this.parseWorkflowRun(result.recordset[0]);
1985
2913
  } catch (error) {
1986
2914
  throw new MastraError(
1987
2915
  {
@@ -1997,12 +2925,12 @@ var WorkflowsMSSQL = class extends WorkflowsStorage {
1997
2925
  );
1998
2926
  }
1999
2927
  }
2000
- async getWorkflowRuns({
2928
+ async listWorkflowRuns({
2001
2929
  workflowName,
2002
2930
  fromDate,
2003
2931
  toDate,
2004
- limit,
2005
- offset,
2932
+ page,
2933
+ perPage,
2006
2934
  resourceId
2007
2935
  } = {}) {
2008
2936
  try {
@@ -2018,7 +2946,7 @@ var WorkflowsMSSQL = class extends WorkflowsStorage {
2018
2946
  conditions.push(`[resourceId] = @resourceId`);
2019
2947
  paramMap["resourceId"] = resourceId;
2020
2948
  } else {
2021
- console.warn(`[${TABLE_WORKFLOW_SNAPSHOT}] resourceId column not found. Skipping resourceId filter.`);
2949
+ this.logger?.warn?.(`[${TABLE_WORKFLOW_SNAPSHOT}] resourceId column not found. Skipping resourceId filter.`);
2022
2950
  }
2023
2951
  }
2024
2952
  if (fromDate instanceof Date && !isNaN(fromDate.getTime())) {
@@ -2040,24 +2968,27 @@ var WorkflowsMSSQL = class extends WorkflowsStorage {
2040
2968
  request.input(key, value);
2041
2969
  }
2042
2970
  });
2043
- if (limit !== void 0 && offset !== void 0) {
2971
+ const usePagination = typeof perPage === "number" && typeof page === "number";
2972
+ if (usePagination) {
2044
2973
  const countQuery = `SELECT COUNT(*) as count FROM ${tableName} ${whereClause}`;
2045
2974
  const countResult = await request.query(countQuery);
2046
2975
  total = Number(countResult.recordset[0]?.count || 0);
2047
2976
  }
2048
2977
  let query = `SELECT * FROM ${tableName} ${whereClause} ORDER BY [seq_id] DESC`;
2049
- if (limit !== void 0 && offset !== void 0) {
2050
- query += ` OFFSET @offset ROWS FETCH NEXT @limit ROWS ONLY`;
2051
- request.input("limit", limit);
2978
+ if (usePagination) {
2979
+ const normalizedPerPage = normalizePerPage(perPage, Number.MAX_SAFE_INTEGER);
2980
+ const offset = page * normalizedPerPage;
2981
+ query += ` OFFSET @offset ROWS FETCH NEXT @perPage ROWS ONLY`;
2982
+ request.input("perPage", normalizedPerPage);
2052
2983
  request.input("offset", offset);
2053
2984
  }
2054
2985
  const result = await request.query(query);
2055
- const runs = (result.recordset || []).map((row) => parseWorkflowRun(row));
2986
+ const runs = (result.recordset || []).map((row) => this.parseWorkflowRun(row));
2056
2987
  return { runs, total: total || runs.length };
2057
2988
  } catch (error) {
2058
2989
  throw new MastraError(
2059
2990
  {
2060
- id: "MASTRA_STORAGE_MSSQL_STORE_GET_WORKFLOW_RUNS_FAILED",
2991
+ id: "MASTRA_STORAGE_MSSQL_STORE_LIST_WORKFLOW_RUNS_FAILED",
2061
2992
  domain: ErrorDomain.STORAGE,
2062
2993
  category: ErrorCategory.THIRD_PARTY,
2063
2994
  details: {
@@ -2100,19 +3031,17 @@ var MSSQLStore = class extends MastraStorage {
2100
3031
  port: config.port,
2101
3032
  options: config.options || { encrypt: true, trustServerCertificate: true }
2102
3033
  });
2103
- const legacyEvals = new LegacyEvalsMSSQL({ pool: this.pool, schema: this.schema });
2104
3034
  const operations = new StoreOperationsMSSQL({ pool: this.pool, schemaName: this.schema });
2105
3035
  const scores = new ScoresMSSQL({ pool: this.pool, operations, schema: this.schema });
2106
- const traces = new TracesMSSQL({ pool: this.pool, operations, schema: this.schema });
2107
3036
  const workflows = new WorkflowsMSSQL({ pool: this.pool, operations, schema: this.schema });
2108
3037
  const memory = new MemoryMSSQL({ pool: this.pool, schema: this.schema, operations });
3038
+ const observability = new ObservabilityMSSQL({ pool: this.pool, operations, schema: this.schema });
2109
3039
  this.stores = {
2110
3040
  operations,
2111
3041
  scores,
2112
- traces,
2113
3042
  workflows,
2114
- legacyEvals,
2115
- memory
3043
+ memory,
3044
+ observability
2116
3045
  };
2117
3046
  } catch (e) {
2118
3047
  throw new MastraError(
@@ -2132,6 +3061,11 @@ var MSSQLStore = class extends MastraStorage {
2132
3061
  try {
2133
3062
  await this.isConnected;
2134
3063
  await super.init();
3064
+ try {
3065
+ await this.stores.operations.createAutomaticIndexes();
3066
+ } catch (indexError) {
3067
+ this.logger?.warn?.("Failed to create indexes:", indexError);
3068
+ }
2135
3069
  } catch (error) {
2136
3070
  this.isConnected = null;
2137
3071
  throw new MastraError(
@@ -2158,28 +3092,12 @@ var MSSQLStore = class extends MastraStorage {
2158
3092
  resourceWorkingMemory: true,
2159
3093
  hasColumn: true,
2160
3094
  createTable: true,
2161
- deleteMessages: true
3095
+ deleteMessages: true,
3096
+ listScoresBySpan: true,
3097
+ aiTracing: true,
3098
+ indexManagement: true
2162
3099
  };
2163
3100
  }
2164
- /** @deprecated use getEvals instead */
2165
- async getEvalsByAgentName(agentName, type) {
2166
- return this.stores.legacyEvals.getEvalsByAgentName(agentName, type);
2167
- }
2168
- async getEvals(options = {}) {
2169
- return this.stores.legacyEvals.getEvals(options);
2170
- }
2171
- /**
2172
- * @deprecated use getTracesPaginated instead
2173
- */
2174
- async getTraces(args) {
2175
- return this.stores.traces.getTraces(args);
2176
- }
2177
- async getTracesPaginated(args) {
2178
- return this.stores.traces.getTracesPaginated(args);
2179
- }
2180
- async batchTraceInsert({ records }) {
2181
- return this.stores.traces.batchTraceInsert({ records });
2182
- }
2183
3101
  async createTable({
2184
3102
  tableName,
2185
3103
  schema
@@ -2214,15 +3132,6 @@ var MSSQLStore = class extends MastraStorage {
2214
3132
  async getThreadById({ threadId }) {
2215
3133
  return this.stores.memory.getThreadById({ threadId });
2216
3134
  }
2217
- /**
2218
- * @deprecated use getThreadsByResourceIdPaginated instead
2219
- */
2220
- async getThreadsByResourceId(args) {
2221
- return this.stores.memory.getThreadsByResourceId(args);
2222
- }
2223
- async getThreadsByResourceIdPaginated(args) {
2224
- return this.stores.memory.getThreadsByResourceIdPaginated(args);
2225
- }
2226
3135
  async saveThread({ thread }) {
2227
3136
  return this.stores.memory.saveThread({ thread });
2228
3137
  }
@@ -2236,17 +3145,14 @@ var MSSQLStore = class extends MastraStorage {
   async deleteThread({ threadId }) {
     return this.stores.memory.deleteThread({ threadId });
   }
+  /**
+   * @deprecated use listMessages instead
+   */
   async getMessages(args) {
     return this.stores.memory.getMessages(args);
   }
-  async getMessagesById({
-    messageIds,
-    format
-  }) {
-    return this.stores.memory.getMessagesById({ messageIds, format });
-  }
-  async getMessagesPaginated(args) {
-    return this.stores.memory.getMessagesPaginated(args);
+  async listMessagesById({ messageIds }) {
+    return this.stores.memory.listMessagesById({ messageIds });
   }
   async saveMessages(args) {
     return this.stores.memory.saveMessages(args);
@@ -2280,9 +3186,9 @@ var MSSQLStore = class extends MastraStorage {
     runId,
     stepId,
     result,
-    runtimeContext
+    requestContext
   }) {
-    return this.stores.workflows.updateWorkflowResults({ workflowName, runId, stepId, result, runtimeContext });
+    return this.stores.workflows.updateWorkflowResults({ workflowName, runId, stepId, result, requestContext });
   }
   async updateWorkflowState({
     workflowName,
@@ -2294,9 +3200,10 @@ var MSSQLStore = class extends MastraStorage {
   async persistWorkflowSnapshot({
     workflowName,
     runId,
+    resourceId,
     snapshot
   }) {
-    return this.stores.workflows.persistWorkflowSnapshot({ workflowName, runId, snapshot });
+    return this.stores.workflows.persistWorkflowSnapshot({ workflowName, runId, resourceId, snapshot });
   }
   async loadWorkflowSnapshot({
     workflowName,
@@ -2304,15 +3211,15 @@ var MSSQLStore = class extends MastraStorage {
   }) {
     return this.stores.workflows.loadWorkflowSnapshot({ workflowName, runId });
   }
-  async getWorkflowRuns({
+  async listWorkflowRuns({
     workflowName,
     fromDate,
     toDate,
-    limit,
-    offset,
+    perPage,
+    page,
     resourceId
   } = {}) {
-    return this.stores.workflows.getWorkflowRuns({ workflowName, fromDate, toDate, limit, offset, resourceId });
+    return this.stores.workflows.listWorkflowRuns({ workflowName, fromDate, toDate, perPage, page, resourceId });
   }
   async getWorkflowRunById({
     runId,
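
The hunk above renames `getWorkflowRuns` to `listWorkflowRuns` and switches from offset paging (`limit`/`offset`) to page paging (`perPage`/`page`). A hedged migration sketch; the workflow name, resource id, and the zero-based-page assumption are placeholders, not taken from this diff:

import type { MSSQLStore } from "@mastra/mssql";

// Assumption: `store` is an already-initialized MSSQLStore instance.
declare const store: MSSQLStore;

// Before: await store.getWorkflowRuns({ workflowName: "daily-report", limit: 20, offset: 40 });
// After: the same query expressed with page-style paging.
const result = await store.listWorkflowRuns({
  workflowName: "daily-report", // placeholder name
  page: 2,                      // roughly replaces offset: 40 when perPage is 20 (zero-based page assumed)
  perPage: 20,                  // replaces limit: 20
  resourceId: "user-123",       // optional filter, unchanged (placeholder id)
});
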
@@ -2323,38 +3230,108 @@ var MSSQLStore = class extends MastraStorage {
   async close() {
     await this.pool.close();
   }
+  /**
+   * Index Management
+   */
+  async createIndex(options) {
+    return this.stores.operations.createIndex(options);
+  }
+  async listIndexes(tableName) {
+    return this.stores.operations.listIndexes(tableName);
+  }
+  async describeIndex(indexName) {
+    return this.stores.operations.describeIndex(indexName);
+  }
+  async dropIndex(indexName) {
+    return this.stores.operations.dropIndex(indexName);
+  }
+  /**
+   * AI Tracing / Observability
+   */
+  getObservabilityStore() {
+    if (!this.stores.observability) {
+      throw new MastraError({
+        id: "MSSQL_STORE_OBSERVABILITY_NOT_INITIALIZED",
+        domain: ErrorDomain.STORAGE,
+        category: ErrorCategory.SYSTEM,
+        text: "Observability storage is not initialized"
+      });
+    }
+    return this.stores.observability;
+  }
+  async createAISpan(span) {
+    return this.getObservabilityStore().createAISpan(span);
+  }
+  async updateAISpan({
+    spanId,
+    traceId,
+    updates
+  }) {
+    return this.getObservabilityStore().updateAISpan({ spanId, traceId, updates });
+  }
+  async getAITrace(traceId) {
+    return this.getObservabilityStore().getAITrace(traceId);
+  }
+  async getAITracesPaginated(args) {
+    return this.getObservabilityStore().getAITracesPaginated(args);
+  }
+  async batchCreateAISpans(args) {
+    return this.getObservabilityStore().batchCreateAISpans(args);
+  }
+  async batchUpdateAISpans(args) {
+    return this.getObservabilityStore().batchUpdateAISpans(args);
+  }
+  async batchDeleteAITraces(args) {
+    return this.getObservabilityStore().batchDeleteAITraces(args);
+  }
   /**
    * Scorers
    */
   async getScoreById({ id: _id }) {
     return this.stores.scores.getScoreById({ id: _id });
   }
-  async getScoresByScorerId({
+  async listScoresByScorerId({
     scorerId: _scorerId,
-    pagination: _pagination
+    pagination: _pagination,
+    entityId: _entityId,
+    entityType: _entityType,
+    source: _source
   }) {
-    return this.stores.scores.getScoresByScorerId({ scorerId: _scorerId, pagination: _pagination });
+    return this.stores.scores.listScoresByScorerId({
+      scorerId: _scorerId,
+      pagination: _pagination,
+      entityId: _entityId,
+      entityType: _entityType,
+      source: _source
+    });
   }
   async saveScore(_score) {
     return this.stores.scores.saveScore(_score);
   }
-  async getScoresByRunId({
+  async listScoresByRunId({
     runId: _runId,
     pagination: _pagination
   }) {
-    return this.stores.scores.getScoresByRunId({ runId: _runId, pagination: _pagination });
+    return this.stores.scores.listScoresByRunId({ runId: _runId, pagination: _pagination });
   }
-  async getScoresByEntityId({
+  async listScoresByEntityId({
     entityId: _entityId,
     entityType: _entityType,
     pagination: _pagination
   }) {
-    return this.stores.scores.getScoresByEntityId({
+    return this.stores.scores.listScoresByEntityId({
       entityId: _entityId,
       entityType: _entityType,
       pagination: _pagination
     });
   }
+  async listScoresBySpan({
+    traceId,
+    spanId,
+    pagination: _pagination
+  }) {
+    return this.stores.scores.listScoresBySpan({ traceId, spanId, pagination: _pagination });
+  }
 };
 
 export { MSSQLStore };
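
The final hunk adds index management (`createIndex`, `listIndexes`, `describeIndex`, `dropIndex`), the AI-tracing surface that replaces the removed `getTraces`/`getTracesPaginated`/`batchTraceInsert` methods, and list-style score readers (`listScoresByScorerId`, `listScoresByRunId`, `listScoresByEntityId`, plus the new `listScoresBySpan`). A hedged sketch of the new call sites; the ids and the pagination shape are assumptions, and only the method and argument names come from the diff:

import type { MSSQLStore } from "@mastra/mssql";

// Assumption: `store` is an already-initialized MSSQLStore instance.
declare const store: MSSQLStore;

// Renamed score readers (get… -> list…).
const byScorer = await store.listScoresByScorerId({
  scorerId: "answer-relevancy",          // placeholder id
  pagination: { page: 0, perPage: 25 },  // pagination shape is an assumption
  // entityId, entityType and source are new optional filters (see the hunk above)
});

// New in this version: scores attached to a specific AI span.
const bySpan = await store.listScoresBySpan({
  traceId: "trace-abc",                  // placeholder ids
  spanId: "span-123",
  pagination: { page: 0, perPage: 25 },
});

// AI tracing replaces the removed legacy trace readers.
const trace = await store.getAITrace("trace-abc");
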