@mastra/mssql 0.0.0-just-snapshot-20251014192224 → 0.0.0-main-test-20251105183450

This diff compares the contents of publicly available package versions as they were released to one of the supported registries. It is provided for informational purposes only and reflects the packages exactly as they appear in their respective public registries.
package/dist/index.js CHANGED
@@ -1,9 +1,10 @@
1
1
  import { MastraError, ErrorCategory, ErrorDomain } from '@mastra/core/error';
2
- import { MastraStorage, LegacyEvalsStorage, StoreOperations, TABLE_WORKFLOW_SNAPSHOT, ScoresStorage, TABLE_SCORERS, TracesStorage, TABLE_TRACES, WorkflowsStorage, MemoryStorage, resolveMessageLimit, TABLE_RESOURCES, TABLE_EVALS, TABLE_THREADS, TABLE_MESSAGES } from '@mastra/core/storage';
2
+ import { MastraStorage, StoreOperations, TABLE_WORKFLOW_SNAPSHOT, TABLE_SCHEMAS, TABLE_THREADS, TABLE_MESSAGES, TABLE_TRACES, TABLE_SCORERS, TABLE_AI_SPANS, ScoresStorage, normalizePerPage, calculatePagination, WorkflowsStorage, MemoryStorage, TABLE_RESOURCES, ObservabilityStorage, safelyParseJSON } from '@mastra/core/storage';
3
3
  import sql2 from 'mssql';
4
- import { parseSqlIdentifier, parseFieldKey } from '@mastra/core/utils';
5
4
  import { MessageList } from '@mastra/core/agent';
6
- import { saveScorePayloadSchema } from '@mastra/core/scores';
5
+ import { parseSqlIdentifier } from '@mastra/core/utils';
6
+ import { randomUUID } from 'crypto';
7
+ import { saveScorePayloadSchema } from '@mastra/core/evals';
7
8
 
8
9
  // src/storage/index.ts
9
10
  function getSchemaName(schema) {
@@ -15,154 +16,71 @@ function getTableName({ indexName, schemaName }) {
15
16
  const quotedSchemaName = schemaName;
16
17
  return quotedSchemaName ? `${quotedSchemaName}.${quotedIndexName}` : quotedIndexName;
17
18
  }
18
-
19
- // src/storage/domains/legacy-evals/index.ts
20
- function transformEvalRow(row) {
21
- let testInfoValue = null, resultValue = null;
22
- if (row.test_info) {
23
- try {
24
- testInfoValue = typeof row.test_info === "string" ? JSON.parse(row.test_info) : row.test_info;
25
- } catch {
26
- }
19
+ function buildDateRangeFilter(dateRange, fieldName) {
20
+ const filters = {};
21
+ if (dateRange?.start) {
22
+ filters[`${fieldName}_gte`] = dateRange.start;
27
23
  }
28
- if (row.test_info) {
29
- try {
30
- resultValue = typeof row.result === "string" ? JSON.parse(row.result) : row.result;
31
- } catch {
32
- }
24
+ if (dateRange?.end) {
25
+ filters[`${fieldName}_lte`] = dateRange.end;
33
26
  }
27
+ return filters;
28
+ }
29
+ function prepareWhereClause(filters, _schema) {
30
+ const conditions = [];
31
+ const params = {};
32
+ let paramIndex = 1;
33
+ Object.entries(filters).forEach(([key, value]) => {
34
+ if (value === void 0) return;
35
+ const paramName = `p${paramIndex++}`;
36
+ if (key.endsWith("_gte")) {
37
+ const fieldName = key.slice(0, -4);
38
+ conditions.push(`[${parseSqlIdentifier(fieldName, "field name")}] >= @${paramName}`);
39
+ params[paramName] = value instanceof Date ? value.toISOString() : value;
40
+ } else if (key.endsWith("_lte")) {
41
+ const fieldName = key.slice(0, -4);
42
+ conditions.push(`[${parseSqlIdentifier(fieldName, "field name")}] <= @${paramName}`);
43
+ params[paramName] = value instanceof Date ? value.toISOString() : value;
44
+ } else if (value === null) {
45
+ conditions.push(`[${parseSqlIdentifier(key, "field name")}] IS NULL`);
46
+ } else {
47
+ conditions.push(`[${parseSqlIdentifier(key, "field name")}] = @${paramName}`);
48
+ params[paramName] = value instanceof Date ? value.toISOString() : value;
49
+ }
50
+ });
34
51
  return {
35
- agentName: row.agent_name,
36
- input: row.input,
37
- output: row.output,
38
- result: resultValue,
39
- metricName: row.metric_name,
40
- instructions: row.instructions,
41
- testInfo: testInfoValue,
42
- globalRunId: row.global_run_id,
43
- runId: row.run_id,
44
- createdAt: row.created_at
52
+ sql: conditions.length > 0 ? ` WHERE ${conditions.join(" AND ")}` : "",
53
+ params
45
54
  };
46
55
  }
47
- var LegacyEvalsMSSQL = class extends LegacyEvalsStorage {
48
- pool;
49
- schema;
50
- constructor({ pool, schema }) {
51
- super();
52
- this.pool = pool;
53
- this.schema = schema;
54
- }
55
- /** @deprecated use getEvals instead */
56
- async getEvalsByAgentName(agentName, type) {
57
- try {
58
- let query = `SELECT * FROM ${getTableName({ indexName: TABLE_EVALS, schemaName: getSchemaName(this.schema) })} WHERE agent_name = @p1`;
59
- if (type === "test") {
60
- query += " AND test_info IS NOT NULL AND JSON_VALUE(test_info, '$.testPath') IS NOT NULL";
61
- } else if (type === "live") {
62
- query += " AND (test_info IS NULL OR JSON_VALUE(test_info, '$.testPath') IS NULL)";
63
- }
64
- query += " ORDER BY created_at DESC";
65
- const request = this.pool.request();
66
- request.input("p1", agentName);
67
- const result = await request.query(query);
68
- const rows = result.recordset;
69
- return typeof transformEvalRow === "function" ? rows?.map((row) => transformEvalRow(row)) ?? [] : rows ?? [];
70
- } catch (error) {
71
- if (error && error.number === 208 && error.message && error.message.includes("Invalid object name")) {
72
- return [];
73
- }
74
- console.error("Failed to get evals for the specified agent: " + error?.message);
75
- throw error;
76
- }
77
- }
78
- async getEvals(options = {}) {
79
- const { agentName, type, page = 0, perPage = 100, dateRange } = options;
80
- const fromDate = dateRange?.start;
81
- const toDate = dateRange?.end;
82
- const where = [];
83
- const params = {};
84
- if (agentName) {
85
- where.push("agent_name = @agentName");
86
- params["agentName"] = agentName;
87
- }
88
- if (type === "test") {
89
- where.push("test_info IS NOT NULL AND JSON_VALUE(test_info, '$.testPath') IS NOT NULL");
90
- } else if (type === "live") {
91
- where.push("(test_info IS NULL OR JSON_VALUE(test_info, '$.testPath') IS NULL)");
92
- }
93
- if (fromDate instanceof Date && !isNaN(fromDate.getTime())) {
94
- where.push(`[created_at] >= @fromDate`);
95
- params[`fromDate`] = fromDate.toISOString();
96
- }
97
- if (toDate instanceof Date && !isNaN(toDate.getTime())) {
98
- where.push(`[created_at] <= @toDate`);
99
- params[`toDate`] = toDate.toISOString();
100
- }
101
- const whereClause = where.length > 0 ? `WHERE ${where.join(" AND ")}` : "";
102
- const tableName = getTableName({ indexName: TABLE_EVALS, schemaName: getSchemaName(this.schema) });
103
- const offset = page * perPage;
104
- const countQuery = `SELECT COUNT(*) as total FROM ${tableName} ${whereClause}`;
105
- const dataQuery = `SELECT * FROM ${tableName} ${whereClause} ORDER BY seq_id DESC OFFSET @offset ROWS FETCH NEXT @perPage ROWS ONLY`;
106
- try {
107
- const countReq = this.pool.request();
108
- Object.entries(params).forEach(([key, value]) => {
109
- if (value instanceof Date) {
110
- countReq.input(key, sql2.DateTime, value);
111
- } else {
112
- countReq.input(key, value);
113
- }
114
- });
115
- const countResult = await countReq.query(countQuery);
116
- const total = countResult.recordset[0]?.total || 0;
117
- if (total === 0) {
118
- return {
119
- evals: [],
120
- total: 0,
121
- page,
122
- perPage,
123
- hasMore: false
124
- };
56
+ function transformFromSqlRow({
57
+ tableName,
58
+ sqlRow
59
+ }) {
60
+ const schema = TABLE_SCHEMAS[tableName];
61
+ const result = {};
62
+ Object.entries(sqlRow).forEach(([key, value]) => {
63
+ const columnSchema = schema?.[key];
64
+ if (columnSchema?.type === "jsonb" && typeof value === "string") {
65
+ try {
66
+ result[key] = JSON.parse(value);
67
+ } catch {
68
+ result[key] = value;
125
69
  }
126
- const req = this.pool.request();
127
- Object.entries(params).forEach(([key, value]) => {
128
- if (value instanceof Date) {
129
- req.input(key, sql2.DateTime, value);
130
- } else {
131
- req.input(key, value);
132
- }
133
- });
134
- req.input("offset", offset);
135
- req.input("perPage", perPage);
136
- const result = await req.query(dataQuery);
137
- const rows = result.recordset;
138
- return {
139
- evals: rows?.map((row) => transformEvalRow(row)) ?? [],
140
- total,
141
- page,
142
- perPage,
143
- hasMore: offset + (rows?.length ?? 0) < total
144
- };
145
- } catch (error) {
146
- const mastraError = new MastraError(
147
- {
148
- id: "MASTRA_STORAGE_MSSQL_STORE_GET_EVALS_FAILED",
149
- domain: ErrorDomain.STORAGE,
150
- category: ErrorCategory.THIRD_PARTY,
151
- details: {
152
- agentName: agentName || "all",
153
- type: type || "all",
154
- page,
155
- perPage
156
- }
157
- },
158
- error
159
- );
160
- this.logger?.error?.(mastraError.toString());
161
- this.logger?.trackException(mastraError);
162
- throw mastraError;
70
+ } else if (columnSchema?.type === "timestamp" && value && typeof value === "string") {
71
+ result[key] = new Date(value);
72
+ } else if (columnSchema?.type === "timestamp" && value instanceof Date) {
73
+ result[key] = value;
74
+ } else if (columnSchema?.type === "boolean") {
75
+ result[key] = Boolean(value);
76
+ } else {
77
+ result[key] = value;
163
78
  }
164
- }
165
- };
79
+ });
80
+ return result;
81
+ }
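The three helpers above (buildDateRangeFilter, prepareWhereClause, transformFromSqlRow) are shared by the new query paths that replace the removed LegacyEvals code. A minimal usage sketch, not part of the package, showing how they compose with an mssql request; pool is assumed to be the ConnectionPool and tableName the result of getTableName({ indexName: TABLE_AI_SPANS, ... }):

// Hypothetical composition of the helpers defined above.
const filters = {
  ...buildDateRangeFilter({ start: new Date('2025-01-01'), end: new Date('2025-02-01') }, 'startedAt'),
  parentSpanId: null,            // rendered as "[parentSpanId] IS NULL"; no parameter is bound for it
  name: "agent run: 'weather'",  // placeholder value; rendered as "[name] = @p4"
};
const where = prepareWhereClause(filters);
// where.sql    -> " WHERE [startedAt] >= @p1 AND [startedAt] <= @p2 AND [parentSpanId] IS NULL AND [name] = @p4"
// where.params -> { p1: '2025-01-01T00:00:00.000Z', p2: '2025-02-01T00:00:00.000Z', p4: "agent run: 'weather'" }
//                 (p3 is reserved for the NULL entry but never bound; undefined filter values are skipped)
const request = pool.request();
Object.entries(where.params).forEach(([key, value]) => request.input(key, value));
const rows = (await request.query(`SELECT * FROM ${tableName}${where.sql}`)).recordset;
// transformFromSqlRow maps each raw row back through TABLE_SCHEMAS, parsing jsonb
// columns and converting timestamp values to Date objects.
const spans = rows.map(sqlRow => transformFromSqlRow({ tableName: TABLE_AI_SPANS, sqlRow }));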
82
+
83
+ // src/storage/domains/memory/index.ts
166
84
  var MemoryMSSQL = class extends MemoryStorage {
167
85
  pool;
168
86
  schema;
@@ -180,7 +98,7 @@ var MemoryMSSQL = class extends MemoryStorage {
180
98
  });
181
99
  const cleanMessages = messagesWithParsedContent.map(({ seq_id, ...rest }) => rest);
182
100
  const list = new MessageList().add(cleanMessages, "memory");
183
- return format === "v2" ? list.get.all.v2() : list.get.all.v1();
101
+ return format === "v2" ? list.get.all.db() : list.get.all.v1();
184
102
  }
185
103
  constructor({
186
104
  pool,
@@ -194,7 +112,7 @@ var MemoryMSSQL = class extends MemoryStorage {
194
112
  }
195
113
  async getThreadById({ threadId }) {
196
114
  try {
197
- const sql7 = `SELECT
115
+ const sql5 = `SELECT
198
116
  id,
199
117
  [resourceId],
200
118
  title,
@@ -205,7 +123,7 @@ var MemoryMSSQL = class extends MemoryStorage {
205
123
  WHERE id = @threadId`;
206
124
  const request = this.pool.request();
207
125
  request.input("threadId", threadId);
208
- const resultSet = await request.query(sql7);
126
+ const resultSet = await request.query(sql5);
209
127
  const thread = resultSet.recordset[0] || null;
210
128
  if (!thread) {
211
129
  return null;
@@ -230,11 +148,12 @@ var MemoryMSSQL = class extends MemoryStorage {
230
148
  );
231
149
  }
232
150
  }
233
- async getThreadsByResourceIdPaginated(args) {
234
- const { resourceId, page = 0, perPage: perPageInput, orderBy = "createdAt", sortDirection = "DESC" } = args;
151
+ async listThreadsByResourceId(args) {
152
+ const { resourceId, page = 0, perPage: perPageInput, orderBy } = args;
153
+ const perPage = normalizePerPage(perPageInput, 100);
154
+ const { offset, perPage: perPageForResponse } = calculatePagination(page, perPageInput, perPage);
155
+ const { field, direction } = this.parseOrderBy(orderBy);
235
156
  try {
236
- const perPage = perPageInput !== void 0 ? perPageInput : 100;
237
- const currentOffset = page * perPage;
238
157
  const baseQuery = `FROM ${getTableName({ indexName: TABLE_THREADS, schemaName: getSchemaName(this.schema) })} WHERE [resourceId] = @resourceId`;
239
158
  const countQuery = `SELECT COUNT(*) as count ${baseQuery}`;
240
159
  const countRequest = this.pool.request();
@@ -246,16 +165,22 @@ var MemoryMSSQL = class extends MemoryStorage {
246
165
  threads: [],
247
166
  total: 0,
248
167
  page,
249
- perPage,
168
+ perPage: perPageForResponse,
250
169
  hasMore: false
251
170
  };
252
171
  }
253
- const orderByField = orderBy === "createdAt" ? "[createdAt]" : "[updatedAt]";
254
- const dataQuery = `SELECT id, [resourceId], title, metadata, [createdAt], [updatedAt] ${baseQuery} ORDER BY ${orderByField} ${sortDirection} OFFSET @offset ROWS FETCH NEXT @perPage ROWS ONLY`;
172
+ const orderByField = field === "createdAt" ? "[createdAt]" : "[updatedAt]";
173
+ const dir = (direction || "DESC").toUpperCase() === "ASC" ? "ASC" : "DESC";
174
+ const limitValue = perPageInput === false ? total : perPage;
175
+ const dataQuery = `SELECT id, [resourceId], title, metadata, [createdAt], [updatedAt] ${baseQuery} ORDER BY ${orderByField} ${dir} OFFSET @offset ROWS FETCH NEXT @perPage ROWS ONLY`;
255
176
  const dataRequest = this.pool.request();
256
177
  dataRequest.input("resourceId", resourceId);
257
- dataRequest.input("perPage", perPage);
258
- dataRequest.input("offset", currentOffset);
178
+ dataRequest.input("offset", offset);
179
+ if (limitValue > 2147483647) {
180
+ dataRequest.input("perPage", sql2.BigInt, limitValue);
181
+ } else {
182
+ dataRequest.input("perPage", limitValue);
183
+ }
259
184
  const rowsResult = await dataRequest.query(dataQuery);
260
185
  const rows = rowsResult.recordset || [];
261
186
  const threads = rows.map((thread) => ({
@@ -268,13 +193,13 @@ var MemoryMSSQL = class extends MemoryStorage {
268
193
  threads,
269
194
  total,
270
195
  page,
271
- perPage,
272
- hasMore: currentOffset + threads.length < total
196
+ perPage: perPageForResponse,
197
+ hasMore: perPageInput === false ? false : offset + perPage < total
273
198
  };
274
199
  } catch (error) {
275
200
  const mastraError = new MastraError(
276
201
  {
277
- id: "MASTRA_STORAGE_MSSQL_STORE_GET_THREADS_BY_RESOURCE_ID_PAGINATED_FAILED",
202
+ id: "MASTRA_STORAGE_MSSQL_STORE_LIST_THREADS_BY_RESOURCE_ID_FAILED",
278
203
  domain: ErrorDomain.STORAGE,
279
204
  category: ErrorCategory.THIRD_PARTY,
280
205
  details: {
@@ -286,7 +211,13 @@ var MemoryMSSQL = class extends MemoryStorage {
286
211
  );
287
212
  this.logger?.error?.(mastraError.toString());
288
213
  this.logger?.trackException?.(mastraError);
289
- return { threads: [], total: 0, page, perPage: perPageInput || 100, hasMore: false };
214
+ return {
215
+ threads: [],
216
+ total: 0,
217
+ page,
218
+ perPage: perPageForResponse,
219
+ hasMore: false
220
+ };
290
221
  }
291
222
  }
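getThreadsByResourceIdPaginated has become listThreadsByResourceId, with perPage normalized through normalizePerPage and the offset computed by calculatePagination. A hedged caller sketch; memory stands for an existing MemoryMSSQL instance and the ids are placeholders (the exact shape accepted by parseOrderBy is not visible in this diff):

// Hypothetical caller of the renamed pagination API.
const result = await memory.listThreadsByResourceId({
  resourceId: 'resource-123',  // placeholder id
  page: 0,
  perPage: 50,                 // perPage: false returns every matching thread in a single page
  // orderBy: ...              // resolved via this.parseOrderBy(); an unspecified direction falls back to DESC
});
// result -> { threads, total, page, perPage, hasMore }
// hasMore is always false when perPage === false, otherwise offset + perPage < total.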
292
223
  async saveThread({ thread }) {
@@ -308,7 +239,12 @@ var MemoryMSSQL = class extends MemoryStorage {
308
239
  req.input("id", thread.id);
309
240
  req.input("resourceId", thread.resourceId);
310
241
  req.input("title", thread.title);
311
- req.input("metadata", thread.metadata ? JSON.stringify(thread.metadata) : null);
242
+ const metadata = thread.metadata ? JSON.stringify(thread.metadata) : null;
243
+ if (metadata === null) {
244
+ req.input("metadata", sql2.NVarChar, null);
245
+ } else {
246
+ req.input("metadata", metadata);
247
+ }
312
248
  req.input("createdAt", sql2.DateTime2, thread.createdAt);
313
249
  req.input("updatedAt", sql2.DateTime2, thread.updatedAt);
314
250
  await req.query(mergeSql);
@@ -327,30 +263,6 @@ var MemoryMSSQL = class extends MemoryStorage {
327
263
  );
328
264
  }
329
265
  }
330
- /**
331
- * @deprecated use getThreadsByResourceIdPaginated instead
332
- */
333
- async getThreadsByResourceId(args) {
334
- const { resourceId, orderBy = "createdAt", sortDirection = "DESC" } = args;
335
- try {
336
- const baseQuery = `FROM ${getTableName({ indexName: TABLE_THREADS, schemaName: getSchemaName(this.schema) })} WHERE [resourceId] = @resourceId`;
337
- const orderByField = orderBy === "createdAt" ? "[createdAt]" : "[updatedAt]";
338
- const dataQuery = `SELECT id, [resourceId], title, metadata, [createdAt], [updatedAt] ${baseQuery} ORDER BY ${orderByField} ${sortDirection}`;
339
- const request = this.pool.request();
340
- request.input("resourceId", resourceId);
341
- const resultSet = await request.query(dataQuery);
342
- const rows = resultSet.recordset || [];
343
- return rows.map((thread) => ({
344
- ...thread,
345
- metadata: typeof thread.metadata === "string" ? JSON.parse(thread.metadata) : thread.metadata,
346
- createdAt: thread.createdAt,
347
- updatedAt: thread.updatedAt
348
- }));
349
- } catch (error) {
350
- this.logger?.error?.(`Error getting threads for resource ${resourceId}:`, error);
351
- return [];
352
- }
353
- }
354
266
  /**
355
267
  * Updates a thread's title and metadata, merging with existing metadata. Returns the updated thread.
356
268
  */
@@ -378,7 +290,7 @@ var MemoryMSSQL = class extends MemoryStorage {
378
290
  };
379
291
  try {
380
292
  const table = getTableName({ indexName: TABLE_THREADS, schemaName: getSchemaName(this.schema) });
381
- const sql7 = `UPDATE ${table}
293
+ const sql5 = `UPDATE ${table}
382
294
  SET title = @title,
383
295
  metadata = @metadata,
384
296
  [updatedAt] = @updatedAt
@@ -389,7 +301,7 @@ var MemoryMSSQL = class extends MemoryStorage {
389
301
  req.input("title", title);
390
302
  req.input("metadata", JSON.stringify(mergedMetadata));
391
303
  req.input("updatedAt", /* @__PURE__ */ new Date());
392
- const result = await req.query(sql7);
304
+ const result = await req.query(sql5);
393
305
  let thread = result.recordset && result.recordset[0];
394
306
  if (thread && "seq_id" in thread) {
395
307
  const { seq_id, ...rest } = thread;
@@ -459,11 +371,9 @@ var MemoryMSSQL = class extends MemoryStorage {
459
371
  }
460
372
  async _getIncludedMessages({
461
373
  threadId,
462
- selectBy,
463
- orderByStatement
374
+ include
464
375
  }) {
465
376
  if (!threadId.trim()) throw new Error("threadId must be a non-empty string");
466
- const include = selectBy?.include;
467
377
  if (!include) return null;
468
378
  const unionQueries = [];
469
379
  const paramValues = [];
@@ -488,7 +398,7 @@ var MemoryMSSQL = class extends MemoryStorage {
488
398
  m.[resourceId],
489
399
  m.seq_id
490
400
  FROM (
491
- SELECT *, ROW_NUMBER() OVER (${orderByStatement}) as row_num
401
+ SELECT *, ROW_NUMBER() OVER (ORDER BY [createdAt] ASC) as row_num
492
402
  FROM ${getTableName({ indexName: TABLE_MESSAGES, schemaName: getSchemaName(this.schema) })}
493
403
  WHERE [thread_id] = ${pThreadId}
494
404
  ) AS m
@@ -496,15 +406,17 @@ var MemoryMSSQL = class extends MemoryStorage {
496
406
  OR EXISTS (
497
407
  SELECT 1
498
408
  FROM (
499
- SELECT *, ROW_NUMBER() OVER (${orderByStatement}) as row_num
409
+ SELECT *, ROW_NUMBER() OVER (ORDER BY [createdAt] ASC) as row_num
500
410
  FROM ${getTableName({ indexName: TABLE_MESSAGES, schemaName: getSchemaName(this.schema) })}
501
411
  WHERE [thread_id] = ${pThreadId}
502
412
  ) AS target
503
413
  WHERE target.id = ${pId}
504
414
  AND (
505
- (m.row_num <= target.row_num + ${pPrev} AND m.row_num > target.row_num)
415
+ -- Get previous messages (messages that come BEFORE the target)
416
+ (m.row_num < target.row_num AND m.row_num >= target.row_num - ${pPrev})
506
417
  OR
507
- (m.row_num >= target.row_num - ${pNext} AND m.row_num < target.row_num)
418
+ -- Get next messages (messages that come AFTER the target)
419
+ (m.row_num > target.row_num AND m.row_num <= target.row_num + ${pNext})
508
420
  )
509
421
  )
510
422
  `
@@ -533,34 +445,16 @@ var MemoryMSSQL = class extends MemoryStorage {
533
445
  });
534
446
  return dedupedRows;
535
447
  }
536
- async getMessages(args) {
537
- const { threadId, resourceId, format, selectBy } = args;
448
+ async listMessagesById({ messageIds }) {
449
+ if (messageIds.length === 0) return { messages: [] };
538
450
  const selectStatement = `SELECT seq_id, id, content, role, type, [createdAt], thread_id AS threadId, resourceId`;
539
451
  const orderByStatement = `ORDER BY [seq_id] DESC`;
540
- const limit = resolveMessageLimit({ last: selectBy?.last, defaultLimit: 40 });
541
452
  try {
542
- if (!threadId.trim()) throw new Error("threadId must be a non-empty string");
543
453
  let rows = [];
544
- const include = selectBy?.include || [];
545
- if (include?.length) {
546
- const includeMessages = await this._getIncludedMessages({ threadId, selectBy, orderByStatement });
547
- if (includeMessages) {
548
- rows.push(...includeMessages);
549
- }
550
- }
551
- const excludeIds = rows.map((m) => m.id).filter(Boolean);
552
- let query = `${selectStatement} FROM ${getTableName({ indexName: TABLE_MESSAGES, schemaName: getSchemaName(this.schema) })} WHERE [thread_id] = @threadId`;
454
+ let query = `${selectStatement} FROM ${getTableName({ indexName: TABLE_MESSAGES, schemaName: getSchemaName(this.schema) })} WHERE [id] IN (${messageIds.map((_, i) => `@id${i}`).join(", ")})`;
553
455
  const request = this.pool.request();
554
- request.input("threadId", threadId);
555
- if (excludeIds.length > 0) {
556
- const excludeParams = excludeIds.map((_, idx) => `@id${idx}`);
557
- query += ` AND id NOT IN (${excludeParams.join(", ")})`;
558
- excludeIds.forEach((id, idx) => {
559
- request.input(`id${idx}`, id);
560
- });
561
- }
562
- query += ` ${orderByStatement} OFFSET 0 ROWS FETCH NEXT @limit ROWS ONLY`;
563
- request.input("limit", limit);
456
+ messageIds.forEach((id, i) => request.input(`id${i}`, id));
457
+ query += ` ${orderByStatement}`;
564
458
  const result = await request.query(query);
565
459
  const remainingRows = result.recordset || [];
566
460
  rows.push(...remainingRows);
@@ -568,153 +462,150 @@ var MemoryMSSQL = class extends MemoryStorage {
568
462
  const timeDiff = a.seq_id - b.seq_id;
569
463
  return timeDiff;
570
464
  });
571
- rows = rows.map(({ seq_id, ...rest }) => rest);
572
- return this._parseAndFormatMessages(rows, format);
465
+ const messagesWithParsedContent = rows.map((row) => {
466
+ if (typeof row.content === "string") {
467
+ try {
468
+ return { ...row, content: JSON.parse(row.content) };
469
+ } catch {
470
+ return row;
471
+ }
472
+ }
473
+ return row;
474
+ });
475
+ const cleanMessages = messagesWithParsedContent.map(({ seq_id, ...rest }) => rest);
476
+ const list = new MessageList().add(cleanMessages, "memory");
477
+ return { messages: list.get.all.db() };
573
478
  } catch (error) {
574
479
  const mastraError = new MastraError(
575
480
  {
576
- id: "MASTRA_STORAGE_MSSQL_STORE_GET_MESSAGES_FAILED",
481
+ id: "MASTRA_STORAGE_MSSQL_STORE_LIST_MESSAGES_BY_ID_FAILED",
577
482
  domain: ErrorDomain.STORAGE,
578
483
  category: ErrorCategory.THIRD_PARTY,
579
484
  details: {
580
- threadId,
581
- resourceId: resourceId ?? ""
485
+ messageIds: JSON.stringify(messageIds)
582
486
  }
583
487
  },
584
488
  error
585
489
  );
586
490
  this.logger?.error?.(mastraError.toString());
587
- this.logger?.trackException(mastraError);
588
- return [];
491
+ this.logger?.trackException?.(mastraError);
492
+ return { messages: [] };
589
493
  }
590
494
  }
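listMessagesById supersedes getMessagesById (removed just below): the format flag is gone and the result is always { messages } in the MessageList db format. A minimal hedged sketch; the ids are placeholders:

// Hypothetical caller.
const { messages } = await memory.listMessagesById({ messageIds: ['msg-1', 'msg-2'] });
// Unknown ids are simply absent from the result; an empty messageIds array returns { messages: [] }.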
591
- async getMessagesById({
592
- messageIds,
593
- format
594
- }) {
595
- if (messageIds.length === 0) return [];
596
- const selectStatement = `SELECT seq_id, id, content, role, type, [createdAt], thread_id AS threadId, resourceId`;
597
- const orderByStatement = `ORDER BY [seq_id] DESC`;
598
- try {
599
- let rows = [];
600
- let query = `${selectStatement} FROM ${getTableName({ indexName: TABLE_MESSAGES, schemaName: getSchemaName(this.schema) })} WHERE [id] IN (${messageIds.map((_, i) => `@id${i}`).join(", ")})`;
601
- const request = this.pool.request();
602
- messageIds.forEach((id, i) => request.input(`id${i}`, id));
603
- query += ` ${orderByStatement}`;
604
- const result = await request.query(query);
605
- const remainingRows = result.recordset || [];
606
- rows.push(...remainingRows);
607
- rows.sort((a, b) => {
608
- const timeDiff = a.seq_id - b.seq_id;
609
- return timeDiff;
610
- });
611
- rows = rows.map(({ seq_id, ...rest }) => rest);
612
- if (format === `v1`) return this._parseAndFormatMessages(rows, format);
613
- return this._parseAndFormatMessages(rows, `v2`);
614
- } catch (error) {
615
- const mastraError = new MastraError(
495
+ async listMessages(args) {
496
+ const { threadId, resourceId, include, filter, perPage: perPageInput, page = 0, orderBy } = args;
497
+ if (!threadId.trim()) {
498
+ throw new MastraError(
616
499
  {
617
- id: "MASTRA_STORAGE_MSSQL_STORE_GET_MESSAGES_BY_ID_FAILED",
500
+ id: "STORAGE_MSSQL_LIST_MESSAGES_INVALID_THREAD_ID",
618
501
  domain: ErrorDomain.STORAGE,
619
502
  category: ErrorCategory.THIRD_PARTY,
620
- details: {
621
- messageIds: JSON.stringify(messageIds)
622
- }
503
+ details: { threadId }
623
504
  },
624
- error
505
+ new Error("threadId must be a non-empty string")
625
506
  );
626
- this.logger?.error?.(mastraError.toString());
627
- this.logger?.trackException(mastraError);
628
- return [];
629
507
  }
630
- }
631
- async getMessagesPaginated(args) {
632
- const { threadId, resourceId, format, selectBy } = args;
633
- const { page = 0, perPage: perPageInput, dateRange } = selectBy?.pagination || {};
508
+ const perPage = normalizePerPage(perPageInput, 40);
509
+ const { offset, perPage: perPageForResponse } = calculatePagination(page, perPageInput, perPage);
634
510
  try {
635
- if (!threadId.trim()) throw new Error("threadId must be a non-empty string");
636
- const fromDate = dateRange?.start;
637
- const toDate = dateRange?.end;
511
+ const { field, direction } = this.parseOrderBy(orderBy, "ASC");
512
+ const orderByStatement = `ORDER BY [${field}] ${direction}`;
638
513
  const selectStatement = `SELECT seq_id, id, content, role, type, [createdAt], thread_id AS threadId, resourceId`;
639
- const orderByStatement = `ORDER BY [seq_id] DESC`;
640
- let messages = [];
641
- if (selectBy?.include?.length) {
642
- const includeMessages = await this._getIncludedMessages({ threadId, selectBy, orderByStatement });
643
- if (includeMessages) messages.push(...includeMessages);
644
- }
645
- const perPage = perPageInput !== void 0 ? perPageInput : resolveMessageLimit({ last: selectBy?.last, defaultLimit: 40 });
646
- const currentOffset = page * perPage;
514
+ const tableName = getTableName({ indexName: TABLE_MESSAGES, schemaName: getSchemaName(this.schema) });
647
515
  const conditions = ["[thread_id] = @threadId"];
648
516
  const request = this.pool.request();
649
517
  request.input("threadId", threadId);
650
- if (fromDate instanceof Date && !isNaN(fromDate.getTime())) {
518
+ if (resourceId) {
519
+ conditions.push("[resourceId] = @resourceId");
520
+ request.input("resourceId", resourceId);
521
+ }
522
+ if (filter?.dateRange?.start) {
651
523
  conditions.push("[createdAt] >= @fromDate");
652
- request.input("fromDate", fromDate.toISOString());
524
+ request.input("fromDate", filter.dateRange.start);
653
525
  }
654
- if (toDate instanceof Date && !isNaN(toDate.getTime())) {
526
+ if (filter?.dateRange?.end) {
655
527
  conditions.push("[createdAt] <= @toDate");
656
- request.input("toDate", toDate.toISOString());
528
+ request.input("toDate", filter.dateRange.end);
657
529
  }
658
530
  const whereClause = `WHERE ${conditions.join(" AND ")}`;
659
- const countQuery = `SELECT COUNT(*) as total FROM ${getTableName({ indexName: TABLE_MESSAGES, schemaName: getSchemaName(this.schema) })} ${whereClause}`;
531
+ const countQuery = `SELECT COUNT(*) as total FROM ${tableName} ${whereClause}`;
660
532
  const countResult = await request.query(countQuery);
661
533
  const total = parseInt(countResult.recordset[0]?.total, 10) || 0;
662
- if (total === 0 && messages.length > 0) {
663
- const parsedIncluded = this._parseAndFormatMessages(messages, format);
534
+ const limitValue = perPageInput === false ? total : perPage;
535
+ const dataQuery = `${selectStatement} FROM ${tableName} ${whereClause} ${orderByStatement} OFFSET @offset ROWS FETCH NEXT @limit ROWS ONLY`;
536
+ request.input("offset", offset);
537
+ if (limitValue > 2147483647) {
538
+ request.input("limit", sql2.BigInt, limitValue);
539
+ } else {
540
+ request.input("limit", limitValue);
541
+ }
542
+ const rowsResult = await request.query(dataQuery);
543
+ const rows = rowsResult.recordset || [];
544
+ const messages = [...rows];
545
+ if (total === 0 && messages.length === 0 && (!include || include.length === 0)) {
664
546
  return {
665
- messages: parsedIncluded,
666
- total: parsedIncluded.length,
547
+ messages: [],
548
+ total: 0,
667
549
  page,
668
- perPage,
550
+ perPage: perPageForResponse,
669
551
  hasMore: false
670
552
  };
671
553
  }
672
- const excludeIds = messages.map((m) => m.id);
673
- if (excludeIds.length > 0) {
674
- const excludeParams = excludeIds.map((_, idx) => `@id${idx}`);
675
- conditions.push(`id NOT IN (${excludeParams.join(", ")})`);
676
- excludeIds.forEach((id, idx) => request.input(`id${idx}`, id));
554
+ const messageIds = new Set(messages.map((m) => m.id));
555
+ if (include && include.length > 0) {
556
+ const includeMessages = await this._getIncludedMessages({ threadId, include });
557
+ if (includeMessages) {
558
+ for (const includeMsg of includeMessages) {
559
+ if (!messageIds.has(includeMsg.id)) {
560
+ messages.push(includeMsg);
561
+ messageIds.add(includeMsg.id);
562
+ }
563
+ }
564
+ }
677
565
  }
678
- const finalWhereClause = `WHERE ${conditions.join(" AND ")}`;
679
- const dataQuery = `${selectStatement} FROM ${getTableName({ indexName: TABLE_MESSAGES, schemaName: getSchemaName(this.schema) })} ${finalWhereClause} ${orderByStatement} OFFSET @offset ROWS FETCH NEXT @limit ROWS ONLY`;
680
- request.input("offset", currentOffset);
681
- request.input("limit", perPage);
682
- const rowsResult = await request.query(dataQuery);
683
- const rows = rowsResult.recordset || [];
684
- rows.sort((a, b) => a.seq_id - b.seq_id);
685
- messages.push(...rows);
686
- const parsed = this._parseAndFormatMessages(messages, format);
566
+ const parsed = this._parseAndFormatMessages(messages, "v2");
567
+ let finalMessages = parsed;
568
+ finalMessages = finalMessages.sort((a, b) => {
569
+ const aValue = field === "createdAt" ? new Date(a.createdAt).getTime() : a[field];
570
+ const bValue = field === "createdAt" ? new Date(b.createdAt).getTime() : b[field];
571
+ return direction === "ASC" ? aValue - bValue : bValue - aValue;
572
+ });
573
+ const returnedThreadMessageIds = new Set(finalMessages.filter((m) => m.threadId === threadId).map((m) => m.id));
574
+ const allThreadMessagesReturned = returnedThreadMessageIds.size >= total;
575
+ const hasMore = perPageInput !== false && !allThreadMessagesReturned && offset + perPage < total;
687
576
  return {
688
- messages: parsed,
689
- total: total + excludeIds.length,
577
+ messages: finalMessages,
578
+ total,
690
579
  page,
691
- perPage,
692
- hasMore: currentOffset + rows.length < total
580
+ perPage: perPageForResponse,
581
+ hasMore
693
582
  };
694
583
  } catch (error) {
695
584
  const mastraError = new MastraError(
696
585
  {
697
- id: "MASTRA_STORAGE_MSSQL_STORE_GET_MESSAGES_PAGINATED_FAILED",
586
+ id: "MASTRA_STORAGE_MSSQL_STORE_LIST_MESSAGES_FAILED",
698
587
  domain: ErrorDomain.STORAGE,
699
588
  category: ErrorCategory.THIRD_PARTY,
700
589
  details: {
701
590
  threadId,
702
- resourceId: resourceId ?? "",
703
- page
591
+ resourceId: resourceId ?? ""
704
592
  }
705
593
  },
706
594
  error
707
595
  );
708
596
  this.logger?.error?.(mastraError.toString());
709
- this.logger?.trackException(mastraError);
710
- return { messages: [], total: 0, page, perPage: perPageInput || 40, hasMore: false };
597
+ this.logger?.trackException?.(mastraError);
598
+ return {
599
+ messages: [],
600
+ total: 0,
601
+ page,
602
+ perPage: perPageForResponse,
603
+ hasMore: false
604
+ };
711
605
  }
712
606
  }
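listMessages replaces both getMessages and getMessagesPaginated. A hedged caller sketch based on the argument destructuring above; the fields of an include entry beyond id (the previous/next window sizes consumed by _getIncludedMessages) are an assumption, since that mapping is outside this diff:

// Hypothetical caller of the consolidated message listing API.
const pageOfMessages = await memory.listMessages({
  threadId: 'thread-123',                                    // required, must be non-empty
  resourceId: 'resource-123',                                // optional; adds [resourceId] = @resourceId
  filter: { dateRange: { start: new Date('2025-01-01') } },  // optional [createdAt] bounds
  page: 0,
  perPage: 40,                                               // default 40; false removes the row limit
  include: [{ id: 'msg-7', withPreviousMessages: 2, withNextMessages: 2 }],  // assumed entry shape
  // orderBy: ...                                            // resolved via this.parseOrderBy(orderBy, 'ASC')
});
// pageOfMessages -> { messages, total, page, perPage, hasMore }; messages come back in the db (v2)
// format and also contain include hits that were not already part of the requested page.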
713
- async saveMessages({
714
- messages,
715
- format
716
- }) {
717
- if (messages.length === 0) return messages;
607
+ async saveMessages({ messages }) {
608
+ if (messages.length === 0) return { messages: [] };
718
609
  const threadId = messages[0]?.threadId;
719
610
  if (!threadId) {
720
611
  throw new MastraError({
@@ -796,8 +687,7 @@ var MemoryMSSQL = class extends MemoryStorage {
796
687
  return message;
797
688
  });
798
689
  const list = new MessageList().add(messagesWithParsedContent, "memory");
799
- if (format === "v2") return list.get.all.v2();
800
- return list.get.all.v1();
690
+ return { messages: list.get.all.db() };
801
691
  } catch (error) {
802
692
  throw new MastraError(
803
693
  {
@@ -973,8 +863,10 @@ var MemoryMSSQL = class extends MemoryStorage {
973
863
  return null;
974
864
  }
975
865
  return {
976
- ...result,
977
- workingMemory: typeof result.workingMemory === "object" ? JSON.stringify(result.workingMemory) : result.workingMemory,
866
+ id: result.id,
867
+ createdAt: result.createdAt,
868
+ updatedAt: result.updatedAt,
869
+ workingMemory: result.workingMemory,
978
870
  metadata: typeof result.metadata === "string" ? JSON.parse(result.metadata) : result.metadata
979
871
  };
980
872
  } catch (error) {
@@ -988,7 +880,7 @@ var MemoryMSSQL = class extends MemoryStorage {
988
880
  error
989
881
  );
990
882
  this.logger?.error?.(mastraError.toString());
991
- this.logger?.trackException(mastraError);
883
+ this.logger?.trackException?.(mastraError);
992
884
  throw mastraError;
993
885
  }
994
886
  }
@@ -997,7 +889,7 @@ var MemoryMSSQL = class extends MemoryStorage {
997
889
  tableName: TABLE_RESOURCES,
998
890
  record: {
999
891
  ...resource,
1000
- metadata: JSON.stringify(resource.metadata)
892
+ metadata: resource.metadata
1001
893
  }
1002
894
  });
1003
895
  return resource;
@@ -1055,111 +947,436 @@ var MemoryMSSQL = class extends MemoryStorage {
1055
947
  error
1056
948
  );
1057
949
  this.logger?.error?.(mastraError.toString());
1058
- this.logger?.trackException(mastraError);
950
+ this.logger?.trackException?.(mastraError);
1059
951
  throw mastraError;
1060
952
  }
1061
953
  }
1062
954
  };
1063
- var StoreOperationsMSSQL = class extends StoreOperations {
955
+ var ObservabilityMSSQL = class extends ObservabilityStorage {
1064
956
  pool;
1065
- schemaName;
1066
- setupSchemaPromise = null;
1067
- schemaSetupComplete = void 0;
1068
- getSqlType(type, isPrimaryKey = false) {
1069
- switch (type) {
1070
- case "text":
1071
- return isPrimaryKey ? "NVARCHAR(255)" : "NVARCHAR(MAX)";
1072
- case "timestamp":
1073
- return "DATETIME2(7)";
1074
- case "uuid":
1075
- return "UNIQUEIDENTIFIER";
1076
- case "jsonb":
1077
- return "NVARCHAR(MAX)";
1078
- case "integer":
1079
- return "INT";
1080
- case "bigint":
1081
- return "BIGINT";
1082
- case "float":
1083
- return "FLOAT";
1084
- default:
1085
- throw new MastraError({
1086
- id: "MASTRA_STORAGE_MSSQL_STORE_TYPE_NOT_SUPPORTED",
1087
- domain: ErrorDomain.STORAGE,
1088
- category: ErrorCategory.THIRD_PARTY
1089
- });
1090
- }
1091
- }
1092
- constructor({ pool, schemaName }) {
957
+ operations;
958
+ schema;
959
+ constructor({
960
+ pool,
961
+ operations,
962
+ schema
963
+ }) {
1093
964
  super();
1094
965
  this.pool = pool;
1095
- this.schemaName = schemaName;
1096
- }
1097
- async hasColumn(table, column) {
1098
- const schema = this.schemaName || "dbo";
1099
- const request = this.pool.request();
1100
- request.input("schema", schema);
1101
- request.input("table", table);
1102
- request.input("column", column);
1103
- request.input("columnLower", column.toLowerCase());
1104
- const result = await request.query(
1105
- `SELECT 1 FROM INFORMATION_SCHEMA.COLUMNS WHERE TABLE_SCHEMA = @schema AND TABLE_NAME = @table AND (COLUMN_NAME = @column OR COLUMN_NAME = @columnLower)`
1106
- );
1107
- return result.recordset.length > 0;
966
+ this.operations = operations;
967
+ this.schema = schema;
1108
968
  }
1109
- async setupSchema() {
1110
- if (!this.schemaName || this.schemaSetupComplete) {
1111
- return;
1112
- }
1113
- if (!this.setupSchemaPromise) {
1114
- this.setupSchemaPromise = (async () => {
1115
- try {
1116
- const checkRequest = this.pool.request();
1117
- checkRequest.input("schemaName", this.schemaName);
1118
- const checkResult = await checkRequest.query(`
1119
- SELECT 1 AS found FROM INFORMATION_SCHEMA.SCHEMATA WHERE SCHEMA_NAME = @schemaName
1120
- `);
1121
- const schemaExists = Array.isArray(checkResult.recordset) && checkResult.recordset.length > 0;
1122
- if (!schemaExists) {
1123
- try {
1124
- await this.pool.request().query(`CREATE SCHEMA [${this.schemaName}]`);
1125
- this.logger?.info?.(`Schema "${this.schemaName}" created successfully`);
1126
- } catch (error) {
1127
- this.logger?.error?.(`Failed to create schema "${this.schemaName}"`, { error });
1128
- throw new Error(
1129
- `Unable to create schema "${this.schemaName}". This requires CREATE privilege on the database. Either create the schema manually or grant CREATE privilege to the user.`
1130
- );
1131
- }
1132
- }
1133
- this.schemaSetupComplete = true;
1134
- this.logger?.debug?.(`Schema "${this.schemaName}" is ready for use`);
1135
- } catch (error) {
1136
- this.schemaSetupComplete = void 0;
1137
- this.setupSchemaPromise = null;
1138
- throw error;
1139
- } finally {
1140
- this.setupSchemaPromise = null;
1141
- }
1142
- })();
1143
- }
1144
- await this.setupSchemaPromise;
969
+ get tracingStrategy() {
970
+ return {
971
+ preferred: "batch-with-updates",
972
+ supported: ["batch-with-updates", "insert-only"]
973
+ };
1145
974
  }
1146
- async insert({ tableName, record }) {
975
+ async createSpan(span) {
1147
976
  try {
1148
- const columns = Object.keys(record).map((col) => parseSqlIdentifier(col, "column name"));
1149
- const values = Object.values(record);
1150
- const paramNames = values.map((_, i) => `@param${i}`);
1151
- const insertSql = `INSERT INTO ${getTableName({ indexName: tableName, schemaName: getSchemaName(this.schemaName) })} (${columns.map((c) => `[${c}]`).join(", ")}) VALUES (${paramNames.join(", ")})`;
1152
- const request = this.pool.request();
1153
- values.forEach((value, i) => {
1154
- if (value instanceof Date) {
1155
- request.input(`param${i}`, sql2.DateTime2, value);
1156
- } else if (typeof value === "object" && value !== null) {
1157
- request.input(`param${i}`, JSON.stringify(value));
1158
- } else {
1159
- request.input(`param${i}`, value);
1160
- }
1161
- });
1162
- await request.query(insertSql);
977
+ const startedAt = span.startedAt instanceof Date ? span.startedAt.toISOString() : span.startedAt;
978
+ const endedAt = span.endedAt instanceof Date ? span.endedAt.toISOString() : span.endedAt;
979
+ const record = {
980
+ ...span,
981
+ startedAt,
982
+ endedAt
983
+ // Note: createdAt/updatedAt will be set by default values
984
+ };
985
+ return this.operations.insert({ tableName: TABLE_AI_SPANS, record });
986
+ } catch (error) {
987
+ throw new MastraError(
988
+ {
989
+ id: "MSSQL_STORE_CREATE_AI_SPAN_FAILED",
990
+ domain: ErrorDomain.STORAGE,
991
+ category: ErrorCategory.USER,
992
+ details: {
993
+ spanId: span.spanId,
994
+ traceId: span.traceId,
995
+ spanType: span.spanType,
996
+ spanName: span.name
997
+ }
998
+ },
999
+ error
1000
+ );
1001
+ }
1002
+ }
1003
+ async getAITrace(traceId) {
1004
+ try {
1005
+ const tableName = getTableName({
1006
+ indexName: TABLE_AI_SPANS,
1007
+ schemaName: getSchemaName(this.schema)
1008
+ });
1009
+ const request = this.pool.request();
1010
+ request.input("traceId", traceId);
1011
+ const result = await request.query(
1012
+ `SELECT
1013
+ [traceId], [spanId], [parentSpanId], [name], [scope], [spanType],
1014
+ [attributes], [metadata], [links], [input], [output], [error], [isEvent],
1015
+ [startedAt], [endedAt], [createdAt], [updatedAt]
1016
+ FROM ${tableName}
1017
+ WHERE [traceId] = @traceId
1018
+ ORDER BY [startedAt] DESC`
1019
+ );
1020
+ if (!result.recordset || result.recordset.length === 0) {
1021
+ return null;
1022
+ }
1023
+ return {
1024
+ traceId,
1025
+ spans: result.recordset.map(
1026
+ (span) => transformFromSqlRow({
1027
+ tableName: TABLE_AI_SPANS,
1028
+ sqlRow: span
1029
+ })
1030
+ )
1031
+ };
1032
+ } catch (error) {
1033
+ throw new MastraError(
1034
+ {
1035
+ id: "MSSQL_STORE_GET_AI_TRACE_FAILED",
1036
+ domain: ErrorDomain.STORAGE,
1037
+ category: ErrorCategory.USER,
1038
+ details: {
1039
+ traceId
1040
+ }
1041
+ },
1042
+ error
1043
+ );
1044
+ }
1045
+ }
1046
+ async updateSpan({
1047
+ spanId,
1048
+ traceId,
1049
+ updates
1050
+ }) {
1051
+ try {
1052
+ const data = { ...updates };
1053
+ if (data.endedAt instanceof Date) {
1054
+ data.endedAt = data.endedAt.toISOString();
1055
+ }
1056
+ if (data.startedAt instanceof Date) {
1057
+ data.startedAt = data.startedAt.toISOString();
1058
+ }
1059
+ await this.operations.update({
1060
+ tableName: TABLE_AI_SPANS,
1061
+ keys: { spanId, traceId },
1062
+ data
1063
+ });
1064
+ } catch (error) {
1065
+ throw new MastraError(
1066
+ {
1067
+ id: "MSSQL_STORE_UPDATE_AI_SPAN_FAILED",
1068
+ domain: ErrorDomain.STORAGE,
1069
+ category: ErrorCategory.USER,
1070
+ details: {
1071
+ spanId,
1072
+ traceId
1073
+ }
1074
+ },
1075
+ error
1076
+ );
1077
+ }
1078
+ }
1079
+ async getAITracesPaginated({
1080
+ filters,
1081
+ pagination
1082
+ }) {
1083
+ const page = pagination?.page ?? 0;
1084
+ const perPage = pagination?.perPage ?? 10;
1085
+ const { entityId, entityType, ...actualFilters } = filters || {};
1086
+ const filtersWithDateRange = {
1087
+ ...actualFilters,
1088
+ ...buildDateRangeFilter(pagination?.dateRange, "startedAt"),
1089
+ parentSpanId: null
1090
+ // Only get root spans for traces
1091
+ };
1092
+ const whereClause = prepareWhereClause(filtersWithDateRange);
1093
+ let actualWhereClause = whereClause.sql;
1094
+ const params = { ...whereClause.params };
1095
+ let currentParamIndex = Object.keys(params).length + 1;
1096
+ if (entityId && entityType) {
1097
+ let name = "";
1098
+ if (entityType === "workflow") {
1099
+ name = `workflow run: '${entityId}'`;
1100
+ } else if (entityType === "agent") {
1101
+ name = `agent run: '${entityId}'`;
1102
+ } else {
1103
+ const error = new MastraError({
1104
+ id: "MSSQL_STORE_GET_AI_TRACES_PAGINATED_FAILED",
1105
+ domain: ErrorDomain.STORAGE,
1106
+ category: ErrorCategory.USER,
1107
+ details: {
1108
+ entityType
1109
+ },
1110
+ text: `Cannot filter by entity type: ${entityType}`
1111
+ });
1112
+ throw error;
1113
+ }
1114
+ const entityParam = `p${currentParamIndex++}`;
1115
+ if (actualWhereClause) {
1116
+ actualWhereClause += ` AND [name] = @${entityParam}`;
1117
+ } else {
1118
+ actualWhereClause = ` WHERE [name] = @${entityParam}`;
1119
+ }
1120
+ params[entityParam] = name;
1121
+ }
1122
+ const tableName = getTableName({
1123
+ indexName: TABLE_AI_SPANS,
1124
+ schemaName: getSchemaName(this.schema)
1125
+ });
1126
+ try {
1127
+ const countRequest = this.pool.request();
1128
+ Object.entries(params).forEach(([key, value]) => {
1129
+ countRequest.input(key, value);
1130
+ });
1131
+ const countResult = await countRequest.query(
1132
+ `SELECT COUNT(*) as count FROM ${tableName}${actualWhereClause}`
1133
+ );
1134
+ const total = countResult.recordset[0]?.count ?? 0;
1135
+ if (total === 0) {
1136
+ return {
1137
+ pagination: {
1138
+ total: 0,
1139
+ page,
1140
+ perPage,
1141
+ hasMore: false
1142
+ },
1143
+ spans: []
1144
+ };
1145
+ }
1146
+ const dataRequest = this.pool.request();
1147
+ Object.entries(params).forEach(([key, value]) => {
1148
+ dataRequest.input(key, value);
1149
+ });
1150
+ dataRequest.input("offset", page * perPage);
1151
+ dataRequest.input("limit", perPage);
1152
+ const dataResult = await dataRequest.query(
1153
+ `SELECT * FROM ${tableName}${actualWhereClause} ORDER BY [startedAt] DESC OFFSET @offset ROWS FETCH NEXT @limit ROWS ONLY`
1154
+ );
1155
+ const spans = dataResult.recordset.map(
1156
+ (row) => transformFromSqlRow({
1157
+ tableName: TABLE_AI_SPANS,
1158
+ sqlRow: row
1159
+ })
1160
+ );
1161
+ return {
1162
+ pagination: {
1163
+ total,
1164
+ page,
1165
+ perPage,
1166
+ hasMore: (page + 1) * perPage < total
1167
+ },
1168
+ spans
1169
+ };
1170
+ } catch (error) {
1171
+ throw new MastraError(
1172
+ {
1173
+ id: "MSSQL_STORE_GET_AI_TRACES_PAGINATED_FAILED",
1174
+ domain: ErrorDomain.STORAGE,
1175
+ category: ErrorCategory.USER
1176
+ },
1177
+ error
1178
+ );
1179
+ }
1180
+ }
1181
+ async batchCreateSpans(args) {
1182
+ if (!args.records || args.records.length === 0) {
1183
+ return;
1184
+ }
1185
+ try {
1186
+ await this.operations.batchInsert({
1187
+ tableName: TABLE_AI_SPANS,
1188
+ records: args.records.map((span) => ({
1189
+ ...span,
1190
+ startedAt: span.startedAt instanceof Date ? span.startedAt.toISOString() : span.startedAt,
1191
+ endedAt: span.endedAt instanceof Date ? span.endedAt.toISOString() : span.endedAt
1192
+ }))
1193
+ });
1194
+ } catch (error) {
1195
+ throw new MastraError(
1196
+ {
1197
+ id: "MSSQL_STORE_BATCH_CREATE_AI_SPANS_FAILED",
1198
+ domain: ErrorDomain.STORAGE,
1199
+ category: ErrorCategory.USER,
1200
+ details: {
1201
+ count: args.records.length
1202
+ }
1203
+ },
1204
+ error
1205
+ );
1206
+ }
1207
+ }
1208
+ async batchUpdateSpans(args) {
1209
+ if (!args.records || args.records.length === 0) {
1210
+ return;
1211
+ }
1212
+ try {
1213
+ const updates = args.records.map(({ traceId, spanId, updates: data }) => {
1214
+ const processedData = { ...data };
1215
+ if (processedData.endedAt instanceof Date) {
1216
+ processedData.endedAt = processedData.endedAt.toISOString();
1217
+ }
1218
+ if (processedData.startedAt instanceof Date) {
1219
+ processedData.startedAt = processedData.startedAt.toISOString();
1220
+ }
1221
+ return {
1222
+ keys: { spanId, traceId },
1223
+ data: processedData
1224
+ };
1225
+ });
1226
+ await this.operations.batchUpdate({
1227
+ tableName: TABLE_AI_SPANS,
1228
+ updates
1229
+ });
1230
+ } catch (error) {
1231
+ throw new MastraError(
1232
+ {
1233
+ id: "MSSQL_STORE_BATCH_UPDATE_AI_SPANS_FAILED",
1234
+ domain: ErrorDomain.STORAGE,
1235
+ category: ErrorCategory.USER,
1236
+ details: {
1237
+ count: args.records.length
1238
+ }
1239
+ },
1240
+ error
1241
+ );
1242
+ }
1243
+ }
1244
+ async batchDeleteAITraces(args) {
1245
+ if (!args.traceIds || args.traceIds.length === 0) {
1246
+ return;
1247
+ }
1248
+ try {
1249
+ const keys = args.traceIds.map((traceId) => ({ traceId }));
1250
+ await this.operations.batchDelete({
1251
+ tableName: TABLE_AI_SPANS,
1252
+ keys
1253
+ });
1254
+ } catch (error) {
1255
+ throw new MastraError(
1256
+ {
1257
+ id: "MSSQL_STORE_BATCH_DELETE_AI_TRACES_FAILED",
1258
+ domain: ErrorDomain.STORAGE,
1259
+ category: ErrorCategory.USER,
1260
+ details: {
1261
+ count: args.traceIds.length
1262
+ }
1263
+ },
1264
+ error
1265
+ );
1266
+ }
1267
+ }
1268
+ };
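ObservabilityMSSQL is a new storage domain that persists AI tracing spans in TABLE_AI_SPANS and delegates writes to the operations layer. A hedged usage sketch; ids and the spanType value are placeholders, and the constructor arguments mirror the destructuring shown above:

// Hypothetical usage; pool is an mssql ConnectionPool, operations a StoreOperationsMSSQL instance.
const observability = new ObservabilityMSSQL({ pool, operations, schema: undefined });
await observability.createSpan({
  traceId: 'trace-1',
  spanId: 'span-1',
  parentSpanId: null,            // root span; getAITracesPaginated pages over root spans only
  name: "agent run: 'weather'",  // the entityType 'agent' filter matches this naming convention
  spanType: 'agent_run',         // placeholder; the real spanType values are not shown in this diff
  startedAt: new Date(),
});
await observability.updateSpan({ traceId: 'trace-1', spanId: 'span-1', updates: { endedAt: new Date() } });
const trace = await observability.getAITrace('trace-1');  // -> { traceId, spans } or null
const tracePage = await observability.getAITracesPaginated({
  filters: { entityType: 'agent', entityId: 'weather' },   // only 'agent' and 'workflow' are accepted
  pagination: { page: 0, perPage: 10, dateRange: { start: new Date('2025-01-01') } },
});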
1269
+ var StoreOperationsMSSQL = class extends StoreOperations {
1270
+ pool;
1271
+ schemaName;
1272
+ setupSchemaPromise = null;
1273
+ schemaSetupComplete = void 0;
1274
+ getSqlType(type, isPrimaryKey = false, useLargeStorage = false) {
1275
+ switch (type) {
1276
+ case "text":
1277
+ if (useLargeStorage) {
1278
+ return "NVARCHAR(MAX)";
1279
+ }
1280
+ return isPrimaryKey ? "NVARCHAR(255)" : "NVARCHAR(400)";
1281
+ case "timestamp":
1282
+ return "DATETIME2(7)";
1283
+ case "uuid":
1284
+ return "UNIQUEIDENTIFIER";
1285
+ case "jsonb":
1286
+ return "NVARCHAR(MAX)";
1287
+ case "integer":
1288
+ return "INT";
1289
+ case "bigint":
1290
+ return "BIGINT";
1291
+ case "float":
1292
+ return "FLOAT";
1293
+ case "boolean":
1294
+ return "BIT";
1295
+ default:
1296
+ throw new MastraError({
1297
+ id: "MASTRA_STORAGE_MSSQL_STORE_TYPE_NOT_SUPPORTED",
1298
+ domain: ErrorDomain.STORAGE,
1299
+ category: ErrorCategory.THIRD_PARTY
1300
+ });
1301
+ }
1302
+ }
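getSqlType now accepts a useLargeStorage flag and a boolean case, and plain text columns default to NVARCHAR(400) instead of NVARCHAR(MAX) unless they are in the large-data column list used by createTable and alterTable. An illustrative check of the mapping; the ops construction is a placeholder:

// Hypothetical sanity check of the type mapping above.
const ops = new StoreOperationsMSSQL({ pool, schemaName: undefined });
ops.getSqlType('text', true);         // 'NVARCHAR(255)'  primary keys stay index-friendly
ops.getSqlType('text');               // 'NVARCHAR(400)'  ordinary text columns
ops.getSqlType('text', false, true);  // 'NVARCHAR(MAX)'  large columns: content, snapshot, workingMemory, ...
ops.getSqlType('jsonb');              // 'NVARCHAR(MAX)'
ops.getSqlType('boolean');            // 'BIT'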
1303
+ constructor({ pool, schemaName }) {
1304
+ super();
1305
+ this.pool = pool;
1306
+ this.schemaName = schemaName;
1307
+ }
1308
+ async hasColumn(table, column) {
1309
+ const schema = this.schemaName || "dbo";
1310
+ const request = this.pool.request();
1311
+ request.input("schema", schema);
1312
+ request.input("table", table);
1313
+ request.input("column", column);
1314
+ request.input("columnLower", column.toLowerCase());
1315
+ const result = await request.query(
1316
+ `SELECT 1 FROM INFORMATION_SCHEMA.COLUMNS WHERE TABLE_SCHEMA = @schema AND TABLE_NAME = @table AND (COLUMN_NAME = @column OR COLUMN_NAME = @columnLower)`
1317
+ );
1318
+ return result.recordset.length > 0;
1319
+ }
1320
+ async setupSchema() {
1321
+ if (!this.schemaName || this.schemaSetupComplete) {
1322
+ return;
1323
+ }
1324
+ if (!this.setupSchemaPromise) {
1325
+ this.setupSchemaPromise = (async () => {
1326
+ try {
1327
+ const checkRequest = this.pool.request();
1328
+ checkRequest.input("schemaName", this.schemaName);
1329
+ const checkResult = await checkRequest.query(`
1330
+ SELECT 1 AS found FROM INFORMATION_SCHEMA.SCHEMATA WHERE SCHEMA_NAME = @schemaName
1331
+ `);
1332
+ const schemaExists = Array.isArray(checkResult.recordset) && checkResult.recordset.length > 0;
1333
+ if (!schemaExists) {
1334
+ try {
1335
+ await this.pool.request().query(`CREATE SCHEMA [${this.schemaName}]`);
1336
+ this.logger?.info?.(`Schema "${this.schemaName}" created successfully`);
1337
+ } catch (error) {
1338
+ this.logger?.error?.(`Failed to create schema "${this.schemaName}"`, { error });
1339
+ throw new Error(
1340
+ `Unable to create schema "${this.schemaName}". This requires CREATE privilege on the database. Either create the schema manually or grant CREATE privilege to the user.`
1341
+ );
1342
+ }
1343
+ }
1344
+ this.schemaSetupComplete = true;
1345
+ this.logger?.debug?.(`Schema "${this.schemaName}" is ready for use`);
1346
+ } catch (error) {
1347
+ this.schemaSetupComplete = void 0;
1348
+ this.setupSchemaPromise = null;
1349
+ throw error;
1350
+ } finally {
1351
+ this.setupSchemaPromise = null;
1352
+ }
1353
+ })();
1354
+ }
1355
+ await this.setupSchemaPromise;
1356
+ }
1357
+ async insert({
1358
+ tableName,
1359
+ record,
1360
+ transaction
1361
+ }) {
1362
+ try {
1363
+ const columns = Object.keys(record);
1364
+ const parsedColumns = columns.map((col) => parseSqlIdentifier(col, "column name"));
1365
+ const paramNames = columns.map((_, i) => `@param${i}`);
1366
+ const insertSql = `INSERT INTO ${getTableName({ indexName: tableName, schemaName: getSchemaName(this.schemaName) })} (${parsedColumns.map((c) => `[${c}]`).join(", ")}) VALUES (${paramNames.join(", ")})`;
1367
+ const request = transaction ? transaction.request() : this.pool.request();
1368
+ columns.forEach((col, i) => {
1369
+ const value = record[col];
1370
+ const preparedValue = this.prepareValue(value, col, tableName);
1371
+ if (preparedValue instanceof Date) {
1372
+ request.input(`param${i}`, sql2.DateTime2, preparedValue);
1373
+ } else if (preparedValue === null || preparedValue === void 0) {
1374
+ request.input(`param${i}`, this.getMssqlType(tableName, col), null);
1375
+ } else {
1376
+ request.input(`param${i}`, preparedValue);
1377
+ }
1378
+ });
1379
+ await request.query(insertSql);
1163
1380
  } catch (error) {
1164
1381
  throw new MastraError(
1165
1382
  {
@@ -1180,7 +1397,7 @@ var StoreOperationsMSSQL = class extends StoreOperations {
1180
1397
  try {
1181
1398
  await this.pool.request().query(`TRUNCATE TABLE ${fullTableName}`);
1182
1399
  } catch (truncateError) {
1183
- if (truncateError.message && truncateError.message.includes("foreign key")) {
1400
+ if (truncateError?.number === 4712) {
1184
1401
  await this.pool.request().query(`DELETE FROM ${fullTableName}`);
1185
1402
  } else {
1186
1403
  throw truncateError;
@@ -1203,9 +1420,11 @@ var StoreOperationsMSSQL = class extends StoreOperations {
1203
1420
  getDefaultValue(type) {
1204
1421
  switch (type) {
1205
1422
  case "timestamp":
1206
- return "DEFAULT SYSDATETIMEOFFSET()";
1423
+ return "DEFAULT SYSUTCDATETIME()";
1207
1424
  case "jsonb":
1208
1425
  return "DEFAULT N'{}'";
1426
+ case "boolean":
1427
+ return "DEFAULT 0";
1209
1428
  default:
1210
1429
  return super.getDefaultValue(type);
1211
1430
  }
@@ -1216,13 +1435,29 @@ var StoreOperationsMSSQL = class extends StoreOperations {
1216
1435
  }) {
1217
1436
  try {
1218
1437
  const uniqueConstraintColumns = tableName === TABLE_WORKFLOW_SNAPSHOT ? ["workflow_name", "run_id"] : [];
1438
+ const largeDataColumns = [
1439
+ "workingMemory",
1440
+ "snapshot",
1441
+ "metadata",
1442
+ "content",
1443
+ // messages.content - can be very long conversation content
1444
+ "input",
1445
+ // evals.input - test input data
1446
+ "output",
1447
+ // evals.output - test output data
1448
+ "instructions",
1449
+ // evals.instructions - evaluation instructions
1450
+ "other"
1451
+ // traces.other - additional trace data
1452
+ ];
1219
1453
  const columns = Object.entries(schema).map(([name, def]) => {
1220
1454
  const parsedName = parseSqlIdentifier(name, "column name");
1221
1455
  const constraints = [];
1222
1456
  if (def.primaryKey) constraints.push("PRIMARY KEY");
1223
1457
  if (!def.nullable) constraints.push("NOT NULL");
1224
1458
  const isIndexed = !!def.primaryKey || uniqueConstraintColumns.includes(name);
1225
- return `[${parsedName}] ${this.getSqlType(def.type, isIndexed)} ${constraints.join(" ")}`.trim();
1459
+ const useLargeStorage = largeDataColumns.includes(name);
1460
+ return `[${parsedName}] ${this.getSqlType(def.type, isIndexed, useLargeStorage)} ${constraints.join(" ")}`.trim();
1226
1461
  }).join(",\n");
1227
1462
  if (this.schemaName) {
1228
1463
  await this.setupSchema();
@@ -1309,7 +1544,19 @@ ${columns}
1309
1544
  const columnExists = Array.isArray(checkResult.recordset) && checkResult.recordset.length > 0;
1310
1545
  if (!columnExists) {
1311
1546
  const columnDef = schema[columnName];
1312
- const sqlType = this.getSqlType(columnDef.type);
1547
+ const largeDataColumns = [
1548
+ "workingMemory",
1549
+ "snapshot",
1550
+ "metadata",
1551
+ "content",
1552
+ "input",
1553
+ "output",
1554
+ "instructions",
1555
+ "other"
1556
+ ];
1557
+ const useLargeStorage = largeDataColumns.includes(columnName);
1558
+ const isIndexed = !!columnDef.primaryKey;
1559
+ const sqlType = this.getSqlType(columnDef.type, isIndexed, useLargeStorage);
1313
1560
  const nullable = columnDef.nullable === false ? "NOT NULL" : "";
1314
1561
  const defaultValue = columnDef.nullable === false ? this.getDefaultValue(columnDef.type) : "";
1315
1562
  const parsedColumnName = parseSqlIdentifier(columnName, "column name");
@@ -1322,116 +1569,656 @@ ${columns}
1322
1569
  } catch (error) {
1323
1570
  throw new MastraError(
1324
1571
  {
1325
- id: "MASTRA_STORAGE_MSSQL_STORE_ALTER_TABLE_FAILED",
1572
+ id: "MASTRA_STORAGE_MSSQL_STORE_ALTER_TABLE_FAILED",
1573
+ domain: ErrorDomain.STORAGE,
1574
+ category: ErrorCategory.THIRD_PARTY,
1575
+ details: {
1576
+ tableName
1577
+ }
1578
+ },
1579
+ error
1580
+ );
1581
+ }
1582
+ }
1583
+ async load({ tableName, keys }) {
1584
+ try {
1585
+ const keyEntries = Object.entries(keys).map(([key, value]) => [parseSqlIdentifier(key, "column name"), value]);
1586
+ const conditions = keyEntries.map(([key], i) => `[${key}] = @param${i}`).join(" AND ");
1587
+ const sql5 = `SELECT * FROM ${getTableName({ indexName: tableName, schemaName: getSchemaName(this.schemaName) })} WHERE ${conditions}`;
1588
+ const request = this.pool.request();
1589
+ keyEntries.forEach(([key, value], i) => {
1590
+ const preparedValue = this.prepareValue(value, key, tableName);
1591
+ if (preparedValue === null || preparedValue === void 0) {
1592
+ request.input(`param${i}`, this.getMssqlType(tableName, key), null);
1593
+ } else {
1594
+ request.input(`param${i}`, preparedValue);
1595
+ }
1596
+ });
1597
+ const resultSet = await request.query(sql5);
1598
+ const result = resultSet.recordset[0] || null;
1599
+ if (!result) {
1600
+ return null;
1601
+ }
1602
+ if (tableName === TABLE_WORKFLOW_SNAPSHOT) {
1603
+ const snapshot = result;
1604
+ if (typeof snapshot.snapshot === "string") {
1605
+ snapshot.snapshot = JSON.parse(snapshot.snapshot);
1606
+ }
1607
+ return snapshot;
1608
+ }
1609
+ return result;
1610
+ } catch (error) {
1611
+ throw new MastraError(
1612
+ {
1613
+ id: "MASTRA_STORAGE_MSSQL_STORE_LOAD_FAILED",
1614
+ domain: ErrorDomain.STORAGE,
1615
+ category: ErrorCategory.THIRD_PARTY,
1616
+ details: {
1617
+ tableName
1618
+ }
1619
+ },
1620
+ error
1621
+ );
1622
+ }
1623
+ }
1624
+ async batchInsert({ tableName, records }) {
1625
+ const transaction = this.pool.transaction();
1626
+ try {
1627
+ await transaction.begin();
1628
+ for (const record of records) {
1629
+ await this.insert({ tableName, record, transaction });
1630
+ }
1631
+ await transaction.commit();
1632
+ } catch (error) {
1633
+ await transaction.rollback();
1634
+ throw new MastraError(
1635
+ {
1636
+ id: "MASTRA_STORAGE_MSSQL_STORE_BATCH_INSERT_FAILED",
1637
+ domain: ErrorDomain.STORAGE,
1638
+ category: ErrorCategory.THIRD_PARTY,
1639
+ details: {
1640
+ tableName,
1641
+ numberOfRecords: records.length
1642
+ }
1643
+ },
1644
+ error
1645
+ );
1646
+ }
1647
+ }
1648
+ async dropTable({ tableName }) {
1649
+ try {
1650
+ const tableNameWithSchema = getTableName({ indexName: tableName, schemaName: getSchemaName(this.schemaName) });
1651
+ await this.pool.request().query(`DROP TABLE IF EXISTS ${tableNameWithSchema}`);
1652
+ } catch (error) {
1653
+ throw new MastraError(
1654
+ {
1655
+ id: "MASTRA_STORAGE_MSSQL_STORE_DROP_TABLE_FAILED",
1656
+ domain: ErrorDomain.STORAGE,
1657
+ category: ErrorCategory.THIRD_PARTY,
1658
+ details: {
1659
+ tableName
1660
+ }
1661
+ },
1662
+ error
1663
+ );
1664
+ }
1665
+ }
1666
+ /**
1667
+ * Prepares a value for database operations, handling Date objects and JSON serialization
1668
+ */
1669
+ prepareValue(value, columnName, tableName) {
1670
+ if (value === null || value === void 0) {
1671
+ return value;
1672
+ }
1673
+ if (value instanceof Date) {
1674
+ return value;
1675
+ }
1676
+ const schema = TABLE_SCHEMAS[tableName];
1677
+ const columnSchema = schema?.[columnName];
1678
+ if (columnSchema?.type === "boolean") {
1679
+ return value ? 1 : 0;
1680
+ }
1681
+ if (columnSchema?.type === "jsonb") {
1682
+ return JSON.stringify(value);
1683
+ }
1684
+ if (typeof value === "object") {
1685
+ return JSON.stringify(value);
1686
+ }
1687
+ return value;
1688
+ }
1689
+ /**
1690
+ * Maps TABLE_SCHEMAS types to mssql param types (used when value is null)
1691
+ */
1692
+ getMssqlType(tableName, columnName) {
1693
+ const col = TABLE_SCHEMAS[tableName]?.[columnName];
1694
+ switch (col?.type) {
1695
+ case "text":
1696
+ return sql2.NVarChar;
1697
+ case "timestamp":
1698
+ return sql2.DateTime2;
1699
+ case "uuid":
1700
+ return sql2.UniqueIdentifier;
1701
+ case "jsonb":
1702
+ return sql2.NVarChar;
1703
+ case "integer":
1704
+ return sql2.Int;
1705
+ case "bigint":
1706
+ return sql2.BigInt;
1707
+ case "float":
1708
+ return sql2.Float;
1709
+ case "boolean":
1710
+ return sql2.Bit;
1711
+ default:
1712
+ return sql2.NVarChar;
1713
+ }
1714
+ }
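The explicit type map above matters chiefly when a parameter value is null: the mssql driver cannot infer a SQL type from null, so the column's schema type has to be supplied explicitly. A minimal sketch of the same binding pattern outside this package (the pool, table, and column names are illustrative assumptions, not taken from the diff):

import sql from 'mssql';

// Hypothetical helper: bind a possibly-null JSON column the way prepareValue/getMssqlType do.
async function setThreadMetadata(pool, threadId, metadata) {
  const request = pool.request();
  request.input('id', threadId);
  if (metadata === null || metadata === undefined) {
    // null needs an explicit type, otherwise the driver cannot choose one
    request.input('metadata', sql.NVarChar, null);
  } else {
    // objects are serialized to JSON text, matching the jsonb handling above
    request.input('metadata', JSON.stringify(metadata));
  }
  await request.query('UPDATE mastra_threads SET metadata = @metadata WHERE id = @id');
}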
1715
+ /**
1716
+ * Update a single record in the database
1717
+ */
1718
+ async update({
1719
+ tableName,
1720
+ keys,
1721
+ data,
1722
+ transaction
1723
+ }) {
1724
+ try {
1725
+ if (!data || Object.keys(data).length === 0) {
1726
+ throw new MastraError({
1727
+ id: "MASTRA_STORAGE_MSSQL_UPDATE_EMPTY_DATA",
1728
+ domain: ErrorDomain.STORAGE,
1729
+ category: ErrorCategory.USER,
1730
+ text: "Cannot update with empty data payload"
1731
+ });
1732
+ }
1733
+ if (!keys || Object.keys(keys).length === 0) {
1734
+ throw new MastraError({
1735
+ id: "MASTRA_STORAGE_MSSQL_UPDATE_EMPTY_KEYS",
1736
+ domain: ErrorDomain.STORAGE,
1737
+ category: ErrorCategory.USER,
1738
+ text: "Cannot update without keys to identify records"
1739
+ });
1740
+ }
1741
+ const setClauses = [];
1742
+ const request = transaction ? transaction.request() : this.pool.request();
1743
+ let paramIndex = 0;
1744
+ Object.entries(data).forEach(([key, value]) => {
1745
+ const parsedKey = parseSqlIdentifier(key, "column name");
1746
+ const paramName = `set${paramIndex++}`;
1747
+ setClauses.push(`[${parsedKey}] = @${paramName}`);
1748
+ const preparedValue = this.prepareValue(value, key, tableName);
1749
+ if (preparedValue === null || preparedValue === void 0) {
1750
+ request.input(paramName, this.getMssqlType(tableName, key), null);
1751
+ } else {
1752
+ request.input(paramName, preparedValue);
1753
+ }
1754
+ });
1755
+ const whereConditions = [];
1756
+ Object.entries(keys).forEach(([key, value]) => {
1757
+ const parsedKey = parseSqlIdentifier(key, "column name");
1758
+ const paramName = `where${paramIndex++}`;
1759
+ whereConditions.push(`[${parsedKey}] = @${paramName}`);
1760
+ const preparedValue = this.prepareValue(value, key, tableName);
1761
+ if (preparedValue === null || preparedValue === void 0) {
1762
+ request.input(paramName, this.getMssqlType(tableName, key), null);
1763
+ } else {
1764
+ request.input(paramName, preparedValue);
1765
+ }
1766
+ });
1767
+ const tableName_ = getTableName({
1768
+ indexName: tableName,
1769
+ schemaName: getSchemaName(this.schemaName)
1770
+ });
1771
+ const updateSql = `UPDATE ${tableName_} SET ${setClauses.join(", ")} WHERE ${whereConditions.join(" AND ")}`;
1772
+ await request.query(updateSql);
1773
+ } catch (error) {
1774
+ throw new MastraError(
1775
+ {
1776
+ id: "MASTRA_STORAGE_MSSQL_STORE_UPDATE_FAILED",
1777
+ domain: ErrorDomain.STORAGE,
1778
+ category: ErrorCategory.THIRD_PARTY,
1779
+ details: {
1780
+ tableName
1781
+ }
1782
+ },
1783
+ error
1784
+ );
1785
+ }
1786
+ }
1787
+ /**
1788
+ * Update multiple records in a single batch transaction
1789
+ */
1790
+ async batchUpdate({
1791
+ tableName,
1792
+ updates
1793
+ }) {
1794
+ const transaction = this.pool.transaction();
1795
+ try {
1796
+ await transaction.begin();
1797
+ for (const { keys, data } of updates) {
1798
+ await this.update({ tableName, keys, data, transaction });
1799
+ }
1800
+ await transaction.commit();
1801
+ } catch (error) {
1802
+ await transaction.rollback();
1803
+ throw new MastraError(
1804
+ {
1805
+ id: "MASTRA_STORAGE_MSSQL_STORE_BATCH_UPDATE_FAILED",
1806
+ domain: ErrorDomain.STORAGE,
1807
+ category: ErrorCategory.THIRD_PARTY,
1808
+ details: {
1809
+ tableName,
1810
+ numberOfRecords: updates.length
1811
+ }
1812
+ },
1813
+ error
1814
+ );
1815
+ }
1816
+ }
1817
+ /**
1818
+ * Delete multiple records by keys
1819
+ */
1820
+ async batchDelete({ tableName, keys }) {
1821
+ if (keys.length === 0) {
1822
+ return;
1823
+ }
1824
+ const tableName_ = getTableName({
1825
+ indexName: tableName,
1826
+ schemaName: getSchemaName(this.schemaName)
1827
+ });
1828
+ const transaction = this.pool.transaction();
1829
+ try {
1830
+ await transaction.begin();
1831
+ for (const keySet of keys) {
1832
+ const conditions = [];
1833
+ const request = transaction.request();
1834
+ let paramIndex = 0;
1835
+ Object.entries(keySet).forEach(([key, value]) => {
1836
+ const parsedKey = parseSqlIdentifier(key, "column name");
1837
+ const paramName = `p${paramIndex++}`;
1838
+ conditions.push(`[${parsedKey}] = @${paramName}`);
1839
+ const preparedValue = this.prepareValue(value, key, tableName);
1840
+ if (preparedValue === null || preparedValue === void 0) {
1841
+ request.input(paramName, this.getMssqlType(tableName, key), null);
1842
+ } else {
1843
+ request.input(paramName, preparedValue);
1844
+ }
1845
+ });
1846
+ const deleteSql = `DELETE FROM ${tableName_} WHERE ${conditions.join(" AND ")}`;
1847
+ await request.query(deleteSql);
1848
+ }
1849
+ await transaction.commit();
1850
+ } catch (error) {
1851
+ await transaction.rollback();
1852
+ throw new MastraError(
1853
+ {
1854
+ id: "MASTRA_STORAGE_MSSQL_STORE_BATCH_DELETE_FAILED",
1855
+ domain: ErrorDomain.STORAGE,
1856
+ category: ErrorCategory.THIRD_PARTY,
1857
+ details: {
1858
+ tableName,
1859
+ numberOfRecords: keys.length
1860
+ }
1861
+ },
1862
+ error
1863
+ );
1864
+ }
1865
+ }
1866
+ /**
1867
+ * Create a new index on a table
1868
+ */
1869
+ async createIndex(options) {
1870
+ try {
1871
+ const { name, table, columns, unique = false, where } = options;
1872
+ const schemaName = this.schemaName || "dbo";
1873
+ const fullTableName = getTableName({
1874
+ indexName: table,
1875
+ schemaName: getSchemaName(this.schemaName)
1876
+ });
1877
+ const indexNameSafe = parseSqlIdentifier(name, "index name");
1878
+ const checkRequest = this.pool.request();
1879
+ checkRequest.input("indexName", indexNameSafe);
1880
+ checkRequest.input("schemaName", schemaName);
1881
+ checkRequest.input("tableName", table);
1882
+ const indexExists = await checkRequest.query(`
1883
+ SELECT 1 as found
1884
+ FROM sys.indexes i
1885
+ INNER JOIN sys.tables t ON i.object_id = t.object_id
1886
+ INNER JOIN sys.schemas s ON t.schema_id = s.schema_id
1887
+ WHERE i.name = @indexName
1888
+ AND s.name = @schemaName
1889
+ AND t.name = @tableName
1890
+ `);
1891
+ if (indexExists.recordset && indexExists.recordset.length > 0) {
1892
+ return;
1893
+ }
1894
+ const uniqueStr = unique ? "UNIQUE " : "";
1895
+ const columnsStr = columns.map((col) => {
1896
+ if (col.includes(" DESC") || col.includes(" ASC")) {
1897
+ const [colName, ...modifiers] = col.split(" ");
1898
+ if (!colName) {
1899
+ throw new Error(`Invalid column specification: ${col}`);
1900
+ }
1901
+ return `[${parseSqlIdentifier(colName, "column name")}] ${modifiers.join(" ")}`;
1902
+ }
1903
+ return `[${parseSqlIdentifier(col, "column name")}]`;
1904
+ }).join(", ");
1905
+ const whereStr = where ? ` WHERE ${where}` : "";
1906
+ const createIndexSql = `CREATE ${uniqueStr}INDEX [${indexNameSafe}] ON ${fullTableName} (${columnsStr})${whereStr}`;
1907
+ await this.pool.request().query(createIndexSql);
1908
+ } catch (error) {
1909
+ throw new MastraError(
1910
+ {
1911
+ id: "MASTRA_STORAGE_MSSQL_INDEX_CREATE_FAILED",
1912
+ domain: ErrorDomain.STORAGE,
1913
+ category: ErrorCategory.THIRD_PARTY,
1914
+ details: {
1915
+ indexName: options.name,
1916
+ tableName: options.table
1917
+ }
1918
+ },
1919
+ error
1920
+ );
1921
+ }
1922
+ }
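createIndex is idempotent: it queries sys.indexes first and returns silently when the index already exists, so it is safe to call on every startup. A minimal usage sketch (assuming an initialized MSSQLStore instance named `store`; the index, table, and column names are illustrative):

// Sketch: a filtered, descending composite index; re-running this is a no-op.
await store.createIndex({
  name: 'mastra_threads_title_idx',
  table: 'mastra_threads',
  columns: ['title', 'createdAt DESC'],
  unique: false,
  where: '[title] IS NOT NULL', // appended verbatim as a filtered-index predicate
});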
1923
+ /**
1924
+ * Drop an existing index
1925
+ */
1926
+ async dropIndex(indexName) {
1927
+ try {
1928
+ const schemaName = this.schemaName || "dbo";
1929
+ const indexNameSafe = parseSqlIdentifier(indexName, "index name");
1930
+ const checkRequest = this.pool.request();
1931
+ checkRequest.input("indexName", indexNameSafe);
1932
+ checkRequest.input("schemaName", schemaName);
1933
+ const result = await checkRequest.query(`
1934
+ SELECT t.name as table_name
1935
+ FROM sys.indexes i
1936
+ INNER JOIN sys.tables t ON i.object_id = t.object_id
1937
+ INNER JOIN sys.schemas s ON t.schema_id = s.schema_id
1938
+ WHERE i.name = @indexName
1939
+ AND s.name = @schemaName
1940
+ `);
1941
+ if (!result.recordset || result.recordset.length === 0) {
1942
+ return;
1943
+ }
1944
+ if (result.recordset.length > 1) {
1945
+ const tables = result.recordset.map((r) => r.table_name).join(", ");
1946
+ throw new MastraError({
1947
+ id: "MASTRA_STORAGE_MSSQL_INDEX_AMBIGUOUS",
1948
+ domain: ErrorDomain.STORAGE,
1949
+ category: ErrorCategory.USER,
1950
+ text: `Index "${indexNameSafe}" exists on multiple tables (${tables}) in schema "${schemaName}". Please drop indexes manually or ensure unique index names.`
1951
+ });
1952
+ }
1953
+ const tableName = result.recordset[0].table_name;
1954
+ const fullTableName = getTableName({
1955
+ indexName: tableName,
1956
+ schemaName: getSchemaName(this.schemaName)
1957
+ });
1958
+ const dropSql = `DROP INDEX [${indexNameSafe}] ON ${fullTableName}`;
1959
+ await this.pool.request().query(dropSql);
1960
+ } catch (error) {
1961
+ throw new MastraError(
1962
+ {
1963
+ id: "MASTRA_STORAGE_MSSQL_INDEX_DROP_FAILED",
1326
1964
  domain: ErrorDomain.STORAGE,
1327
1965
  category: ErrorCategory.THIRD_PARTY,
1328
1966
  details: {
1329
- tableName
1967
+ indexName
1330
1968
  }
1331
1969
  },
1332
1970
  error
1333
1971
  );
1334
1972
  }
1335
1973
  }
1336
- async load({ tableName, keys }) {
1974
+ /**
1975
+ * List indexes for a specific table or all tables
1976
+ */
1977
+ async listIndexes(tableName) {
1337
1978
  try {
1338
- const keyEntries = Object.entries(keys).map(([key, value]) => [parseSqlIdentifier(key, "column name"), value]);
1339
- const conditions = keyEntries.map(([key], i) => `[${key}] = @param${i}`).join(" AND ");
1340
- const values = keyEntries.map(([_, value]) => value);
1341
- const sql7 = `SELECT * FROM ${getTableName({ indexName: tableName, schemaName: getSchemaName(this.schemaName) })} WHERE ${conditions}`;
1979
+ const schemaName = this.schemaName || "dbo";
1980
+ let query;
1342
1981
  const request = this.pool.request();
1343
- values.forEach((value, i) => {
1344
- request.input(`param${i}`, value);
1345
- });
1346
- const resultSet = await request.query(sql7);
1347
- const result = resultSet.recordset[0] || null;
1348
- if (!result) {
1349
- return null;
1982
+ request.input("schemaName", schemaName);
1983
+ if (tableName) {
1984
+ query = `
1985
+ SELECT
1986
+ i.name as name,
1987
+ o.name as [table],
1988
+ i.is_unique as is_unique,
1989
+ CAST(SUM(s.used_page_count) * 8 / 1024.0 AS VARCHAR(50)) + ' MB' as size
1990
+ FROM sys.indexes i
1991
+ INNER JOIN sys.objects o ON i.object_id = o.object_id
1992
+ INNER JOIN sys.schemas sch ON o.schema_id = sch.schema_id
1993
+ LEFT JOIN sys.dm_db_partition_stats s ON i.object_id = s.object_id AND i.index_id = s.index_id
1994
+ WHERE sch.name = @schemaName
1995
+ AND o.name = @tableName
1996
+ AND i.name IS NOT NULL
1997
+ GROUP BY i.name, o.name, i.is_unique
1998
+ `;
1999
+ request.input("tableName", tableName);
2000
+ } else {
2001
+ query = `
2002
+ SELECT
2003
+ i.name as name,
2004
+ o.name as [table],
2005
+ i.is_unique as is_unique,
2006
+ CAST(SUM(s.used_page_count) * 8 / 1024.0 AS VARCHAR(50)) + ' MB' as size
2007
+ FROM sys.indexes i
2008
+ INNER JOIN sys.objects o ON i.object_id = o.object_id
2009
+ INNER JOIN sys.schemas sch ON o.schema_id = sch.schema_id
2010
+ LEFT JOIN sys.dm_db_partition_stats s ON i.object_id = s.object_id AND i.index_id = s.index_id
2011
+ WHERE sch.name = @schemaName
2012
+ AND i.name IS NOT NULL
2013
+ GROUP BY i.name, o.name, i.is_unique
2014
+ `;
1350
2015
  }
1351
- if (tableName === TABLE_WORKFLOW_SNAPSHOT) {
1352
- const snapshot = result;
1353
- if (typeof snapshot.snapshot === "string") {
1354
- snapshot.snapshot = JSON.parse(snapshot.snapshot);
1355
- }
1356
- return snapshot;
2016
+ const result = await request.query(query);
2017
+ const indexes = [];
2018
+ for (const row of result.recordset) {
2019
+ const colRequest = this.pool.request();
2020
+ colRequest.input("indexName", row.name);
2021
+ colRequest.input("schemaName", schemaName);
2022
+ const colResult = await colRequest.query(`
2023
+ SELECT c.name as column_name
2024
+ FROM sys.indexes i
2025
+ INNER JOIN sys.index_columns ic ON i.object_id = ic.object_id AND i.index_id = ic.index_id
2026
+ INNER JOIN sys.columns c ON ic.object_id = c.object_id AND ic.column_id = c.column_id
2027
+ INNER JOIN sys.objects o ON i.object_id = o.object_id
2028
+ INNER JOIN sys.schemas s ON o.schema_id = s.schema_id
2029
+ WHERE i.name = @indexName
2030
+ AND s.name = @schemaName
2031
+ ORDER BY ic.key_ordinal
2032
+ `);
2033
+ indexes.push({
2034
+ name: row.name,
2035
+ table: row.table,
2036
+ columns: colResult.recordset.map((c) => c.column_name),
2037
+ unique: row.is_unique || false,
2038
+ size: row.size || "0 MB",
2039
+ definition: ""
2040
+ // MSSQL doesn't store definition like PG
2041
+ });
1357
2042
  }
1358
- return result;
2043
+ return indexes;
1359
2044
  } catch (error) {
1360
2045
  throw new MastraError(
1361
2046
  {
1362
- id: "MASTRA_STORAGE_MSSQL_STORE_LOAD_FAILED",
2047
+ id: "MASTRA_STORAGE_MSSQL_INDEX_LIST_FAILED",
1363
2048
  domain: ErrorDomain.STORAGE,
1364
2049
  category: ErrorCategory.THIRD_PARTY,
1365
- details: {
2050
+ details: tableName ? {
1366
2051
  tableName
1367
- }
2052
+ } : {}
1368
2053
  },
1369
2054
  error
1370
2055
  );
1371
2056
  }
1372
2057
  }
1373
- async batchInsert({ tableName, records }) {
1374
- const transaction = this.pool.transaction();
2058
+ /**
2059
+ * Get detailed statistics for a specific index
2060
+ */
2061
+ async describeIndex(indexName) {
1375
2062
  try {
1376
- await transaction.begin();
1377
- for (const record of records) {
1378
- await this.insert({ tableName, record });
2063
+ const schemaName = this.schemaName || "dbo";
2064
+ const request = this.pool.request();
2065
+ request.input("indexName", indexName);
2066
+ request.input("schemaName", schemaName);
2067
+ const query = `
2068
+ SELECT
2069
+ i.name as name,
2070
+ o.name as [table],
2071
+ i.is_unique as is_unique,
2072
+ CAST(SUM(s.used_page_count) * 8 / 1024.0 AS VARCHAR(50)) + ' MB' as size,
2073
+ i.type_desc as method,
2074
+ ISNULL(us.user_scans, 0) as scans,
2075
+ ISNULL(us.user_seeks + us.user_scans, 0) as tuples_read,
2076
+ ISNULL(us.user_lookups, 0) as tuples_fetched
2077
+ FROM sys.indexes i
2078
+ INNER JOIN sys.objects o ON i.object_id = o.object_id
2079
+ INNER JOIN sys.schemas sch ON o.schema_id = sch.schema_id
2080
+ LEFT JOIN sys.dm_db_partition_stats s ON i.object_id = s.object_id AND i.index_id = s.index_id
2081
+ LEFT JOIN sys.dm_db_index_usage_stats us ON i.object_id = us.object_id AND i.index_id = us.index_id
2082
+ WHERE i.name = @indexName
2083
+ AND sch.name = @schemaName
2084
+ GROUP BY i.name, o.name, i.is_unique, i.type_desc, us.user_seeks, us.user_scans, us.user_lookups
2085
+ `;
2086
+ const result = await request.query(query);
2087
+ if (!result.recordset || result.recordset.length === 0) {
2088
+ throw new Error(`Index "${indexName}" not found in schema "${schemaName}"`);
1379
2089
  }
1380
- await transaction.commit();
2090
+ const row = result.recordset[0];
2091
+ const colRequest = this.pool.request();
2092
+ colRequest.input("indexName", indexName);
2093
+ colRequest.input("schemaName", schemaName);
2094
+ const colResult = await colRequest.query(`
2095
+ SELECT c.name as column_name
2096
+ FROM sys.indexes i
2097
+ INNER JOIN sys.index_columns ic ON i.object_id = ic.object_id AND i.index_id = ic.index_id
2098
+ INNER JOIN sys.columns c ON ic.object_id = c.object_id AND ic.column_id = c.column_id
2099
+ INNER JOIN sys.objects o ON i.object_id = o.object_id
2100
+ INNER JOIN sys.schemas s ON o.schema_id = s.schema_id
2101
+ WHERE i.name = @indexName
2102
+ AND s.name = @schemaName
2103
+ ORDER BY ic.key_ordinal
2104
+ `);
2105
+ return {
2106
+ name: row.name,
2107
+ table: row.table,
2108
+ columns: colResult.recordset.map((c) => c.column_name),
2109
+ unique: row.is_unique || false,
2110
+ size: row.size || "0 MB",
2111
+ definition: "",
2112
+ method: row.method?.toLowerCase() || "nonclustered",
2113
+ scans: Number(row.scans) || 0,
2114
+ tuples_read: Number(row.tuples_read) || 0,
2115
+ tuples_fetched: Number(row.tuples_fetched) || 0
2116
+ };
1381
2117
  } catch (error) {
1382
- await transaction.rollback();
1383
2118
  throw new MastraError(
1384
2119
  {
1385
- id: "MASTRA_STORAGE_MSSQL_STORE_BATCH_INSERT_FAILED",
2120
+ id: "MASTRA_STORAGE_MSSQL_INDEX_DESCRIBE_FAILED",
1386
2121
  domain: ErrorDomain.STORAGE,
1387
2122
  category: ErrorCategory.THIRD_PARTY,
1388
2123
  details: {
1389
- tableName,
1390
- numberOfRecords: records.length
2124
+ indexName
1391
2125
  }
1392
2126
  },
1393
2127
  error
1394
2128
  );
1395
2129
  }
1396
2130
  }
1397
- async dropTable({ tableName }) {
2131
+ /**
2132
+ * Returns definitions for automatic performance indexes
2133
+ * IMPORTANT: Uses seq_id DESC instead of createdAt DESC for MSSQL due to millisecond accuracy limitations
2134
+ * NOTE: Using NVARCHAR(400) for text columns (800 bytes) leaves room for composite indexes
2135
+ */
2136
+ getAutomaticIndexDefinitions() {
2137
+ const schemaPrefix = this.schemaName ? `${this.schemaName}_` : "";
2138
+ return [
2139
+ // Composite indexes for optimal filtering + sorting performance
2140
+ // NVARCHAR(400) = 800 bytes, plus BIGINT (8 bytes) = 808 bytes total (under 900-byte limit)
2141
+ {
2142
+ name: `${schemaPrefix}mastra_threads_resourceid_seqid_idx`,
2143
+ table: TABLE_THREADS,
2144
+ columns: ["resourceId", "seq_id DESC"]
2145
+ },
2146
+ {
2147
+ name: `${schemaPrefix}mastra_messages_thread_id_seqid_idx`,
2148
+ table: TABLE_MESSAGES,
2149
+ columns: ["thread_id", "seq_id DESC"]
2150
+ },
2151
+ {
2152
+ name: `${schemaPrefix}mastra_traces_name_seqid_idx`,
2153
+ table: TABLE_TRACES,
2154
+ columns: ["name", "seq_id DESC"]
2155
+ },
2156
+ {
2157
+ name: `${schemaPrefix}mastra_scores_trace_id_span_id_seqid_idx`,
2158
+ table: TABLE_SCORERS,
2159
+ columns: ["traceId", "spanId", "seq_id DESC"]
2160
+ },
2161
+ // Spans indexes for optimal trace querying
2162
+ {
2163
+ name: `${schemaPrefix}mastra_ai_spans_traceid_startedat_idx`,
2164
+ table: TABLE_AI_SPANS,
2165
+ columns: ["traceId", "startedAt DESC"]
2166
+ },
2167
+ {
2168
+ name: `${schemaPrefix}mastra_ai_spans_parentspanid_startedat_idx`,
2169
+ table: TABLE_AI_SPANS,
2170
+ columns: ["parentSpanId", "startedAt DESC"]
2171
+ },
2172
+ {
2173
+ name: `${schemaPrefix}mastra_ai_spans_name_idx`,
2174
+ table: TABLE_AI_SPANS,
2175
+ columns: ["name"]
2176
+ },
2177
+ {
2178
+ name: `${schemaPrefix}mastra_ai_spans_spantype_startedat_idx`,
2179
+ table: TABLE_AI_SPANS,
2180
+ columns: ["spanType", "startedAt DESC"]
2181
+ }
2182
+ ];
2183
+ }
2184
+ /**
2185
+ * Creates automatic indexes for optimal query performance
2186
+ * Uses getAutomaticIndexDefinitions() to determine which indexes to create
2187
+ */
2188
+ async createAutomaticIndexes() {
1398
2189
  try {
1399
- const tableNameWithSchema = getTableName({ indexName: tableName, schemaName: getSchemaName(this.schemaName) });
1400
- await this.pool.request().query(`DROP TABLE IF EXISTS ${tableNameWithSchema}`);
2190
+ const indexes = this.getAutomaticIndexDefinitions();
2191
+ for (const indexOptions of indexes) {
2192
+ try {
2193
+ await this.createIndex(indexOptions);
2194
+ } catch (error) {
2195
+ this.logger?.warn?.(`Failed to create index ${indexOptions.name}:`, error);
2196
+ }
2197
+ }
1401
2198
  } catch (error) {
1402
2199
  throw new MastraError(
1403
2200
  {
1404
- id: "MASTRA_STORAGE_MSSQL_STORE_DROP_TABLE_FAILED",
2201
+ id: "MASTRA_STORAGE_MSSQL_STORE_CREATE_PERFORMANCE_INDEXES_FAILED",
1405
2202
  domain: ErrorDomain.STORAGE,
1406
- category: ErrorCategory.THIRD_PARTY,
1407
- details: {
1408
- tableName
1409
- }
2203
+ category: ErrorCategory.THIRD_PARTY
1410
2204
  },
1411
2205
  error
1412
2206
  );
1413
2207
  }
1414
2208
  }
1415
2209
  };
1416
- function parseJSON(jsonString) {
1417
- try {
1418
- return JSON.parse(jsonString);
1419
- } catch {
1420
- return jsonString;
1421
- }
1422
- }
1423
2210
  function transformScoreRow(row) {
1424
2211
  return {
1425
2212
  ...row,
1426
- input: parseJSON(row.input),
1427
- scorer: parseJSON(row.scorer),
1428
- preprocessStepResult: parseJSON(row.preprocessStepResult),
1429
- analyzeStepResult: parseJSON(row.analyzeStepResult),
1430
- metadata: parseJSON(row.metadata),
1431
- output: parseJSON(row.output),
1432
- additionalContext: parseJSON(row.additionalContext),
1433
- runtimeContext: parseJSON(row.runtimeContext),
1434
- entity: parseJSON(row.entity),
2213
+ input: safelyParseJSON(row.input),
2214
+ scorer: safelyParseJSON(row.scorer),
2215
+ preprocessStepResult: safelyParseJSON(row.preprocessStepResult),
2216
+ analyzeStepResult: safelyParseJSON(row.analyzeStepResult),
2217
+ metadata: safelyParseJSON(row.metadata),
2218
+ output: safelyParseJSON(row.output),
2219
+ additionalContext: safelyParseJSON(row.additionalContext),
2220
+ requestContext: safelyParseJSON(row.requestContext),
2221
+ entity: safelyParseJSON(row.entity),
1435
2222
  createdAt: row.createdAt,
1436
2223
  updatedAt: row.updatedAt
1437
2224
  };
@@ -1488,7 +2275,7 @@ var ScoresMSSQL = class extends ScoresStorage {
1488
2275
  );
1489
2276
  }
1490
2277
  try {
1491
- const scoreId = crypto.randomUUID();
2278
+ const scoreId = randomUUID();
1492
2279
  const {
1493
2280
  scorer,
1494
2281
  preprocessStepResult,
@@ -1497,7 +2284,7 @@ var ScoresMSSQL = class extends ScoresStorage {
1497
2284
  input,
1498
2285
  output,
1499
2286
  additionalContext,
1500
- runtimeContext,
2287
+ requestContext,
1501
2288
  entity,
1502
2289
  ...rest
1503
2290
  } = validatedScore;
@@ -1506,15 +2293,15 @@ var ScoresMSSQL = class extends ScoresStorage {
1506
2293
  record: {
1507
2294
  id: scoreId,
1508
2295
  ...rest,
1509
- input: JSON.stringify(input) || "",
1510
- output: JSON.stringify(output) || "",
1511
- preprocessStepResult: preprocessStepResult ? JSON.stringify(preprocessStepResult) : null,
1512
- analyzeStepResult: analyzeStepResult ? JSON.stringify(analyzeStepResult) : null,
1513
- metadata: metadata ? JSON.stringify(metadata) : null,
1514
- additionalContext: additionalContext ? JSON.stringify(additionalContext) : null,
1515
- runtimeContext: runtimeContext ? JSON.stringify(runtimeContext) : null,
1516
- entity: entity ? JSON.stringify(entity) : null,
1517
- scorer: scorer ? JSON.stringify(scorer) : null,
2296
+ input: input || "",
2297
+ output: output || "",
2298
+ preprocessStepResult: preprocessStepResult || null,
2299
+ analyzeStepResult: analyzeStepResult || null,
2300
+ metadata: metadata || null,
2301
+ additionalContext: additionalContext || null,
2302
+ requestContext: requestContext || null,
2303
+ entity: entity || null,
2304
+ scorer: scorer || null,
1518
2305
  createdAt: (/* @__PURE__ */ new Date()).toISOString(),
1519
2306
  updatedAt: (/* @__PURE__ */ new Date()).toISOString()
1520
2307
  }
@@ -1532,41 +2319,70 @@ var ScoresMSSQL = class extends ScoresStorage {
1532
2319
  );
1533
2320
  }
1534
2321
  }
1535
- async getScoresByScorerId({
2322
+ async listScoresByScorerId({
1536
2323
  scorerId,
1537
- pagination
2324
+ pagination,
2325
+ entityId,
2326
+ entityType,
2327
+ source
1538
2328
  }) {
1539
2329
  try {
1540
- const request = this.pool.request();
1541
- request.input("p1", scorerId);
1542
- const totalResult = await request.query(
1543
- `SELECT COUNT(*) as count FROM ${getTableName({ indexName: TABLE_SCORERS, schemaName: getSchemaName(this.schema) })} WHERE [scorerId] = @p1`
1544
- );
2330
+ const conditions = ["[scorerId] = @p1"];
2331
+ const params = { p1: scorerId };
2332
+ let paramIndex = 2;
2333
+ if (entityId) {
2334
+ conditions.push(`[entityId] = @p${paramIndex}`);
2335
+ params[`p${paramIndex}`] = entityId;
2336
+ paramIndex++;
2337
+ }
2338
+ if (entityType) {
2339
+ conditions.push(`[entityType] = @p${paramIndex}`);
2340
+ params[`p${paramIndex}`] = entityType;
2341
+ paramIndex++;
2342
+ }
2343
+ if (source) {
2344
+ conditions.push(`[source] = @p${paramIndex}`);
2345
+ params[`p${paramIndex}`] = source;
2346
+ paramIndex++;
2347
+ }
2348
+ const whereClause = conditions.join(" AND ");
2349
+ const tableName = getTableName({ indexName: TABLE_SCORERS, schemaName: getSchemaName(this.schema) });
2350
+ const countRequest = this.pool.request();
2351
+ Object.entries(params).forEach(([key, value]) => {
2352
+ countRequest.input(key, value);
2353
+ });
2354
+ const totalResult = await countRequest.query(`SELECT COUNT(*) as count FROM ${tableName} WHERE ${whereClause}`);
1545
2355
  const total = totalResult.recordset[0]?.count || 0;
2356
+ const { page, perPage: perPageInput } = pagination;
1546
2357
  if (total === 0) {
1547
2358
  return {
1548
2359
  pagination: {
1549
2360
  total: 0,
1550
- page: pagination.page,
1551
- perPage: pagination.perPage,
2361
+ page,
2362
+ perPage: perPageInput,
1552
2363
  hasMore: false
1553
2364
  },
1554
2365
  scores: []
1555
2366
  };
1556
2367
  }
2368
+ const perPage = normalizePerPage(perPageInput, 100);
2369
+ const { offset: start, perPage: perPageForResponse } = calculatePagination(page, perPageInput, perPage);
2370
+ const limitValue = perPageInput === false ? total : perPage;
2371
+ const end = perPageInput === false ? total : start + perPage;
1557
2372
  const dataRequest = this.pool.request();
1558
- dataRequest.input("p1", scorerId);
1559
- dataRequest.input("p2", pagination.perPage);
1560
- dataRequest.input("p3", pagination.page * pagination.perPage);
1561
- const result = await dataRequest.query(
1562
- `SELECT * FROM ${getTableName({ indexName: TABLE_SCORERS, schemaName: getSchemaName(this.schema) })} WHERE [scorerId] = @p1 ORDER BY [createdAt] DESC OFFSET @p3 ROWS FETCH NEXT @p2 ROWS ONLY`
1563
- );
2373
+ Object.entries(params).forEach(([key, value]) => {
2374
+ dataRequest.input(key, value);
2375
+ });
2376
+ dataRequest.input("perPage", limitValue);
2377
+ dataRequest.input("offset", start);
2378
+ const dataQuery = `SELECT * FROM ${tableName} WHERE ${whereClause} ORDER BY [createdAt] DESC OFFSET @offset ROWS FETCH NEXT @perPage ROWS ONLY`;
2379
+ const result = await dataRequest.query(dataQuery);
1564
2380
  return {
1565
2381
  pagination: {
1566
2382
  total: Number(total),
1567
- page: pagination.page,
1568
- perPage: pagination.perPage,
1569
- hasMore: Number(total) > (pagination.page + 1) * pagination.perPage
2383
+ page,
2384
+ perPage: perPageForResponse,
2385
+ hasMore: end < total
1570
2386
  },
1571
2387
  scores: result.recordset.map((row) => transformScoreRow(row))
1572
2388
  };
@@ -1582,7 +2398,7 @@ var ScoresMSSQL = class extends ScoresStorage {
1582
2398
  );
1583
2399
  }
1584
2400
  }
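The renamed listScoresByScorerId now accepts optional entityId, entityType, and source filters alongside the pagination object, and perPage: false disables the page limit. A minimal usage sketch (assuming an initialized MSSQLStore named `store`; the scorer id and filter values are illustrative):

// Sketch: first page of live scores for one scorer, 20 rows per page.
const { scores, pagination } = await store.listScoresByScorerId({
  scorerId: 'answer-relevancy',
  source: 'LIVE',
  pagination: { page: 0, perPage: 20 },
});
// pagination.hasMore signals whether a further page exists;
// pagination: { page: 0, perPage: false } would return every matching row.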
1585
- async getScoresByRunId({
2401
+ async listScoresByRunId({
1586
2402
  runId,
1587
2403
  pagination
1588
2404
  }) {
@@ -1593,30 +2409,35 @@ var ScoresMSSQL = class extends ScoresStorage {
1593
2409
  `SELECT COUNT(*) as count FROM ${getTableName({ indexName: TABLE_SCORERS, schemaName: getSchemaName(this.schema) })} WHERE [runId] = @p1`
1594
2410
  );
1595
2411
  const total = totalResult.recordset[0]?.count || 0;
2412
+ const { page, perPage: perPageInput } = pagination;
1596
2413
  if (total === 0) {
1597
2414
  return {
1598
2415
  pagination: {
1599
2416
  total: 0,
1600
- page: pagination.page,
1601
- perPage: pagination.perPage,
2417
+ page,
2418
+ perPage: perPageInput,
1602
2419
  hasMore: false
1603
2420
  },
1604
2421
  scores: []
1605
2422
  };
1606
2423
  }
2424
+ const perPage = normalizePerPage(perPageInput, 100);
2425
+ const { offset: start, perPage: perPageForResponse } = calculatePagination(page, perPageInput, perPage);
2426
+ const limitValue = perPageInput === false ? total : perPage;
2427
+ const end = perPageInput === false ? total : start + perPage;
1607
2428
  const dataRequest = this.pool.request();
1608
2429
  dataRequest.input("p1", runId);
1609
- dataRequest.input("p2", pagination.perPage);
1610
- dataRequest.input("p3", pagination.page * pagination.perPage);
2430
+ dataRequest.input("p2", limitValue);
2431
+ dataRequest.input("p3", start);
1611
2432
  const result = await dataRequest.query(
1612
2433
  `SELECT * FROM ${getTableName({ indexName: TABLE_SCORERS, schemaName: getSchemaName(this.schema) })} WHERE [runId] = @p1 ORDER BY [createdAt] DESC OFFSET @p3 ROWS FETCH NEXT @p2 ROWS ONLY`
1613
2434
  );
1614
2435
  return {
1615
2436
  pagination: {
1616
2437
  total: Number(total),
1617
- page: pagination.page,
1618
- perPage: pagination.perPage,
1619
- hasMore: Number(total) > (pagination.page + 1) * pagination.perPage
2438
+ page,
2439
+ perPage: perPageForResponse,
2440
+ hasMore: end < total
1620
2441
  },
1621
2442
  scores: result.recordset.map((row) => transformScoreRow(row))
1622
2443
  };
@@ -1632,7 +2453,7 @@ var ScoresMSSQL = class extends ScoresStorage {
1632
2453
  );
1633
2454
  }
1634
2455
  }
1635
- async getScoresByEntityId({
2456
+ async listScoresByEntityId({
1636
2457
  entityId,
1637
2458
  entityType,
1638
2459
  pagination
@@ -1645,31 +2466,36 @@ var ScoresMSSQL = class extends ScoresStorage {
1645
2466
  `SELECT COUNT(*) as count FROM ${getTableName({ indexName: TABLE_SCORERS, schemaName: getSchemaName(this.schema) })} WHERE [entityId] = @p1 AND [entityType] = @p2`
1646
2467
  );
1647
2468
  const total = totalResult.recordset[0]?.count || 0;
2469
+ const { page, perPage: perPageInput } = pagination;
2470
+ const perPage = normalizePerPage(perPageInput, 100);
2471
+ const { offset: start, perPage: perPageForResponse } = calculatePagination(page, perPageInput, perPage);
1648
2472
  if (total === 0) {
1649
2473
  return {
1650
2474
  pagination: {
1651
2475
  total: 0,
1652
- page: pagination.page,
1653
- perPage: pagination.perPage,
2476
+ page,
2477
+ perPage: perPageForResponse,
1654
2478
  hasMore: false
1655
2479
  },
1656
2480
  scores: []
1657
2481
  };
1658
2482
  }
2483
+ const limitValue = perPageInput === false ? total : perPage;
2484
+ const end = perPageInput === false ? total : start + perPage;
1659
2485
  const dataRequest = this.pool.request();
1660
2486
  dataRequest.input("p1", entityId);
1661
2487
  dataRequest.input("p2", entityType);
1662
- dataRequest.input("p3", pagination.perPage);
1663
- dataRequest.input("p4", pagination.page * pagination.perPage);
2488
+ dataRequest.input("p3", limitValue);
2489
+ dataRequest.input("p4", start);
1664
2490
  const result = await dataRequest.query(
1665
2491
  `SELECT * FROM ${getTableName({ indexName: TABLE_SCORERS, schemaName: getSchemaName(this.schema) })} WHERE [entityId] = @p1 AND [entityType] = @p2 ORDER BY [createdAt] DESC OFFSET @p4 ROWS FETCH NEXT @p3 ROWS ONLY`
1666
2492
  );
1667
2493
  return {
1668
2494
  pagination: {
1669
2495
  total: Number(total),
1670
- page: pagination.page,
1671
- perPage: pagination.perPage,
1672
- hasMore: Number(total) > (pagination.page + 1) * pagination.perPage
2496
+ page,
2497
+ perPage: perPageForResponse,
2498
+ hasMore: end < total
1673
2499
  },
1674
2500
  scores: result.recordset.map((row) => transformScoreRow(row))
1675
2501
  };
@@ -1685,7 +2511,7 @@ var ScoresMSSQL = class extends ScoresStorage {
1685
2511
  );
1686
2512
  }
1687
2513
  }
1688
- async getScoresBySpan({
2514
+ async listScoresBySpan({
1689
2515
  traceId,
1690
2516
  spanId,
1691
2517
  pagination
@@ -1698,34 +2524,38 @@ var ScoresMSSQL = class extends ScoresStorage {
1698
2524
  `SELECT COUNT(*) as count FROM ${getTableName({ indexName: TABLE_SCORERS, schemaName: getSchemaName(this.schema) })} WHERE [traceId] = @p1 AND [spanId] = @p2`
1699
2525
  );
1700
2526
  const total = totalResult.recordset[0]?.count || 0;
2527
+ const { page, perPage: perPageInput } = pagination;
2528
+ const perPage = normalizePerPage(perPageInput, 100);
2529
+ const { offset: start, perPage: perPageForResponse } = calculatePagination(page, perPageInput, perPage);
1701
2530
  if (total === 0) {
1702
2531
  return {
1703
2532
  pagination: {
1704
2533
  total: 0,
1705
- page: pagination.page,
1706
- perPage: pagination.perPage,
2534
+ page,
2535
+ perPage: perPageForResponse,
1707
2536
  hasMore: false
1708
2537
  },
1709
2538
  scores: []
1710
2539
  };
1711
2540
  }
1712
- const limit = pagination.perPage + 1;
2541
+ const limitValue = perPageInput === false ? total : perPage;
2542
+ const end = perPageInput === false ? total : start + perPage;
1713
2543
  const dataRequest = this.pool.request();
1714
2544
  dataRequest.input("p1", traceId);
1715
2545
  dataRequest.input("p2", spanId);
1716
- dataRequest.input("p3", limit);
1717
- dataRequest.input("p4", pagination.page * pagination.perPage);
2546
+ dataRequest.input("p3", limitValue);
2547
+ dataRequest.input("p4", start);
1718
2548
  const result = await dataRequest.query(
1719
2549
  `SELECT * FROM ${getTableName({ indexName: TABLE_SCORERS, schemaName: getSchemaName(this.schema) })} WHERE [traceId] = @p1 AND [spanId] = @p2 ORDER BY [createdAt] DESC OFFSET @p4 ROWS FETCH NEXT @p3 ROWS ONLY`
1720
2550
  );
1721
2551
  return {
1722
2552
  pagination: {
1723
2553
  total: Number(total),
1724
- page: pagination.page,
1725
- perPage: pagination.perPage,
1726
- hasMore: result.recordset.length > pagination.perPage
2554
+ page,
2555
+ perPage: perPageForResponse,
2556
+ hasMore: end < total
1727
2557
  },
1728
- scores: result.recordset.slice(0, pagination.perPage).map((row) => transformScoreRow(row))
2558
+ scores: result.recordset.map((row) => transformScoreRow(row))
1729
2559
  };
1730
2560
  } catch (error) {
1731
2561
  throw new MastraError(
@@ -1740,7 +2570,7 @@ var ScoresMSSQL = class extends ScoresStorage {
1740
2570
  }
1741
2571
  }
1742
2572
  };
1743
- var TracesMSSQL = class extends TracesStorage {
2573
+ var WorkflowsMSSQL = class extends WorkflowsStorage {
1744
2574
  pool;
1745
2575
  operations;
1746
2576
  schema;
@@ -1754,207 +2584,164 @@ var TracesMSSQL = class extends TracesStorage {
1754
2584
  this.operations = operations;
1755
2585
  this.schema = schema;
1756
2586
  }
1757
- /** @deprecated use getTracesPaginated instead*/
1758
- async getTraces(args) {
1759
- if (args.fromDate || args.toDate) {
1760
- args.dateRange = {
1761
- start: args.fromDate,
1762
- end: args.toDate
1763
- };
1764
- }
1765
- const result = await this.getTracesPaginated(args);
1766
- return result.traces;
1767
- }
1768
- async getTracesPaginated(args) {
1769
- const { name, scope, page = 0, perPage: perPageInput, attributes, filters, dateRange } = args;
1770
- const fromDate = dateRange?.start;
1771
- const toDate = dateRange?.end;
1772
- const perPage = perPageInput !== void 0 ? perPageInput : 100;
1773
- const currentOffset = page * perPage;
1774
- const paramMap = {};
1775
- const conditions = [];
1776
- let paramIndex = 1;
1777
- if (name) {
1778
- const paramName = `p${paramIndex++}`;
1779
- conditions.push(`[name] LIKE @${paramName}`);
1780
- paramMap[paramName] = `${name}%`;
1781
- }
1782
- if (scope) {
1783
- const paramName = `p${paramIndex++}`;
1784
- conditions.push(`[scope] = @${paramName}`);
1785
- paramMap[paramName] = scope;
1786
- }
1787
- if (attributes) {
1788
- Object.entries(attributes).forEach(([key, value]) => {
1789
- const parsedKey = parseFieldKey(key);
1790
- const paramName = `p${paramIndex++}`;
1791
- conditions.push(`JSON_VALUE([attributes], '$.${parsedKey}') = @${paramName}`);
1792
- paramMap[paramName] = value;
1793
- });
1794
- }
1795
- if (filters) {
1796
- Object.entries(filters).forEach(([key, value]) => {
1797
- const parsedKey = parseFieldKey(key);
1798
- const paramName = `p${paramIndex++}`;
1799
- conditions.push(`[${parsedKey}] = @${paramName}`);
1800
- paramMap[paramName] = value;
1801
- });
1802
- }
1803
- if (fromDate instanceof Date && !isNaN(fromDate.getTime())) {
1804
- const paramName = `p${paramIndex++}`;
1805
- conditions.push(`[createdAt] >= @${paramName}`);
1806
- paramMap[paramName] = fromDate.toISOString();
1807
- }
1808
- if (toDate instanceof Date && !isNaN(toDate.getTime())) {
1809
- const paramName = `p${paramIndex++}`;
1810
- conditions.push(`[createdAt] <= @${paramName}`);
1811
- paramMap[paramName] = toDate.toISOString();
2587
+ parseWorkflowRun(row) {
2588
+ let parsedSnapshot = row.snapshot;
2589
+ if (typeof parsedSnapshot === "string") {
2590
+ try {
2591
+ parsedSnapshot = JSON.parse(row.snapshot);
2592
+ } catch (e) {
2593
+ this.logger?.warn?.(`Failed to parse snapshot for workflow ${row.workflow_name}:`, e);
2594
+ }
1812
2595
  }
1813
- const whereClause = conditions.length > 0 ? `WHERE ${conditions.join(" AND ")}` : "";
1814
- const countQuery = `SELECT COUNT(*) as total FROM ${getTableName({ indexName: TABLE_TRACES, schemaName: getSchemaName(this.schema) })} ${whereClause}`;
1815
- let total = 0;
2596
+ return {
2597
+ workflowName: row.workflow_name,
2598
+ runId: row.run_id,
2599
+ snapshot: parsedSnapshot,
2600
+ createdAt: row.createdAt,
2601
+ updatedAt: row.updatedAt,
2602
+ resourceId: row.resourceId
2603
+ };
2604
+ }
2605
+ async updateWorkflowResults({
2606
+ workflowName,
2607
+ runId,
2608
+ stepId,
2609
+ result,
2610
+ requestContext
2611
+ }) {
2612
+ const table = getTableName({ indexName: TABLE_WORKFLOW_SNAPSHOT, schemaName: getSchemaName(this.schema) });
2613
+ const transaction = this.pool.transaction();
1816
2614
  try {
1817
- const countRequest = this.pool.request();
1818
- Object.entries(paramMap).forEach(([key, value]) => {
1819
- if (value instanceof Date) {
1820
- countRequest.input(key, sql2.DateTime, value);
1821
- } else {
1822
- countRequest.input(key, value);
1823
- }
1824
- });
1825
- const countResult = await countRequest.query(countQuery);
1826
- total = parseInt(countResult.recordset[0].total, 10);
2615
+ await transaction.begin();
2616
+ const selectRequest = new sql2.Request(transaction);
2617
+ selectRequest.input("workflow_name", workflowName);
2618
+ selectRequest.input("run_id", runId);
2619
+ const existingSnapshotResult = await selectRequest.query(
2620
+ `SELECT snapshot FROM ${table} WITH (UPDLOCK, HOLDLOCK) WHERE workflow_name = @workflow_name AND run_id = @run_id`
2621
+ );
2622
+ let snapshot;
2623
+ if (!existingSnapshotResult.recordset || existingSnapshotResult.recordset.length === 0) {
2624
+ snapshot = {
2625
+ context: {},
2626
+ activePaths: [],
2627
+ timestamp: Date.now(),
2628
+ suspendedPaths: {},
2629
+ resumeLabels: {},
2630
+ serializedStepGraph: [],
2631
+ value: {},
2632
+ waitingPaths: {},
2633
+ status: "pending",
2634
+ runId,
2635
+ requestContext: {}
2636
+ };
2637
+ } else {
2638
+ const existingSnapshot = existingSnapshotResult.recordset[0].snapshot;
2639
+ snapshot = typeof existingSnapshot === "string" ? JSON.parse(existingSnapshot) : existingSnapshot;
2640
+ }
2641
+ snapshot.context[stepId] = result;
2642
+ snapshot.requestContext = { ...snapshot.requestContext, ...requestContext };
2643
+ const upsertReq = new sql2.Request(transaction);
2644
+ upsertReq.input("workflow_name", workflowName);
2645
+ upsertReq.input("run_id", runId);
2646
+ upsertReq.input("snapshot", JSON.stringify(snapshot));
2647
+ upsertReq.input("createdAt", sql2.DateTime2, /* @__PURE__ */ new Date());
2648
+ upsertReq.input("updatedAt", sql2.DateTime2, /* @__PURE__ */ new Date());
2649
+ await upsertReq.query(
2650
+ `MERGE ${table} AS target
2651
+ USING (SELECT @workflow_name AS workflow_name, @run_id AS run_id) AS src
2652
+ ON target.workflow_name = src.workflow_name AND target.run_id = src.run_id
2653
+ WHEN MATCHED THEN UPDATE SET snapshot = @snapshot, [updatedAt] = @updatedAt
2654
+ WHEN NOT MATCHED THEN INSERT (workflow_name, run_id, snapshot, [createdAt], [updatedAt])
2655
+ VALUES (@workflow_name, @run_id, @snapshot, @createdAt, @updatedAt);`
2656
+ );
2657
+ await transaction.commit();
2658
+ return snapshot.context;
1827
2659
  } catch (error) {
2660
+ try {
2661
+ await transaction.rollback();
2662
+ } catch {
2663
+ }
1828
2664
  throw new MastraError(
1829
2665
  {
1830
- id: "MASTRA_STORAGE_MSSQL_STORE_GET_TRACES_PAGINATED_FAILED_TO_RETRIEVE_TOTAL_COUNT",
2666
+ id: "MASTRA_STORAGE_MSSQL_STORE_UPDATE_WORKFLOW_RESULTS_FAILED",
1831
2667
  domain: ErrorDomain.STORAGE,
1832
2668
  category: ErrorCategory.THIRD_PARTY,
1833
2669
  details: {
1834
- name: args.name ?? "",
1835
- scope: args.scope ?? ""
2670
+ workflowName,
2671
+ runId,
2672
+ stepId
1836
2673
  }
1837
2674
  },
1838
2675
  error
1839
2676
  );
1840
2677
  }
1841
- if (total === 0) {
1842
- return {
1843
- traces: [],
1844
- total: 0,
1845
- page,
1846
- perPage,
1847
- hasMore: false
1848
- };
1849
- }
1850
- const dataQuery = `SELECT * FROM ${getTableName({ indexName: TABLE_TRACES, schemaName: getSchemaName(this.schema) })} ${whereClause} ORDER BY [seq_id] DESC OFFSET @offset ROWS FETCH NEXT @limit ROWS ONLY`;
1851
- const dataRequest = this.pool.request();
1852
- Object.entries(paramMap).forEach(([key, value]) => {
1853
- if (value instanceof Date) {
1854
- dataRequest.input(key, sql2.DateTime, value);
1855
- } else {
1856
- dataRequest.input(key, value);
1857
- }
1858
- });
1859
- dataRequest.input("offset", currentOffset);
1860
- dataRequest.input("limit", perPage);
2678
+ }
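updateWorkflowResults, previously unimplemented for MSSQL, now upserts the run snapshot under UPDLOCK/HOLDLOCK so concurrent step writers serialize on the same row. A minimal usage sketch (assuming an initialized MSSQLStore named `store`; the workflow, run, and step identifiers are illustrative):

// Sketch: merge one step's result into the stored snapshot, creating it if absent.
const stepResults = await store.updateWorkflowResults({
  workflowName: 'order-fulfillment',
  runId: 'run-123',
  stepId: 'charge-card',
  result: { status: 'success', output: { charged: true } },
  requestContext: { tenantId: 'acme' },
});
// Returns the snapshot's full step-result map after the merge.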
2679
+ async updateWorkflowState({
2680
+ workflowName,
2681
+ runId,
2682
+ opts
2683
+ }) {
2684
+ const table = getTableName({ indexName: TABLE_WORKFLOW_SNAPSHOT, schemaName: getSchemaName(this.schema) });
2685
+ const transaction = this.pool.transaction();
1861
2686
  try {
1862
- const rowsResult = await dataRequest.query(dataQuery);
1863
- const rows = rowsResult.recordset;
1864
- const traces = rows.map((row) => ({
1865
- id: row.id,
1866
- parentSpanId: row.parentSpanId,
1867
- traceId: row.traceId,
1868
- name: row.name,
1869
- scope: row.scope,
1870
- kind: row.kind,
1871
- status: JSON.parse(row.status),
1872
- events: JSON.parse(row.events),
1873
- links: JSON.parse(row.links),
1874
- attributes: JSON.parse(row.attributes),
1875
- startTime: row.startTime,
1876
- endTime: row.endTime,
1877
- other: row.other,
1878
- createdAt: row.createdAt
1879
- }));
1880
- return {
1881
- traces,
1882
- total,
1883
- page,
1884
- perPage,
1885
- hasMore: currentOffset + traces.length < total
1886
- };
2687
+ await transaction.begin();
2688
+ const selectRequest = new sql2.Request(transaction);
2689
+ selectRequest.input("workflow_name", workflowName);
2690
+ selectRequest.input("run_id", runId);
2691
+ const existingSnapshotResult = await selectRequest.query(
2692
+ `SELECT snapshot FROM ${table} WITH (UPDLOCK, HOLDLOCK) WHERE workflow_name = @workflow_name AND run_id = @run_id`
2693
+ );
2694
+ if (!existingSnapshotResult.recordset || existingSnapshotResult.recordset.length === 0) {
2695
+ await transaction.rollback();
2696
+ return void 0;
2697
+ }
2698
+ const existingSnapshot = existingSnapshotResult.recordset[0].snapshot;
2699
+ const snapshot = typeof existingSnapshot === "string" ? JSON.parse(existingSnapshot) : existingSnapshot;
2700
+ if (!snapshot || !snapshot?.context) {
2701
+ await transaction.rollback();
2702
+ throw new MastraError(
2703
+ {
2704
+ id: "MASTRA_STORAGE_MSSQL_STORE_UPDATE_WORKFLOW_STATE_SNAPSHOT_NOT_FOUND",
2705
+ domain: ErrorDomain.STORAGE,
2706
+ category: ErrorCategory.SYSTEM,
2707
+ details: {
2708
+ workflowName,
2709
+ runId
2710
+ }
2711
+ },
2712
+ new Error(`Snapshot not found for runId ${runId}`)
2713
+ );
2714
+ }
2715
+ const updatedSnapshot = { ...snapshot, ...opts };
2716
+ const updateRequest = new sql2.Request(transaction);
2717
+ updateRequest.input("snapshot", JSON.stringify(updatedSnapshot));
2718
+ updateRequest.input("workflow_name", workflowName);
2719
+ updateRequest.input("run_id", runId);
2720
+ updateRequest.input("updatedAt", sql2.DateTime2, /* @__PURE__ */ new Date());
2721
+ await updateRequest.query(
2722
+ `UPDATE ${table} SET snapshot = @snapshot, [updatedAt] = @updatedAt WHERE workflow_name = @workflow_name AND run_id = @run_id`
2723
+ );
2724
+ await transaction.commit();
2725
+ return updatedSnapshot;
1887
2726
  } catch (error) {
2727
+ try {
2728
+ await transaction.rollback();
2729
+ } catch {
2730
+ }
1888
2731
  throw new MastraError(
1889
2732
  {
1890
- id: "MASTRA_STORAGE_MSSQL_STORE_GET_TRACES_PAGINATED_FAILED_TO_RETRIEVE_TRACES",
2733
+ id: "MASTRA_STORAGE_MSSQL_STORE_UPDATE_WORKFLOW_STATE_FAILED",
1891
2734
  domain: ErrorDomain.STORAGE,
1892
2735
  category: ErrorCategory.THIRD_PARTY,
1893
2736
  details: {
1894
- name: args.name ?? "",
1895
- scope: args.scope ?? ""
2737
+ workflowName,
2738
+ runId
1896
2739
  }
1897
2740
  },
1898
2741
  error
1899
2742
  );
1900
2743
  }
1901
2744
  }
1902
- async batchTraceInsert({ records }) {
1903
- this.logger.debug("Batch inserting traces", { count: records.length });
1904
- await this.operations.batchInsert({
1905
- tableName: TABLE_TRACES,
1906
- records
1907
- });
1908
- }
1909
- };
1910
- function parseWorkflowRun(row) {
1911
- let parsedSnapshot = row.snapshot;
1912
- if (typeof parsedSnapshot === "string") {
1913
- try {
1914
- parsedSnapshot = JSON.parse(row.snapshot);
1915
- } catch (e) {
1916
- console.warn(`Failed to parse snapshot for workflow ${row.workflow_name}: ${e}`);
1917
- }
1918
- }
1919
- return {
1920
- workflowName: row.workflow_name,
1921
- runId: row.run_id,
1922
- snapshot: parsedSnapshot,
1923
- createdAt: row.createdAt,
1924
- updatedAt: row.updatedAt,
1925
- resourceId: row.resourceId
1926
- };
1927
- }
1928
- var WorkflowsMSSQL = class extends WorkflowsStorage {
1929
- pool;
1930
- operations;
1931
- schema;
1932
- constructor({
1933
- pool,
1934
- operations,
1935
- schema
1936
- }) {
1937
- super();
1938
- this.pool = pool;
1939
- this.operations = operations;
1940
- this.schema = schema;
1941
- }
1942
- updateWorkflowResults({
1943
- // workflowName,
1944
- // runId,
1945
- // stepId,
1946
- // result,
1947
- // runtimeContext,
1948
- }) {
1949
- throw new Error("Method not implemented.");
1950
- }
1951
- updateWorkflowState({
1952
- // workflowName,
1953
- // runId,
1954
- // opts,
1955
- }) {
1956
- throw new Error("Method not implemented.");
1957
- }
1958
2745
  async persistWorkflowSnapshot({
1959
2746
  workflowName,
1960
2747
  runId,
@@ -2051,7 +2838,7 @@ var WorkflowsMSSQL = class extends WorkflowsStorage {
2051
2838
  if (!result.recordset || result.recordset.length === 0) {
2052
2839
  return null;
2053
2840
  }
2054
- return parseWorkflowRun(result.recordset[0]);
2841
+ return this.parseWorkflowRun(result.recordset[0]);
2055
2842
  } catch (error) {
2056
2843
  throw new MastraError(
2057
2844
  {
@@ -2067,12 +2854,12 @@ var WorkflowsMSSQL = class extends WorkflowsStorage {
2067
2854
  );
2068
2855
  }
2069
2856
  }
2070
- async getWorkflowRuns({
2857
+ async listWorkflowRuns({
2071
2858
  workflowName,
2072
2859
  fromDate,
2073
2860
  toDate,
2074
- limit,
2075
- offset,
2861
+ page,
2862
+ perPage,
2076
2863
  resourceId
2077
2864
  } = {}) {
2078
2865
  try {
@@ -2088,7 +2875,7 @@ var WorkflowsMSSQL = class extends WorkflowsStorage {
2088
2875
  conditions.push(`[resourceId] = @resourceId`);
2089
2876
  paramMap["resourceId"] = resourceId;
2090
2877
  } else {
2091
- console.warn(`[${TABLE_WORKFLOW_SNAPSHOT}] resourceId column not found. Skipping resourceId filter.`);
2878
+ this.logger?.warn?.(`[${TABLE_WORKFLOW_SNAPSHOT}] resourceId column not found. Skipping resourceId filter.`);
2092
2879
  }
2093
2880
  }
2094
2881
  if (fromDate instanceof Date && !isNaN(fromDate.getTime())) {
@@ -2110,24 +2897,27 @@ var WorkflowsMSSQL = class extends WorkflowsStorage {
2110
2897
  request.input(key, value);
2111
2898
  }
2112
2899
  });
2113
- if (limit !== void 0 && offset !== void 0) {
2900
+ const usePagination = typeof perPage === "number" && typeof page === "number";
2901
+ if (usePagination) {
2114
2902
  const countQuery = `SELECT COUNT(*) as count FROM ${tableName} ${whereClause}`;
2115
2903
  const countResult = await request.query(countQuery);
2116
2904
  total = Number(countResult.recordset[0]?.count || 0);
2117
2905
  }
2118
2906
  let query = `SELECT * FROM ${tableName} ${whereClause} ORDER BY [seq_id] DESC`;
2119
- if (limit !== void 0 && offset !== void 0) {
2120
- query += ` OFFSET @offset ROWS FETCH NEXT @limit ROWS ONLY`;
2121
- request.input("limit", limit);
2907
+ if (usePagination) {
2908
+ const normalizedPerPage = normalizePerPage(perPage, Number.MAX_SAFE_INTEGER);
2909
+ const offset = page * normalizedPerPage;
2910
+ query += ` OFFSET @offset ROWS FETCH NEXT @perPage ROWS ONLY`;
2911
+ request.input("perPage", normalizedPerPage);
2122
2912
  request.input("offset", offset);
2123
2913
  }
2124
2914
  const result = await request.query(query);
2125
- const runs = (result.recordset || []).map((row) => parseWorkflowRun(row));
2915
+ const runs = (result.recordset || []).map((row) => this.parseWorkflowRun(row));
2126
2916
  return { runs, total: total || runs.length };
2127
2917
  } catch (error) {
2128
2918
  throw new MastraError(
2129
2919
  {
2130
- id: "MASTRA_STORAGE_MSSQL_STORE_GET_WORKFLOW_RUNS_FAILED",
2920
+ id: "MASTRA_STORAGE_MSSQL_STORE_LIST_WORKFLOW_RUNS_FAILED",
2131
2921
  domain: ErrorDomain.STORAGE,
2132
2922
  category: ErrorCategory.THIRD_PARTY,
2133
2923
  details: {
@@ -2170,19 +2960,17 @@ var MSSQLStore = class extends MastraStorage {
2170
2960
  port: config.port,
2171
2961
  options: config.options || { encrypt: true, trustServerCertificate: true }
2172
2962
  });
2173
- const legacyEvals = new LegacyEvalsMSSQL({ pool: this.pool, schema: this.schema });
2174
2963
  const operations = new StoreOperationsMSSQL({ pool: this.pool, schemaName: this.schema });
2175
2964
  const scores = new ScoresMSSQL({ pool: this.pool, operations, schema: this.schema });
2176
- const traces = new TracesMSSQL({ pool: this.pool, operations, schema: this.schema });
2177
2965
  const workflows = new WorkflowsMSSQL({ pool: this.pool, operations, schema: this.schema });
2178
2966
  const memory = new MemoryMSSQL({ pool: this.pool, schema: this.schema, operations });
2967
+ const observability = new ObservabilityMSSQL({ pool: this.pool, operations, schema: this.schema });
2179
2968
  this.stores = {
2180
2969
  operations,
2181
2970
  scores,
2182
- traces,
2183
2971
  workflows,
2184
- legacyEvals,
2185
- memory
2972
+ memory,
2973
+ observability
2186
2974
  };
2187
2975
  } catch (e) {
2188
2976
  throw new MastraError(
@@ -2202,6 +2990,11 @@ var MSSQLStore = class extends MastraStorage {
2202
2990
  try {
2203
2991
  await this.isConnected;
2204
2992
  await super.init();
2993
+ try {
2994
+ await this.stores.operations.createAutomaticIndexes();
2995
+ } catch (indexError) {
2996
+ this.logger?.warn?.("Failed to create indexes:", indexError);
2997
+ }
2205
2998
  } catch (error) {
2206
2999
  this.isConnected = null;
2207
3000
  throw new MastraError(
@@ -2229,28 +3022,11 @@ var MSSQLStore = class extends MastraStorage {
2229
3022
  hasColumn: true,
2230
3023
  createTable: true,
2231
3024
  deleteMessages: true,
2232
- getScoresBySpan: true
3025
+ listScoresBySpan: true,
3026
+ observabilityInstance: true,
3027
+ indexManagement: true
2233
3028
  };
2234
3029
  }
2235
- /** @deprecated use getEvals instead */
2236
- async getEvalsByAgentName(agentName, type) {
2237
- return this.stores.legacyEvals.getEvalsByAgentName(agentName, type);
2238
- }
2239
- async getEvals(options = {}) {
2240
- return this.stores.legacyEvals.getEvals(options);
2241
- }
2242
- /**
2243
- * @deprecated use getTracesPaginated instead
2244
- */
2245
- async getTraces(args) {
2246
- return this.stores.traces.getTraces(args);
2247
- }
2248
- async getTracesPaginated(args) {
2249
- return this.stores.traces.getTracesPaginated(args);
2250
- }
2251
- async batchTraceInsert({ records }) {
2252
- return this.stores.traces.batchTraceInsert({ records });
2253
- }
2254
3030
  async createTable({
2255
3031
  tableName,
2256
3032
  schema
@@ -2285,15 +3061,6 @@ var MSSQLStore = class extends MastraStorage {
2285
3061
  async getThreadById({ threadId }) {
2286
3062
  return this.stores.memory.getThreadById({ threadId });
2287
3063
  }
2288
- /**
2289
- * @deprecated use getThreadsByResourceIdPaginated instead
2290
- */
2291
- async getThreadsByResourceId(args) {
2292
- return this.stores.memory.getThreadsByResourceId(args);
2293
- }
2294
- async getThreadsByResourceIdPaginated(args) {
2295
- return this.stores.memory.getThreadsByResourceIdPaginated(args);
2296
- }
2297
3064
  async saveThread({ thread }) {
2298
3065
  return this.stores.memory.saveThread({ thread });
2299
3066
  }
@@ -2307,17 +3074,8 @@ var MSSQLStore = class extends MastraStorage {
2307
3074
  async deleteThread({ threadId }) {
2308
3075
  return this.stores.memory.deleteThread({ threadId });
2309
3076
  }
2310
- async getMessages(args) {
2311
- return this.stores.memory.getMessages(args);
2312
- }
2313
- async getMessagesById({
2314
- messageIds,
2315
- format
2316
- }) {
2317
- return this.stores.memory.getMessagesById({ messageIds, format });
2318
- }
2319
- async getMessagesPaginated(args) {
2320
- return this.stores.memory.getMessagesPaginated(args);
3077
+ async listMessagesById({ messageIds }) {
3078
+ return this.stores.memory.listMessagesById({ messageIds });
2321
3079
  }
2322
3080
  async saveMessages(args) {
2323
3081
  return this.stores.memory.saveMessages(args);
@@ -2351,9 +3109,9 @@ var MSSQLStore = class extends MastraStorage {
2351
3109
  runId,
2352
3110
  stepId,
2353
3111
  result,
2354
- runtimeContext
3112
+ requestContext
2355
3113
  }) {
2356
- return this.stores.workflows.updateWorkflowResults({ workflowName, runId, stepId, result, runtimeContext });
3114
+ return this.stores.workflows.updateWorkflowResults({ workflowName, runId, stepId, result, requestContext });
2357
3115
  }
2358
3116
  async updateWorkflowState({
2359
3117
  workflowName,
@@ -2376,15 +3134,15 @@ var MSSQLStore = class extends MastraStorage {
2376
3134
  }) {
2377
3135
  return this.stores.workflows.loadWorkflowSnapshot({ workflowName, runId });
2378
3136
  }
2379
- async getWorkflowRuns({
3137
+ async listWorkflowRuns({
2380
3138
  workflowName,
2381
3139
  fromDate,
2382
3140
  toDate,
2383
- limit,
2384
- offset,
3141
+ perPage,
3142
+ page,
2385
3143
  resourceId
2386
3144
  } = {}) {
2387
- return this.stores.workflows.getWorkflowRuns({ workflowName, fromDate, toDate, limit, offset, resourceId });
3145
+ return this.stores.workflows.listWorkflowRuns({ workflowName, fromDate, toDate, perPage, page, resourceId });
2388
3146
  }
2389
3147
  async getWorkflowRunById({
2390
3148
  runId,
@@ -2395,44 +3153,107 @@ var MSSQLStore = class extends MastraStorage {
2395
3153
  async close() {
2396
3154
  await this.pool.close();
2397
3155
  }
3156
+ /**
3157
+ * Index Management
3158
+ */
3159
+ async createIndex(options) {
3160
+ return this.stores.operations.createIndex(options);
3161
+ }
3162
+ async listIndexes(tableName) {
3163
+ return this.stores.operations.listIndexes(tableName);
3164
+ }
3165
+ async describeIndex(indexName) {
3166
+ return this.stores.operations.describeIndex(indexName);
3167
+ }
3168
+ async dropIndex(indexName) {
3169
+ return this.stores.operations.dropIndex(indexName);
3170
+ }
3171
+ /**
3172
+ * Tracing / Observability
3173
+ */
3174
+ getObservabilityStore() {
3175
+ if (!this.stores.observability) {
3176
+ throw new MastraError({
3177
+ id: "MSSQL_STORE_OBSERVABILITY_NOT_INITIALIZED",
3178
+ domain: ErrorDomain.STORAGE,
3179
+ category: ErrorCategory.SYSTEM,
3180
+ text: "Observability storage is not initialized"
3181
+ });
3182
+ }
3183
+ return this.stores.observability;
3184
+ }
3185
+ async createSpan(span) {
3186
+ return this.getObservabilityStore().createSpan(span);
3187
+ }
3188
+ async updateSpan({
3189
+ spanId,
3190
+ traceId,
3191
+ updates
3192
+ }) {
3193
+ return this.getObservabilityStore().updateSpan({ spanId, traceId, updates });
3194
+ }
3195
+ async getAITrace(traceId) {
3196
+ return this.getObservabilityStore().getAITrace(traceId);
3197
+ }
3198
+ async getAITracesPaginated(args) {
3199
+ return this.getObservabilityStore().getAITracesPaginated(args);
3200
+ }
3201
+ async batchCreateSpans(args) {
3202
+ return this.getObservabilityStore().batchCreateSpans(args);
3203
+ }
3204
+ async batchUpdateSpans(args) {
3205
+ return this.getObservabilityStore().batchUpdateSpans(args);
3206
+ }
3207
+ async batchDeleteAITraces(args) {
3208
+ return this.getObservabilityStore().batchDeleteAITraces(args);
3209
+ }
2398
3210
  /**
2399
3211
  * Scorers
2400
3212
  */
2401
3213
  async getScoreById({ id: _id }) {
2402
3214
  return this.stores.scores.getScoreById({ id: _id });
2403
3215
  }
2404
- async getScoresByScorerId({
3216
+ async listScoresByScorerId({
2405
3217
  scorerId: _scorerId,
2406
- pagination: _pagination
3218
+ pagination: _pagination,
3219
+ entityId: _entityId,
3220
+ entityType: _entityType,
3221
+ source: _source
2407
3222
  }) {
2408
- return this.stores.scores.getScoresByScorerId({ scorerId: _scorerId, pagination: _pagination });
3223
+ return this.stores.scores.listScoresByScorerId({
3224
+ scorerId: _scorerId,
3225
+ pagination: _pagination,
3226
+ entityId: _entityId,
3227
+ entityType: _entityType,
3228
+ source: _source
3229
+ });
2409
3230
  }
2410
3231
  async saveScore(_score) {
2411
3232
  return this.stores.scores.saveScore(_score);
2412
3233
  }
2413
- async getScoresByRunId({
3234
+ async listScoresByRunId({
2414
3235
  runId: _runId,
2415
3236
  pagination: _pagination
2416
3237
  }) {
2417
- return this.stores.scores.getScoresByRunId({ runId: _runId, pagination: _pagination });
3238
+ return this.stores.scores.listScoresByRunId({ runId: _runId, pagination: _pagination });
2418
3239
  }
2419
- async getScoresByEntityId({
3240
+ async listScoresByEntityId({
2420
3241
  entityId: _entityId,
2421
3242
  entityType: _entityType,
2422
3243
  pagination: _pagination
2423
3244
  }) {
2424
- return this.stores.scores.getScoresByEntityId({
3245
+ return this.stores.scores.listScoresByEntityId({
2425
3246
  entityId: _entityId,
2426
3247
  entityType: _entityType,
2427
3248
  pagination: _pagination
2428
3249
  });
2429
3250
  }
2430
- async getScoresBySpan({
3251
+ async listScoresBySpan({
2431
3252
  traceId,
2432
3253
  spanId,
2433
3254
  pagination: _pagination
2434
3255
  }) {
2435
- return this.stores.scores.getScoresBySpan({ traceId, spanId, pagination: _pagination });
3256
+ return this.stores.scores.listScoresBySpan({ traceId, spanId, pagination: _pagination });
2436
3257
  }
2437
3258
  };
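Taken together, the store now exposes index management and the renamed list* APIs directly. A minimal end-to-end sketch (assuming an initialized MSSQLStore named `store`; identifiers are illustrative):

// Sketch: paginate workflow runs with the new page/perPage arguments
// (the old limit/offset pair on getWorkflowRuns is gone).
const { runs, total } = await store.listWorkflowRuns({
  workflowName: 'order-fulfillment',
  page: 0,
  perPage: 25,
  resourceId: 'user-42',
});

// Sketch: inspect the automatic indexes created during init().
const indexes = await store.listIndexes('mastra_threads');
const stats = await store.describeIndex(indexes[0]?.name ?? 'mastra_threads_resourceid_seqid_idx');
// stats includes size, scans, tuples_read, and tuples_fetched for the index.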
2438
3259