@mastra/mssql 0.0.0-vector-extension-schema-20250922130418 → 0.0.0-vnext-20251104230439

This diff shows the contents of publicly released package versions as they appear in their public registries. It is provided for informational purposes only and reflects the changes between those two versions.
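
For orientation before the raw diff: the memory domain's paginated APIs are renamed in this release (getThreadsByResourceIdPaginated becomes listThreadsByResourceId, getMessagesPaginated and getMessages are replaced by listMessages, getMessagesById becomes listMessagesById), the legacy-evals domain is removed, and a new ObservabilityMSSQL domain stores AI spans. A minimal usage sketch of the renamed memory APIs follows; the MSSQLStore entry point and its constructor options are assumptions not visible in this excerpt, while the method names, argument shapes, and return shapes are taken from the diff below.

// Hedged sketch: MSSQLStore and its connection options are assumed here;
// the listThreadsByResourceId and listMessages calls mirror the new code in this diff.
import { MSSQLStore } from '@mastra/mssql';

const store = new MSSQLStore({
  server: 'localhost',            // assumed connection options, not shown in this diff
  database: 'mastra',
  user: 'sa',
  password: 'example-password',
});

// Replaces getThreadsByResourceIdPaginated; resolves to { threads, total, page, perPage, hasMore }.
const threadPage = await store.listThreadsByResourceId({
  resourceId: 'resource-1',
  page: 0,
  perPage: 50,
});

// Replaces getMessagesPaginated; resolves to { messages, total, page, perPage, hasMore },
// with messages returned in the MessageList "db" format used throughout the new code.
const messagePage = await store.listMessages({
  threadId: threadPage.threads[0]?.id ?? 'thread-1',
  page: 0,
  perPage: 40,
  filter: { dateRange: { start: new Date('2025-01-01') } },
});
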
package/dist/index.js CHANGED
@@ -1,8 +1,10 @@
1
1
  import { MastraError, ErrorCategory, ErrorDomain } from '@mastra/core/error';
2
- import { MastraStorage, LegacyEvalsStorage, StoreOperations, TABLE_WORKFLOW_SNAPSHOT, ScoresStorage, TABLE_SCORERS, TracesStorage, TABLE_TRACES, WorkflowsStorage, MemoryStorage, resolveMessageLimit, TABLE_RESOURCES, TABLE_EVALS, TABLE_THREADS, TABLE_MESSAGES } from '@mastra/core/storage';
2
+ import { MastraStorage, StoreOperations, TABLE_WORKFLOW_SNAPSHOT, TABLE_SCHEMAS, TABLE_THREADS, TABLE_MESSAGES, TABLE_TRACES, TABLE_SCORERS, TABLE_AI_SPANS, ScoresStorage, normalizePerPage, calculatePagination, WorkflowsStorage, MemoryStorage, resolveMessageLimit, TABLE_RESOURCES, ObservabilityStorage, safelyParseJSON } from '@mastra/core/storage';
3
3
  import sql2 from 'mssql';
4
- import { parseSqlIdentifier, parseFieldKey } from '@mastra/core/utils';
5
4
  import { MessageList } from '@mastra/core/agent';
5
+ import { parseSqlIdentifier } from '@mastra/core/utils';
6
+ import { randomUUID } from 'crypto';
7
+ import { saveScorePayloadSchema } from '@mastra/core/evals';
6
8
 
7
9
  // src/storage/index.ts
8
10
  function getSchemaName(schema) {
@@ -14,154 +16,71 @@ function getTableName({ indexName, schemaName }) {
14
16
  const quotedSchemaName = schemaName;
15
17
  return quotedSchemaName ? `${quotedSchemaName}.${quotedIndexName}` : quotedIndexName;
16
18
  }
17
-
18
- // src/storage/domains/legacy-evals/index.ts
19
- function transformEvalRow(row) {
20
- let testInfoValue = null, resultValue = null;
21
- if (row.test_info) {
22
- try {
23
- testInfoValue = typeof row.test_info === "string" ? JSON.parse(row.test_info) : row.test_info;
24
- } catch {
25
- }
19
+ function buildDateRangeFilter(dateRange, fieldName) {
20
+ const filters = {};
21
+ if (dateRange?.start) {
22
+ filters[`${fieldName}_gte`] = dateRange.start;
26
23
  }
27
- if (row.test_info) {
28
- try {
29
- resultValue = typeof row.result === "string" ? JSON.parse(row.result) : row.result;
30
- } catch {
31
- }
24
+ if (dateRange?.end) {
25
+ filters[`${fieldName}_lte`] = dateRange.end;
32
26
  }
27
+ return filters;
28
+ }
29
+ function prepareWhereClause(filters, _schema) {
30
+ const conditions = [];
31
+ const params = {};
32
+ let paramIndex = 1;
33
+ Object.entries(filters).forEach(([key, value]) => {
34
+ if (value === void 0) return;
35
+ const paramName = `p${paramIndex++}`;
36
+ if (key.endsWith("_gte")) {
37
+ const fieldName = key.slice(0, -4);
38
+ conditions.push(`[${parseSqlIdentifier(fieldName, "field name")}] >= @${paramName}`);
39
+ params[paramName] = value instanceof Date ? value.toISOString() : value;
40
+ } else if (key.endsWith("_lte")) {
41
+ const fieldName = key.slice(0, -4);
42
+ conditions.push(`[${parseSqlIdentifier(fieldName, "field name")}] <= @${paramName}`);
43
+ params[paramName] = value instanceof Date ? value.toISOString() : value;
44
+ } else if (value === null) {
45
+ conditions.push(`[${parseSqlIdentifier(key, "field name")}] IS NULL`);
46
+ } else {
47
+ conditions.push(`[${parseSqlIdentifier(key, "field name")}] = @${paramName}`);
48
+ params[paramName] = value instanceof Date ? value.toISOString() : value;
49
+ }
50
+ });
33
51
  return {
34
- agentName: row.agent_name,
35
- input: row.input,
36
- output: row.output,
37
- result: resultValue,
38
- metricName: row.metric_name,
39
- instructions: row.instructions,
40
- testInfo: testInfoValue,
41
- globalRunId: row.global_run_id,
42
- runId: row.run_id,
43
- createdAt: row.created_at
52
+ sql: conditions.length > 0 ? ` WHERE ${conditions.join(" AND ")}` : "",
53
+ params
44
54
  };
45
55
  }
46
- var LegacyEvalsMSSQL = class extends LegacyEvalsStorage {
47
- pool;
48
- schema;
49
- constructor({ pool, schema }) {
50
- super();
51
- this.pool = pool;
52
- this.schema = schema;
53
- }
54
- /** @deprecated use getEvals instead */
55
- async getEvalsByAgentName(agentName, type) {
56
- try {
57
- let query = `SELECT * FROM ${getTableName({ indexName: TABLE_EVALS, schemaName: getSchemaName(this.schema) })} WHERE agent_name = @p1`;
58
- if (type === "test") {
59
- query += " AND test_info IS NOT NULL AND JSON_VALUE(test_info, '$.testPath') IS NOT NULL";
60
- } else if (type === "live") {
61
- query += " AND (test_info IS NULL OR JSON_VALUE(test_info, '$.testPath') IS NULL)";
62
- }
63
- query += " ORDER BY created_at DESC";
64
- const request = this.pool.request();
65
- request.input("p1", agentName);
66
- const result = await request.query(query);
67
- const rows = result.recordset;
68
- return typeof transformEvalRow === "function" ? rows?.map((row) => transformEvalRow(row)) ?? [] : rows ?? [];
69
- } catch (error) {
70
- if (error && error.number === 208 && error.message && error.message.includes("Invalid object name")) {
71
- return [];
72
- }
73
- console.error("Failed to get evals for the specified agent: " + error?.message);
74
- throw error;
75
- }
76
- }
77
- async getEvals(options = {}) {
78
- const { agentName, type, page = 0, perPage = 100, dateRange } = options;
79
- const fromDate = dateRange?.start;
80
- const toDate = dateRange?.end;
81
- const where = [];
82
- const params = {};
83
- if (agentName) {
84
- where.push("agent_name = @agentName");
85
- params["agentName"] = agentName;
86
- }
87
- if (type === "test") {
88
- where.push("test_info IS NOT NULL AND JSON_VALUE(test_info, '$.testPath') IS NOT NULL");
89
- } else if (type === "live") {
90
- where.push("(test_info IS NULL OR JSON_VALUE(test_info, '$.testPath') IS NULL)");
91
- }
92
- if (fromDate instanceof Date && !isNaN(fromDate.getTime())) {
93
- where.push(`[created_at] >= @fromDate`);
94
- params[`fromDate`] = fromDate.toISOString();
95
- }
96
- if (toDate instanceof Date && !isNaN(toDate.getTime())) {
97
- where.push(`[created_at] <= @toDate`);
98
- params[`toDate`] = toDate.toISOString();
99
- }
100
- const whereClause = where.length > 0 ? `WHERE ${where.join(" AND ")}` : "";
101
- const tableName = getTableName({ indexName: TABLE_EVALS, schemaName: getSchemaName(this.schema) });
102
- const offset = page * perPage;
103
- const countQuery = `SELECT COUNT(*) as total FROM ${tableName} ${whereClause}`;
104
- const dataQuery = `SELECT * FROM ${tableName} ${whereClause} ORDER BY seq_id DESC OFFSET @offset ROWS FETCH NEXT @perPage ROWS ONLY`;
105
- try {
106
- const countReq = this.pool.request();
107
- Object.entries(params).forEach(([key, value]) => {
108
- if (value instanceof Date) {
109
- countReq.input(key, sql2.DateTime, value);
110
- } else {
111
- countReq.input(key, value);
112
- }
113
- });
114
- const countResult = await countReq.query(countQuery);
115
- const total = countResult.recordset[0]?.total || 0;
116
- if (total === 0) {
117
- return {
118
- evals: [],
119
- total: 0,
120
- page,
121
- perPage,
122
- hasMore: false
123
- };
56
+ function transformFromSqlRow({
57
+ tableName,
58
+ sqlRow
59
+ }) {
60
+ const schema = TABLE_SCHEMAS[tableName];
61
+ const result = {};
62
+ Object.entries(sqlRow).forEach(([key, value]) => {
63
+ const columnSchema = schema?.[key];
64
+ if (columnSchema?.type === "jsonb" && typeof value === "string") {
65
+ try {
66
+ result[key] = JSON.parse(value);
67
+ } catch {
68
+ result[key] = value;
124
69
  }
125
- const req = this.pool.request();
126
- Object.entries(params).forEach(([key, value]) => {
127
- if (value instanceof Date) {
128
- req.input(key, sql2.DateTime, value);
129
- } else {
130
- req.input(key, value);
131
- }
132
- });
133
- req.input("offset", offset);
134
- req.input("perPage", perPage);
135
- const result = await req.query(dataQuery);
136
- const rows = result.recordset;
137
- return {
138
- evals: rows?.map((row) => transformEvalRow(row)) ?? [],
139
- total,
140
- page,
141
- perPage,
142
- hasMore: offset + (rows?.length ?? 0) < total
143
- };
144
- } catch (error) {
145
- const mastraError = new MastraError(
146
- {
147
- id: "MASTRA_STORAGE_MSSQL_STORE_GET_EVALS_FAILED",
148
- domain: ErrorDomain.STORAGE,
149
- category: ErrorCategory.THIRD_PARTY,
150
- details: {
151
- agentName: agentName || "all",
152
- type: type || "all",
153
- page,
154
- perPage
155
- }
156
- },
157
- error
158
- );
159
- this.logger?.error?.(mastraError.toString());
160
- this.logger?.trackException(mastraError);
161
- throw mastraError;
70
+ } else if (columnSchema?.type === "timestamp" && value && typeof value === "string") {
71
+ result[key] = new Date(value);
72
+ } else if (columnSchema?.type === "timestamp" && value instanceof Date) {
73
+ result[key] = value;
74
+ } else if (columnSchema?.type === "boolean") {
75
+ result[key] = Boolean(value);
76
+ } else {
77
+ result[key] = value;
162
78
  }
163
- }
164
- };
79
+ });
80
+ return result;
81
+ }
82
+
83
+ // src/storage/domains/memory/index.ts
165
84
  var MemoryMSSQL = class extends MemoryStorage {
166
85
  pool;
167
86
  schema;
@@ -179,7 +98,7 @@ var MemoryMSSQL = class extends MemoryStorage {
179
98
  });
180
99
  const cleanMessages = messagesWithParsedContent.map(({ seq_id, ...rest }) => rest);
181
100
  const list = new MessageList().add(cleanMessages, "memory");
182
- return format === "v2" ? list.get.all.v2() : list.get.all.v1();
101
+ return format === "v2" ? list.get.all.db() : list.get.all.v1();
183
102
  }
184
103
  constructor({
185
104
  pool,
@@ -193,7 +112,7 @@ var MemoryMSSQL = class extends MemoryStorage {
193
112
  }
194
113
  async getThreadById({ threadId }) {
195
114
  try {
196
- const sql7 = `SELECT
115
+ const sql5 = `SELECT
197
116
  id,
198
117
  [resourceId],
199
118
  title,
@@ -204,7 +123,7 @@ var MemoryMSSQL = class extends MemoryStorage {
204
123
  WHERE id = @threadId`;
205
124
  const request = this.pool.request();
206
125
  request.input("threadId", threadId);
207
- const resultSet = await request.query(sql7);
126
+ const resultSet = await request.query(sql5);
208
127
  const thread = resultSet.recordset[0] || null;
209
128
  if (!thread) {
210
129
  return null;
@@ -229,11 +148,12 @@ var MemoryMSSQL = class extends MemoryStorage {
229
148
  );
230
149
  }
231
150
  }
232
- async getThreadsByResourceIdPaginated(args) {
233
- const { resourceId, page = 0, perPage: perPageInput, orderBy = "createdAt", sortDirection = "DESC" } = args;
151
+ async listThreadsByResourceId(args) {
152
+ const { resourceId, page = 0, perPage: perPageInput, orderBy } = args;
153
+ const perPage = normalizePerPage(perPageInput, 100);
154
+ const { offset, perPage: perPageForResponse } = calculatePagination(page, perPageInput, perPage);
155
+ const { field, direction } = this.parseOrderBy(orderBy);
234
156
  try {
235
- const perPage = perPageInput !== void 0 ? perPageInput : 100;
236
- const currentOffset = page * perPage;
237
157
  const baseQuery = `FROM ${getTableName({ indexName: TABLE_THREADS, schemaName: getSchemaName(this.schema) })} WHERE [resourceId] = @resourceId`;
238
158
  const countQuery = `SELECT COUNT(*) as count ${baseQuery}`;
239
159
  const countRequest = this.pool.request();
@@ -245,16 +165,22 @@ var MemoryMSSQL = class extends MemoryStorage {
245
165
  threads: [],
246
166
  total: 0,
247
167
  page,
248
- perPage,
168
+ perPage: perPageForResponse,
249
169
  hasMore: false
250
170
  };
251
171
  }
252
- const orderByField = orderBy === "createdAt" ? "[createdAt]" : "[updatedAt]";
253
- const dataQuery = `SELECT id, [resourceId], title, metadata, [createdAt], [updatedAt] ${baseQuery} ORDER BY ${orderByField} ${sortDirection} OFFSET @offset ROWS FETCH NEXT @perPage ROWS ONLY`;
172
+ const orderByField = field === "createdAt" ? "[createdAt]" : "[updatedAt]";
173
+ const dir = (direction || "DESC").toUpperCase() === "ASC" ? "ASC" : "DESC";
174
+ const limitValue = perPageInput === false ? total : perPage;
175
+ const dataQuery = `SELECT id, [resourceId], title, metadata, [createdAt], [updatedAt] ${baseQuery} ORDER BY ${orderByField} ${dir} OFFSET @offset ROWS FETCH NEXT @perPage ROWS ONLY`;
254
176
  const dataRequest = this.pool.request();
255
177
  dataRequest.input("resourceId", resourceId);
256
- dataRequest.input("perPage", perPage);
257
- dataRequest.input("offset", currentOffset);
178
+ dataRequest.input("offset", offset);
179
+ if (limitValue > 2147483647) {
180
+ dataRequest.input("perPage", sql2.BigInt, limitValue);
181
+ } else {
182
+ dataRequest.input("perPage", limitValue);
183
+ }
258
184
  const rowsResult = await dataRequest.query(dataQuery);
259
185
  const rows = rowsResult.recordset || [];
260
186
  const threads = rows.map((thread) => ({
@@ -267,13 +193,13 @@ var MemoryMSSQL = class extends MemoryStorage {
267
193
  threads,
268
194
  total,
269
195
  page,
270
- perPage,
271
- hasMore: currentOffset + threads.length < total
196
+ perPage: perPageForResponse,
197
+ hasMore: perPageInput === false ? false : offset + perPage < total
272
198
  };
273
199
  } catch (error) {
274
200
  const mastraError = new MastraError(
275
201
  {
276
- id: "MASTRA_STORAGE_MSSQL_STORE_GET_THREADS_BY_RESOURCE_ID_PAGINATED_FAILED",
202
+ id: "MASTRA_STORAGE_MSSQL_STORE_LIST_THREADS_BY_RESOURCE_ID_FAILED",
277
203
  domain: ErrorDomain.STORAGE,
278
204
  category: ErrorCategory.THIRD_PARTY,
279
205
  details: {
@@ -285,7 +211,13 @@ var MemoryMSSQL = class extends MemoryStorage {
285
211
  );
286
212
  this.logger?.error?.(mastraError.toString());
287
213
  this.logger?.trackException?.(mastraError);
288
- return { threads: [], total: 0, page, perPage: perPageInput || 100, hasMore: false };
214
+ return {
215
+ threads: [],
216
+ total: 0,
217
+ page,
218
+ perPage: perPageForResponse,
219
+ hasMore: false
220
+ };
289
221
  }
290
222
  }
291
223
  async saveThread({ thread }) {
@@ -307,7 +239,12 @@ var MemoryMSSQL = class extends MemoryStorage {
307
239
  req.input("id", thread.id);
308
240
  req.input("resourceId", thread.resourceId);
309
241
  req.input("title", thread.title);
310
- req.input("metadata", thread.metadata ? JSON.stringify(thread.metadata) : null);
242
+ const metadata = thread.metadata ? JSON.stringify(thread.metadata) : null;
243
+ if (metadata === null) {
244
+ req.input("metadata", sql2.NVarChar, null);
245
+ } else {
246
+ req.input("metadata", metadata);
247
+ }
311
248
  req.input("createdAt", sql2.DateTime2, thread.createdAt);
312
249
  req.input("updatedAt", sql2.DateTime2, thread.updatedAt);
313
250
  await req.query(mergeSql);
@@ -326,30 +263,6 @@ var MemoryMSSQL = class extends MemoryStorage {
326
263
  );
327
264
  }
328
265
  }
329
- /**
330
- * @deprecated use getThreadsByResourceIdPaginated instead
331
- */
332
- async getThreadsByResourceId(args) {
333
- const { resourceId, orderBy = "createdAt", sortDirection = "DESC" } = args;
334
- try {
335
- const baseQuery = `FROM ${getTableName({ indexName: TABLE_THREADS, schemaName: getSchemaName(this.schema) })} WHERE [resourceId] = @resourceId`;
336
- const orderByField = orderBy === "createdAt" ? "[createdAt]" : "[updatedAt]";
337
- const dataQuery = `SELECT id, [resourceId], title, metadata, [createdAt], [updatedAt] ${baseQuery} ORDER BY ${orderByField} ${sortDirection}`;
338
- const request = this.pool.request();
339
- request.input("resourceId", resourceId);
340
- const resultSet = await request.query(dataQuery);
341
- const rows = resultSet.recordset || [];
342
- return rows.map((thread) => ({
343
- ...thread,
344
- metadata: typeof thread.metadata === "string" ? JSON.parse(thread.metadata) : thread.metadata,
345
- createdAt: thread.createdAt,
346
- updatedAt: thread.updatedAt
347
- }));
348
- } catch (error) {
349
- this.logger?.error?.(`Error getting threads for resource ${resourceId}:`, error);
350
- return [];
351
- }
352
- }
353
266
  /**
354
267
  * Updates a thread's title and metadata, merging with existing metadata. Returns the updated thread.
355
268
  */
@@ -377,7 +290,7 @@ var MemoryMSSQL = class extends MemoryStorage {
377
290
  };
378
291
  try {
379
292
  const table = getTableName({ indexName: TABLE_THREADS, schemaName: getSchemaName(this.schema) });
380
- const sql7 = `UPDATE ${table}
293
+ const sql5 = `UPDATE ${table}
381
294
  SET title = @title,
382
295
  metadata = @metadata,
383
296
  [updatedAt] = @updatedAt
@@ -388,7 +301,7 @@ var MemoryMSSQL = class extends MemoryStorage {
388
301
  req.input("title", title);
389
302
  req.input("metadata", JSON.stringify(mergedMetadata));
390
303
  req.input("updatedAt", /* @__PURE__ */ new Date());
391
- const result = await req.query(sql7);
304
+ const result = await req.query(sql5);
392
305
  let thread = result.recordset && result.recordset[0];
393
306
  if (thread && "seq_id" in thread) {
394
307
  const { seq_id, ...rest } = thread;
@@ -458,8 +371,7 @@ var MemoryMSSQL = class extends MemoryStorage {
458
371
  }
459
372
  async _getIncludedMessages({
460
373
  threadId,
461
- selectBy,
462
- orderByStatement
374
+ selectBy
463
375
  }) {
464
376
  if (!threadId.trim()) throw new Error("threadId must be a non-empty string");
465
377
  const include = selectBy?.include;
@@ -487,7 +399,7 @@ var MemoryMSSQL = class extends MemoryStorage {
487
399
  m.[resourceId],
488
400
  m.seq_id
489
401
  FROM (
490
- SELECT *, ROW_NUMBER() OVER (${orderByStatement}) as row_num
402
+ SELECT *, ROW_NUMBER() OVER (ORDER BY [createdAt] ASC) as row_num
491
403
  FROM ${getTableName({ indexName: TABLE_MESSAGES, schemaName: getSchemaName(this.schema) })}
492
404
  WHERE [thread_id] = ${pThreadId}
493
405
  ) AS m
@@ -495,15 +407,17 @@ var MemoryMSSQL = class extends MemoryStorage {
495
407
  OR EXISTS (
496
408
  SELECT 1
497
409
  FROM (
498
- SELECT *, ROW_NUMBER() OVER (${orderByStatement}) as row_num
410
+ SELECT *, ROW_NUMBER() OVER (ORDER BY [createdAt] ASC) as row_num
499
411
  FROM ${getTableName({ indexName: TABLE_MESSAGES, schemaName: getSchemaName(this.schema) })}
500
412
  WHERE [thread_id] = ${pThreadId}
501
413
  ) AS target
502
414
  WHERE target.id = ${pId}
503
415
  AND (
504
- (m.row_num <= target.row_num + ${pPrev} AND m.row_num > target.row_num)
416
+ -- Get previous messages (messages that come BEFORE the target)
417
+ (m.row_num < target.row_num AND m.row_num >= target.row_num - ${pPrev})
505
418
  OR
506
- (m.row_num >= target.row_num - ${pNext} AND m.row_num < target.row_num)
419
+ -- Get next messages (messages that come AFTER the target)
420
+ (m.row_num > target.row_num AND m.row_num <= target.row_num + ${pNext})
507
421
  )
508
422
  )
509
423
  `
@@ -532,8 +446,11 @@ var MemoryMSSQL = class extends MemoryStorage {
532
446
  });
533
447
  return dedupedRows;
534
448
  }
449
+ /**
450
+ * @deprecated use listMessages instead
451
+ */
535
452
  async getMessages(args) {
536
- const { threadId, resourceId, format, selectBy } = args;
453
+ const { threadId, resourceId, selectBy } = args;
537
454
  const selectStatement = `SELECT seq_id, id, content, role, type, [createdAt], thread_id AS threadId, resourceId`;
538
455
  const orderByStatement = `ORDER BY [seq_id] DESC`;
539
456
  const limit = resolveMessageLimit({ last: selectBy?.last, defaultLimit: 40 });
@@ -542,7 +459,7 @@ var MemoryMSSQL = class extends MemoryStorage {
542
459
  let rows = [];
543
460
  const include = selectBy?.include || [];
544
461
  if (include?.length) {
545
- const includeMessages = await this._getIncludedMessages({ threadId, selectBy, orderByStatement });
462
+ const includeMessages = await this._getIncludedMessages({ threadId, selectBy });
546
463
  if (includeMessages) {
547
464
  rows.push(...includeMessages);
548
465
  }
@@ -567,8 +484,19 @@ var MemoryMSSQL = class extends MemoryStorage {
567
484
  const timeDiff = a.seq_id - b.seq_id;
568
485
  return timeDiff;
569
486
  });
570
- rows = rows.map(({ seq_id, ...rest }) => rest);
571
- return this._parseAndFormatMessages(rows, format);
487
+ const messagesWithParsedContent = rows.map((row) => {
488
+ if (typeof row.content === "string") {
489
+ try {
490
+ return { ...row, content: JSON.parse(row.content) };
491
+ } catch {
492
+ return row;
493
+ }
494
+ }
495
+ return row;
496
+ });
497
+ const cleanMessages = messagesWithParsedContent.map(({ seq_id, ...rest }) => rest);
498
+ const list = new MessageList().add(cleanMessages, "memory");
499
+ return { messages: list.get.all.db() };
572
500
  } catch (error) {
573
501
  const mastraError = new MastraError(
574
502
  {
@@ -583,15 +511,12 @@ var MemoryMSSQL = class extends MemoryStorage {
583
511
  error
584
512
  );
585
513
  this.logger?.error?.(mastraError.toString());
586
- this.logger?.trackException(mastraError);
587
- return [];
514
+ this.logger?.trackException?.(mastraError);
515
+ return { messages: [] };
588
516
  }
589
517
  }
590
- async getMessagesById({
591
- messageIds,
592
- format
593
- }) {
594
- if (messageIds.length === 0) return [];
518
+ async listMessagesById({ messageIds }) {
519
+ if (messageIds.length === 0) return { messages: [] };
595
520
  const selectStatement = `SELECT seq_id, id, content, role, type, [createdAt], thread_id AS threadId, resourceId`;
596
521
  const orderByStatement = `ORDER BY [seq_id] DESC`;
597
522
  try {
@@ -607,13 +532,23 @@ var MemoryMSSQL = class extends MemoryStorage {
607
532
  const timeDiff = a.seq_id - b.seq_id;
608
533
  return timeDiff;
609
534
  });
610
- rows = rows.map(({ seq_id, ...rest }) => rest);
611
- if (format === `v1`) return this._parseAndFormatMessages(rows, format);
612
- return this._parseAndFormatMessages(rows, `v2`);
535
+ const messagesWithParsedContent = rows.map((row) => {
536
+ if (typeof row.content === "string") {
537
+ try {
538
+ return { ...row, content: JSON.parse(row.content) };
539
+ } catch {
540
+ return row;
541
+ }
542
+ }
543
+ return row;
544
+ });
545
+ const cleanMessages = messagesWithParsedContent.map(({ seq_id, ...rest }) => rest);
546
+ const list = new MessageList().add(cleanMessages, "memory");
547
+ return { messages: list.get.all.db() };
613
548
  } catch (error) {
614
549
  const mastraError = new MastraError(
615
550
  {
616
- id: "MASTRA_STORAGE_MSSQL_STORE_GET_MESSAGES_BY_ID_FAILED",
551
+ id: "MASTRA_STORAGE_MSSQL_STORE_LIST_MESSAGES_BY_ID_FAILED",
617
552
  domain: ErrorDomain.STORAGE,
618
553
  category: ErrorCategory.THIRD_PARTY,
619
554
  details: {
@@ -623,97 +558,125 @@ var MemoryMSSQL = class extends MemoryStorage {
623
558
  error
624
559
  );
625
560
  this.logger?.error?.(mastraError.toString());
626
- this.logger?.trackException(mastraError);
627
- return [];
561
+ this.logger?.trackException?.(mastraError);
562
+ return { messages: [] };
628
563
  }
629
564
  }
630
- async getMessagesPaginated(args) {
631
- const { threadId, resourceId, format, selectBy } = args;
632
- const { page = 0, perPage: perPageInput, dateRange } = selectBy?.pagination || {};
565
+ async listMessages(args) {
566
+ const { threadId, resourceId, include, filter, perPage: perPageInput, page = 0, orderBy } = args;
567
+ if (!threadId.trim()) {
568
+ throw new MastraError(
569
+ {
570
+ id: "STORAGE_MSSQL_LIST_MESSAGES_INVALID_THREAD_ID",
571
+ domain: ErrorDomain.STORAGE,
572
+ category: ErrorCategory.THIRD_PARTY,
573
+ details: { threadId }
574
+ },
575
+ new Error("threadId must be a non-empty string")
576
+ );
577
+ }
578
+ const perPage = normalizePerPage(perPageInput, 40);
579
+ const { offset, perPage: perPageForResponse } = calculatePagination(page, perPageInput, perPage);
633
580
  try {
634
- if (!threadId.trim()) throw new Error("threadId must be a non-empty string");
635
- const fromDate = dateRange?.start;
636
- const toDate = dateRange?.end;
581
+ const { field, direction } = this.parseOrderBy(orderBy);
582
+ const orderByStatement = `ORDER BY [${field}] ${direction}`;
637
583
  const selectStatement = `SELECT seq_id, id, content, role, type, [createdAt], thread_id AS threadId, resourceId`;
638
- const orderByStatement = `ORDER BY [seq_id] DESC`;
639
- let messages = [];
640
- if (selectBy?.include?.length) {
641
- const includeMessages = await this._getIncludedMessages({ threadId, selectBy, orderByStatement });
642
- if (includeMessages) messages.push(...includeMessages);
643
- }
644
- const perPage = perPageInput !== void 0 ? perPageInput : resolveMessageLimit({ last: selectBy?.last, defaultLimit: 40 });
645
- const currentOffset = page * perPage;
584
+ const tableName = getTableName({ indexName: TABLE_MESSAGES, schemaName: getSchemaName(this.schema) });
646
585
  const conditions = ["[thread_id] = @threadId"];
647
586
  const request = this.pool.request();
648
587
  request.input("threadId", threadId);
649
- if (fromDate instanceof Date && !isNaN(fromDate.getTime())) {
588
+ if (resourceId) {
589
+ conditions.push("[resourceId] = @resourceId");
590
+ request.input("resourceId", resourceId);
591
+ }
592
+ if (filter?.dateRange?.start) {
650
593
  conditions.push("[createdAt] >= @fromDate");
651
- request.input("fromDate", fromDate.toISOString());
594
+ request.input("fromDate", filter.dateRange.start);
652
595
  }
653
- if (toDate instanceof Date && !isNaN(toDate.getTime())) {
596
+ if (filter?.dateRange?.end) {
654
597
  conditions.push("[createdAt] <= @toDate");
655
- request.input("toDate", toDate.toISOString());
598
+ request.input("toDate", filter.dateRange.end);
656
599
  }
657
600
  const whereClause = `WHERE ${conditions.join(" AND ")}`;
658
- const countQuery = `SELECT COUNT(*) as total FROM ${getTableName({ indexName: TABLE_MESSAGES, schemaName: getSchemaName(this.schema) })} ${whereClause}`;
601
+ const countQuery = `SELECT COUNT(*) as total FROM ${tableName} ${whereClause}`;
659
602
  const countResult = await request.query(countQuery);
660
603
  const total = parseInt(countResult.recordset[0]?.total, 10) || 0;
661
- if (total === 0 && messages.length > 0) {
662
- const parsedIncluded = this._parseAndFormatMessages(messages, format);
604
+ const limitValue = perPageInput === false ? total : perPage;
605
+ const dataQuery = `${selectStatement} FROM ${tableName} ${whereClause} ${orderByStatement} OFFSET @offset ROWS FETCH NEXT @limit ROWS ONLY`;
606
+ request.input("offset", offset);
607
+ if (limitValue > 2147483647) {
608
+ request.input("limit", sql2.BigInt, limitValue);
609
+ } else {
610
+ request.input("limit", limitValue);
611
+ }
612
+ const rowsResult = await request.query(dataQuery);
613
+ const rows = rowsResult.recordset || [];
614
+ const messages = [...rows];
615
+ if (total === 0 && messages.length === 0 && (!include || include.length === 0)) {
663
616
  return {
664
- messages: parsedIncluded,
665
- total: parsedIncluded.length,
617
+ messages: [],
618
+ total: 0,
666
619
  page,
667
- perPage,
620
+ perPage: perPageForResponse,
668
621
  hasMore: false
669
622
  };
670
623
  }
671
- const excludeIds = messages.map((m) => m.id);
672
- if (excludeIds.length > 0) {
673
- const excludeParams = excludeIds.map((_, idx) => `@id${idx}`);
674
- conditions.push(`id NOT IN (${excludeParams.join(", ")})`);
675
- excludeIds.forEach((id, idx) => request.input(`id${idx}`, id));
624
+ const messageIds = new Set(messages.map((m) => m.id));
625
+ if (include && include.length > 0) {
626
+ const selectBy = { include };
627
+ const includeMessages = await this._getIncludedMessages({ threadId, selectBy });
628
+ if (includeMessages) {
629
+ for (const includeMsg of includeMessages) {
630
+ if (!messageIds.has(includeMsg.id)) {
631
+ messages.push(includeMsg);
632
+ messageIds.add(includeMsg.id);
633
+ }
634
+ }
635
+ }
676
636
  }
677
- const finalWhereClause = `WHERE ${conditions.join(" AND ")}`;
678
- const dataQuery = `${selectStatement} FROM ${getTableName({ indexName: TABLE_MESSAGES, schemaName: getSchemaName(this.schema) })} ${finalWhereClause} ${orderByStatement} OFFSET @offset ROWS FETCH NEXT @limit ROWS ONLY`;
679
- request.input("offset", currentOffset);
680
- request.input("limit", perPage);
681
- const rowsResult = await request.query(dataQuery);
682
- const rows = rowsResult.recordset || [];
683
- rows.sort((a, b) => a.seq_id - b.seq_id);
684
- messages.push(...rows);
685
- const parsed = this._parseAndFormatMessages(messages, format);
637
+ const parsed = this._parseAndFormatMessages(messages, "v2");
638
+ let finalMessages = parsed;
639
+ finalMessages = finalMessages.sort((a, b) => {
640
+ const aValue = field === "createdAt" ? new Date(a.createdAt).getTime() : a[field];
641
+ const bValue = field === "createdAt" ? new Date(b.createdAt).getTime() : b[field];
642
+ return direction === "ASC" ? aValue - bValue : bValue - aValue;
643
+ });
644
+ const returnedThreadMessageIds = new Set(finalMessages.filter((m) => m.threadId === threadId).map((m) => m.id));
645
+ const allThreadMessagesReturned = returnedThreadMessageIds.size >= total;
646
+ const hasMore = perPageInput !== false && !allThreadMessagesReturned && offset + perPage < total;
686
647
  return {
687
- messages: parsed,
688
- total: total + excludeIds.length,
648
+ messages: finalMessages,
649
+ total,
689
650
  page,
690
- perPage,
691
- hasMore: currentOffset + rows.length < total
651
+ perPage: perPageForResponse,
652
+ hasMore
692
653
  };
693
654
  } catch (error) {
694
655
  const mastraError = new MastraError(
695
656
  {
696
- id: "MASTRA_STORAGE_MSSQL_STORE_GET_MESSAGES_PAGINATED_FAILED",
657
+ id: "MASTRA_STORAGE_MSSQL_STORE_LIST_MESSAGES_FAILED",
697
658
  domain: ErrorDomain.STORAGE,
698
659
  category: ErrorCategory.THIRD_PARTY,
699
660
  details: {
700
661
  threadId,
701
- resourceId: resourceId ?? "",
702
- page
662
+ resourceId: resourceId ?? ""
703
663
  }
704
664
  },
705
665
  error
706
666
  );
707
667
  this.logger?.error?.(mastraError.toString());
708
- this.logger?.trackException(mastraError);
709
- return { messages: [], total: 0, page, perPage: perPageInput || 40, hasMore: false };
668
+ this.logger?.trackException?.(mastraError);
669
+ return {
670
+ messages: [],
671
+ total: 0,
672
+ page,
673
+ perPage: perPageForResponse,
674
+ hasMore: false
675
+ };
710
676
  }
711
677
  }
712
- async saveMessages({
713
- messages,
714
- format
715
- }) {
716
- if (messages.length === 0) return messages;
678
+ async saveMessages({ messages }) {
679
+ if (messages.length === 0) return { messages: [] };
717
680
  const threadId = messages[0]?.threadId;
718
681
  if (!threadId) {
719
682
  throw new MastraError({
@@ -795,8 +758,7 @@ var MemoryMSSQL = class extends MemoryStorage {
795
758
  return message;
796
759
  });
797
760
  const list = new MessageList().add(messagesWithParsedContent, "memory");
798
- if (format === "v2") return list.get.all.v2();
799
- return list.get.all.v1();
761
+ return { messages: list.get.all.db() };
800
762
  } catch (error) {
801
763
  throw new MastraError(
802
764
  {
@@ -972,8 +934,10 @@ var MemoryMSSQL = class extends MemoryStorage {
972
934
  return null;
973
935
  }
974
936
  return {
975
- ...result,
976
- workingMemory: typeof result.workingMemory === "object" ? JSON.stringify(result.workingMemory) : result.workingMemory,
937
+ id: result.id,
938
+ createdAt: result.createdAt,
939
+ updatedAt: result.updatedAt,
940
+ workingMemory: result.workingMemory,
977
941
  metadata: typeof result.metadata === "string" ? JSON.parse(result.metadata) : result.metadata
978
942
  };
979
943
  } catch (error) {
@@ -987,7 +951,7 @@ var MemoryMSSQL = class extends MemoryStorage {
987
951
  error
988
952
  );
989
953
  this.logger?.error?.(mastraError.toString());
990
- this.logger?.trackException(mastraError);
954
+ this.logger?.trackException?.(mastraError);
991
955
  throw mastraError;
992
956
  }
993
957
  }
@@ -996,7 +960,7 @@ var MemoryMSSQL = class extends MemoryStorage {
996
960
  tableName: TABLE_RESOURCES,
997
961
  record: {
998
962
  ...resource,
999
- metadata: JSON.stringify(resource.metadata)
963
+ metadata: resource.metadata
1000
964
  }
1001
965
  });
1002
966
  return resource;
@@ -1054,119 +1018,444 @@ var MemoryMSSQL = class extends MemoryStorage {
1054
1018
  error
1055
1019
  );
1056
1020
  this.logger?.error?.(mastraError.toString());
1057
- this.logger?.trackException(mastraError);
1021
+ this.logger?.trackException?.(mastraError);
1058
1022
  throw mastraError;
1059
1023
  }
1060
1024
  }
1061
1025
  };
1062
- var StoreOperationsMSSQL = class extends StoreOperations {
1026
+ var ObservabilityMSSQL = class extends ObservabilityStorage {
1063
1027
  pool;
1064
- schemaName;
1065
- setupSchemaPromise = null;
1066
- schemaSetupComplete = void 0;
1067
- getSqlType(type, isPrimaryKey = false) {
1068
- switch (type) {
1069
- case "text":
1070
- return isPrimaryKey ? "NVARCHAR(255)" : "NVARCHAR(MAX)";
1071
- case "timestamp":
1072
- return "DATETIME2(7)";
1073
- case "uuid":
1074
- return "UNIQUEIDENTIFIER";
1075
- case "jsonb":
1076
- return "NVARCHAR(MAX)";
1077
- case "integer":
1078
- return "INT";
1079
- case "bigint":
1080
- return "BIGINT";
1081
- case "float":
1082
- return "FLOAT";
1083
- default:
1084
- throw new MastraError({
1085
- id: "MASTRA_STORAGE_MSSQL_STORE_TYPE_NOT_SUPPORTED",
1086
- domain: ErrorDomain.STORAGE,
1087
- category: ErrorCategory.THIRD_PARTY
1088
- });
1089
- }
1090
- }
1091
- constructor({ pool, schemaName }) {
1028
+ operations;
1029
+ schema;
1030
+ constructor({
1031
+ pool,
1032
+ operations,
1033
+ schema
1034
+ }) {
1092
1035
  super();
1093
1036
  this.pool = pool;
1094
- this.schemaName = schemaName;
1037
+ this.operations = operations;
1038
+ this.schema = schema;
1095
1039
  }
1096
- async hasColumn(table, column) {
1097
- const schema = this.schemaName || "dbo";
1098
- const request = this.pool.request();
1099
- request.input("schema", schema);
1100
- request.input("table", table);
1101
- request.input("column", column);
1102
- request.input("columnLower", column.toLowerCase());
1103
- const result = await request.query(
1104
- `SELECT 1 FROM INFORMATION_SCHEMA.COLUMNS WHERE TABLE_SCHEMA = @schema AND TABLE_NAME = @table AND (COLUMN_NAME = @column OR COLUMN_NAME = @columnLower)`
1105
- );
1106
- return result.recordset.length > 0;
1040
+ get aiTracingStrategy() {
1041
+ return {
1042
+ preferred: "batch-with-updates",
1043
+ supported: ["batch-with-updates", "insert-only"]
1044
+ };
1107
1045
  }
1108
- async setupSchema() {
1109
- if (!this.schemaName || this.schemaSetupComplete) {
1110
- return;
1111
- }
1112
- if (!this.setupSchemaPromise) {
1113
- this.setupSchemaPromise = (async () => {
1114
- try {
1115
- const checkRequest = this.pool.request();
1116
- checkRequest.input("schemaName", this.schemaName);
1117
- const checkResult = await checkRequest.query(`
1118
- SELECT 1 AS found FROM INFORMATION_SCHEMA.SCHEMATA WHERE SCHEMA_NAME = @schemaName
1119
- `);
1120
- const schemaExists = Array.isArray(checkResult.recordset) && checkResult.recordset.length > 0;
1121
- if (!schemaExists) {
1122
- try {
1123
- await this.pool.request().query(`CREATE SCHEMA [${this.schemaName}]`);
1124
- this.logger?.info?.(`Schema "${this.schemaName}" created successfully`);
1125
- } catch (error) {
1126
- this.logger?.error?.(`Failed to create schema "${this.schemaName}"`, { error });
1127
- throw new Error(
1128
- `Unable to create schema "${this.schemaName}". This requires CREATE privilege on the database. Either create the schema manually or grant CREATE privilege to the user.`
1129
- );
1130
- }
1046
+ async createAISpan(span) {
1047
+ try {
1048
+ const startedAt = span.startedAt instanceof Date ? span.startedAt.toISOString() : span.startedAt;
1049
+ const endedAt = span.endedAt instanceof Date ? span.endedAt.toISOString() : span.endedAt;
1050
+ const record = {
1051
+ ...span,
1052
+ startedAt,
1053
+ endedAt
1054
+ // Note: createdAt/updatedAt will be set by default values
1055
+ };
1056
+ return this.operations.insert({ tableName: TABLE_AI_SPANS, record });
1057
+ } catch (error) {
1058
+ throw new MastraError(
1059
+ {
1060
+ id: "MSSQL_STORE_CREATE_AI_SPAN_FAILED",
1061
+ domain: ErrorDomain.STORAGE,
1062
+ category: ErrorCategory.USER,
1063
+ details: {
1064
+ spanId: span.spanId,
1065
+ traceId: span.traceId,
1066
+ spanType: span.spanType,
1067
+ spanName: span.name
1131
1068
  }
1132
- this.schemaSetupComplete = true;
1133
- this.logger?.debug?.(`Schema "${this.schemaName}" is ready for use`);
1134
- } catch (error) {
1135
- this.schemaSetupComplete = void 0;
1136
- this.setupSchemaPromise = null;
1137
- throw error;
1138
- } finally {
1139
- this.setupSchemaPromise = null;
1140
- }
1141
- })();
1069
+ },
1070
+ error
1071
+ );
1142
1072
  }
1143
- await this.setupSchemaPromise;
1144
1073
  }
1145
- async insert({ tableName, record }) {
1074
+ async getAITrace(traceId) {
1146
1075
  try {
1147
- const columns = Object.keys(record).map((col) => parseSqlIdentifier(col, "column name"));
1148
- const values = Object.values(record);
1149
- const paramNames = values.map((_, i) => `@param${i}`);
1150
- const insertSql = `INSERT INTO ${getTableName({ indexName: tableName, schemaName: getSchemaName(this.schemaName) })} (${columns.map((c) => `[${c}]`).join(", ")}) VALUES (${paramNames.join(", ")})`;
1151
- const request = this.pool.request();
1152
- values.forEach((value, i) => {
1153
- if (value instanceof Date) {
1154
- request.input(`param${i}`, sql2.DateTime2, value);
1155
- } else if (typeof value === "object" && value !== null) {
1156
- request.input(`param${i}`, JSON.stringify(value));
1157
- } else {
1158
- request.input(`param${i}`, value);
1159
- }
1076
+ const tableName = getTableName({
1077
+ indexName: TABLE_AI_SPANS,
1078
+ schemaName: getSchemaName(this.schema)
1160
1079
  });
1161
- await request.query(insertSql);
1080
+ const request = this.pool.request();
1081
+ request.input("traceId", traceId);
1082
+ const result = await request.query(
1083
+ `SELECT
1084
+ [traceId], [spanId], [parentSpanId], [name], [scope], [spanType],
1085
+ [attributes], [metadata], [links], [input], [output], [error], [isEvent],
1086
+ [startedAt], [endedAt], [createdAt], [updatedAt]
1087
+ FROM ${tableName}
1088
+ WHERE [traceId] = @traceId
1089
+ ORDER BY [startedAt] DESC`
1090
+ );
1091
+ if (!result.recordset || result.recordset.length === 0) {
1092
+ return null;
1093
+ }
1094
+ return {
1095
+ traceId,
1096
+ spans: result.recordset.map(
1097
+ (span) => transformFromSqlRow({
1098
+ tableName: TABLE_AI_SPANS,
1099
+ sqlRow: span
1100
+ })
1101
+ )
1102
+ };
1162
1103
  } catch (error) {
1163
1104
  throw new MastraError(
1164
1105
  {
1165
- id: "MASTRA_STORAGE_MSSQL_STORE_INSERT_FAILED",
1106
+ id: "MSSQL_STORE_GET_AI_TRACE_FAILED",
1166
1107
  domain: ErrorDomain.STORAGE,
1167
- category: ErrorCategory.THIRD_PARTY,
1108
+ category: ErrorCategory.USER,
1168
1109
  details: {
1169
- tableName
1110
+ traceId
1111
+ }
1112
+ },
1113
+ error
1114
+ );
1115
+ }
1116
+ }
1117
+ async updateAISpan({
1118
+ spanId,
1119
+ traceId,
1120
+ updates
1121
+ }) {
1122
+ try {
1123
+ const data = { ...updates };
1124
+ if (data.endedAt instanceof Date) {
1125
+ data.endedAt = data.endedAt.toISOString();
1126
+ }
1127
+ if (data.startedAt instanceof Date) {
1128
+ data.startedAt = data.startedAt.toISOString();
1129
+ }
1130
+ await this.operations.update({
1131
+ tableName: TABLE_AI_SPANS,
1132
+ keys: { spanId, traceId },
1133
+ data
1134
+ });
1135
+ } catch (error) {
1136
+ throw new MastraError(
1137
+ {
1138
+ id: "MSSQL_STORE_UPDATE_AI_SPAN_FAILED",
1139
+ domain: ErrorDomain.STORAGE,
1140
+ category: ErrorCategory.USER,
1141
+ details: {
1142
+ spanId,
1143
+ traceId
1144
+ }
1145
+ },
1146
+ error
1147
+ );
1148
+ }
1149
+ }
1150
+ async getAITracesPaginated({
1151
+ filters,
1152
+ pagination
1153
+ }) {
1154
+ const page = pagination?.page ?? 0;
1155
+ const perPage = pagination?.perPage ?? 10;
1156
+ const { entityId, entityType, ...actualFilters } = filters || {};
1157
+ const filtersWithDateRange = {
1158
+ ...actualFilters,
1159
+ ...buildDateRangeFilter(pagination?.dateRange, "startedAt"),
1160
+ parentSpanId: null
1161
+ // Only get root spans for traces
1162
+ };
1163
+ const whereClause = prepareWhereClause(filtersWithDateRange);
1164
+ let actualWhereClause = whereClause.sql;
1165
+ const params = { ...whereClause.params };
1166
+ let currentParamIndex = Object.keys(params).length + 1;
1167
+ if (entityId && entityType) {
1168
+ let name = "";
1169
+ if (entityType === "workflow") {
1170
+ name = `workflow run: '${entityId}'`;
1171
+ } else if (entityType === "agent") {
1172
+ name = `agent run: '${entityId}'`;
1173
+ } else {
1174
+ const error = new MastraError({
1175
+ id: "MSSQL_STORE_GET_AI_TRACES_PAGINATED_FAILED",
1176
+ domain: ErrorDomain.STORAGE,
1177
+ category: ErrorCategory.USER,
1178
+ details: {
1179
+ entityType
1180
+ },
1181
+ text: `Cannot filter by entity type: ${entityType}`
1182
+ });
1183
+ throw error;
1184
+ }
1185
+ const entityParam = `p${currentParamIndex++}`;
1186
+ if (actualWhereClause) {
1187
+ actualWhereClause += ` AND [name] = @${entityParam}`;
1188
+ } else {
1189
+ actualWhereClause = ` WHERE [name] = @${entityParam}`;
1190
+ }
1191
+ params[entityParam] = name;
1192
+ }
1193
+ const tableName = getTableName({
1194
+ indexName: TABLE_AI_SPANS,
1195
+ schemaName: getSchemaName(this.schema)
1196
+ });
1197
+ try {
1198
+ const countRequest = this.pool.request();
1199
+ Object.entries(params).forEach(([key, value]) => {
1200
+ countRequest.input(key, value);
1201
+ });
1202
+ const countResult = await countRequest.query(
1203
+ `SELECT COUNT(*) as count FROM ${tableName}${actualWhereClause}`
1204
+ );
1205
+ const total = countResult.recordset[0]?.count ?? 0;
1206
+ if (total === 0) {
1207
+ return {
1208
+ pagination: {
1209
+ total: 0,
1210
+ page,
1211
+ perPage,
1212
+ hasMore: false
1213
+ },
1214
+ spans: []
1215
+ };
1216
+ }
1217
+ const dataRequest = this.pool.request();
1218
+ Object.entries(params).forEach(([key, value]) => {
1219
+ dataRequest.input(key, value);
1220
+ });
1221
+ dataRequest.input("offset", page * perPage);
1222
+ dataRequest.input("limit", perPage);
1223
+ const dataResult = await dataRequest.query(
1224
+ `SELECT * FROM ${tableName}${actualWhereClause} ORDER BY [startedAt] DESC OFFSET @offset ROWS FETCH NEXT @limit ROWS ONLY`
1225
+ );
1226
+ const spans = dataResult.recordset.map(
1227
+ (row) => transformFromSqlRow({
1228
+ tableName: TABLE_AI_SPANS,
1229
+ sqlRow: row
1230
+ })
1231
+ );
1232
+ return {
1233
+ pagination: {
1234
+ total,
1235
+ page,
1236
+ perPage,
1237
+ hasMore: (page + 1) * perPage < total
1238
+ },
1239
+ spans
1240
+ };
1241
+ } catch (error) {
1242
+ throw new MastraError(
1243
+ {
1244
+ id: "MSSQL_STORE_GET_AI_TRACES_PAGINATED_FAILED",
1245
+ domain: ErrorDomain.STORAGE,
1246
+ category: ErrorCategory.USER
1247
+ },
1248
+ error
1249
+ );
1250
+ }
1251
+ }
1252
+ async batchCreateAISpans(args) {
1253
+ if (!args.records || args.records.length === 0) {
1254
+ return;
1255
+ }
1256
+ try {
1257
+ await this.operations.batchInsert({
1258
+ tableName: TABLE_AI_SPANS,
1259
+ records: args.records.map((span) => ({
1260
+ ...span,
1261
+ startedAt: span.startedAt instanceof Date ? span.startedAt.toISOString() : span.startedAt,
1262
+ endedAt: span.endedAt instanceof Date ? span.endedAt.toISOString() : span.endedAt
1263
+ }))
1264
+ });
1265
+ } catch (error) {
1266
+ throw new MastraError(
1267
+ {
1268
+ id: "MSSQL_STORE_BATCH_CREATE_AI_SPANS_FAILED",
1269
+ domain: ErrorDomain.STORAGE,
1270
+ category: ErrorCategory.USER,
1271
+ details: {
1272
+ count: args.records.length
1273
+ }
1274
+ },
1275
+ error
1276
+ );
1277
+ }
1278
+ }
1279
+ async batchUpdateAISpans(args) {
1280
+ if (!args.records || args.records.length === 0) {
1281
+ return;
1282
+ }
1283
+ try {
1284
+ const updates = args.records.map(({ traceId, spanId, updates: data }) => {
1285
+ const processedData = { ...data };
1286
+ if (processedData.endedAt instanceof Date) {
1287
+ processedData.endedAt = processedData.endedAt.toISOString();
1288
+ }
1289
+ if (processedData.startedAt instanceof Date) {
1290
+ processedData.startedAt = processedData.startedAt.toISOString();
1291
+ }
1292
+ return {
1293
+ keys: { spanId, traceId },
1294
+ data: processedData
1295
+ };
1296
+ });
1297
+ await this.operations.batchUpdate({
1298
+ tableName: TABLE_AI_SPANS,
1299
+ updates
1300
+ });
1301
+ } catch (error) {
1302
+ throw new MastraError(
1303
+ {
1304
+ id: "MSSQL_STORE_BATCH_UPDATE_AI_SPANS_FAILED",
1305
+ domain: ErrorDomain.STORAGE,
1306
+ category: ErrorCategory.USER,
1307
+ details: {
1308
+ count: args.records.length
1309
+ }
1310
+ },
1311
+ error
1312
+ );
1313
+ }
1314
+ }
1315
+ async batchDeleteAITraces(args) {
1316
+ if (!args.traceIds || args.traceIds.length === 0) {
1317
+ return;
1318
+ }
1319
+ try {
1320
+ const keys = args.traceIds.map((traceId) => ({ traceId }));
1321
+ await this.operations.batchDelete({
1322
+ tableName: TABLE_AI_SPANS,
1323
+ keys
1324
+ });
1325
+ } catch (error) {
1326
+ throw new MastraError(
1327
+ {
1328
+ id: "MSSQL_STORE_BATCH_DELETE_AI_TRACES_FAILED",
1329
+ domain: ErrorDomain.STORAGE,
1330
+ category: ErrorCategory.USER,
1331
+ details: {
1332
+ count: args.traceIds.length
1333
+ }
1334
+ },
1335
+ error
1336
+ );
1337
+ }
1338
+ }
1339
+ };
1340
+ var StoreOperationsMSSQL = class extends StoreOperations {
1341
+ pool;
1342
+ schemaName;
1343
+ setupSchemaPromise = null;
1344
+ schemaSetupComplete = void 0;
1345
+ getSqlType(type, isPrimaryKey = false, useLargeStorage = false) {
1346
+ switch (type) {
1347
+ case "text":
1348
+ if (useLargeStorage) {
1349
+ return "NVARCHAR(MAX)";
1350
+ }
1351
+ return isPrimaryKey ? "NVARCHAR(255)" : "NVARCHAR(400)";
1352
+ case "timestamp":
1353
+ return "DATETIME2(7)";
1354
+ case "uuid":
1355
+ return "UNIQUEIDENTIFIER";
1356
+ case "jsonb":
1357
+ return "NVARCHAR(MAX)";
1358
+ case "integer":
1359
+ return "INT";
1360
+ case "bigint":
1361
+ return "BIGINT";
1362
+ case "float":
1363
+ return "FLOAT";
1364
+ case "boolean":
1365
+ return "BIT";
1366
+ default:
1367
+ throw new MastraError({
1368
+ id: "MASTRA_STORAGE_MSSQL_STORE_TYPE_NOT_SUPPORTED",
1369
+ domain: ErrorDomain.STORAGE,
1370
+ category: ErrorCategory.THIRD_PARTY
1371
+ });
1372
+ }
1373
+ }
1374
+ constructor({ pool, schemaName }) {
1375
+ super();
1376
+ this.pool = pool;
1377
+ this.schemaName = schemaName;
1378
+ }
1379
+ async hasColumn(table, column) {
1380
+ const schema = this.schemaName || "dbo";
1381
+ const request = this.pool.request();
1382
+ request.input("schema", schema);
1383
+ request.input("table", table);
1384
+ request.input("column", column);
1385
+ request.input("columnLower", column.toLowerCase());
1386
+ const result = await request.query(
1387
+ `SELECT 1 FROM INFORMATION_SCHEMA.COLUMNS WHERE TABLE_SCHEMA = @schema AND TABLE_NAME = @table AND (COLUMN_NAME = @column OR COLUMN_NAME = @columnLower)`
1388
+ );
1389
+ return result.recordset.length > 0;
1390
+ }
1391
+ async setupSchema() {
1392
+ if (!this.schemaName || this.schemaSetupComplete) {
1393
+ return;
1394
+ }
1395
+ if (!this.setupSchemaPromise) {
1396
+ this.setupSchemaPromise = (async () => {
1397
+ try {
1398
+ const checkRequest = this.pool.request();
1399
+ checkRequest.input("schemaName", this.schemaName);
1400
+ const checkResult = await checkRequest.query(`
1401
+ SELECT 1 AS found FROM INFORMATION_SCHEMA.SCHEMATA WHERE SCHEMA_NAME = @schemaName
1402
+ `);
1403
+ const schemaExists = Array.isArray(checkResult.recordset) && checkResult.recordset.length > 0;
1404
+ if (!schemaExists) {
1405
+ try {
1406
+ await this.pool.request().query(`CREATE SCHEMA [${this.schemaName}]`);
1407
+ this.logger?.info?.(`Schema "${this.schemaName}" created successfully`);
1408
+ } catch (error) {
1409
+ this.logger?.error?.(`Failed to create schema "${this.schemaName}"`, { error });
1410
+ throw new Error(
1411
+ `Unable to create schema "${this.schemaName}". This requires CREATE privilege on the database. Either create the schema manually or grant CREATE privilege to the user.`
1412
+ );
1413
+ }
1414
+ }
1415
+ this.schemaSetupComplete = true;
1416
+ this.logger?.debug?.(`Schema "${this.schemaName}" is ready for use`);
1417
+ } catch (error) {
1418
+ this.schemaSetupComplete = void 0;
1419
+ this.setupSchemaPromise = null;
1420
+ throw error;
1421
+ } finally {
1422
+ this.setupSchemaPromise = null;
1423
+ }
1424
+ })();
1425
+ }
1426
+ await this.setupSchemaPromise;
1427
+ }
1428
+ async insert({
1429
+ tableName,
1430
+ record,
1431
+ transaction
1432
+ }) {
1433
+ try {
1434
+ const columns = Object.keys(record);
1435
+ const parsedColumns = columns.map((col) => parseSqlIdentifier(col, "column name"));
1436
+ const paramNames = columns.map((_, i) => `@param${i}`);
1437
+ const insertSql = `INSERT INTO ${getTableName({ indexName: tableName, schemaName: getSchemaName(this.schemaName) })} (${parsedColumns.map((c) => `[${c}]`).join(", ")}) VALUES (${paramNames.join(", ")})`;
1438
+ const request = transaction ? transaction.request() : this.pool.request();
1439
+ columns.forEach((col, i) => {
1440
+ const value = record[col];
1441
+ const preparedValue = this.prepareValue(value, col, tableName);
1442
+ if (preparedValue instanceof Date) {
1443
+ request.input(`param${i}`, sql2.DateTime2, preparedValue);
1444
+ } else if (preparedValue === null || preparedValue === void 0) {
1445
+ request.input(`param${i}`, this.getMssqlType(tableName, col), null);
1446
+ } else {
1447
+ request.input(`param${i}`, preparedValue);
1448
+ }
1449
+ });
1450
+ await request.query(insertSql);
1451
+ } catch (error) {
1452
+ throw new MastraError(
1453
+ {
1454
+ id: "MASTRA_STORAGE_MSSQL_STORE_INSERT_FAILED",
1455
+ domain: ErrorDomain.STORAGE,
1456
+ category: ErrorCategory.THIRD_PARTY,
1457
+ details: {
1458
+ tableName
1170
1459
  }
1171
1460
  },
1172
1461
  error
@@ -1179,7 +1468,7 @@ var StoreOperationsMSSQL = class extends StoreOperations {
1179
1468
  try {
1180
1469
  await this.pool.request().query(`TRUNCATE TABLE ${fullTableName}`);
1181
1470
  } catch (truncateError) {
1182
- if (truncateError.message && truncateError.message.includes("foreign key")) {
1471
+ if (truncateError?.number === 4712) {
1183
1472
  await this.pool.request().query(`DELETE FROM ${fullTableName}`);
1184
1473
  } else {
1185
1474
  throw truncateError;
@@ -1202,9 +1491,11 @@ var StoreOperationsMSSQL = class extends StoreOperations {
1202
1491
  getDefaultValue(type) {
1203
1492
  switch (type) {
1204
1493
  case "timestamp":
1205
- return "DEFAULT SYSDATETIMEOFFSET()";
1494
+ return "DEFAULT SYSUTCDATETIME()";
1206
1495
  case "jsonb":
1207
1496
  return "DEFAULT N'{}'";
1497
+ case "boolean":
1498
+ return "DEFAULT 0";
1208
1499
  default:
1209
1500
  return super.getDefaultValue(type);
1210
1501
  }
@@ -1215,13 +1506,29 @@ var StoreOperationsMSSQL = class extends StoreOperations {
1215
1506
  }) {
1216
1507
  try {
1217
1508
  const uniqueConstraintColumns = tableName === TABLE_WORKFLOW_SNAPSHOT ? ["workflow_name", "run_id"] : [];
1509
+ const largeDataColumns = [
1510
+ "workingMemory",
1511
+ "snapshot",
1512
+ "metadata",
1513
+ "content",
1514
+ // messages.content - can be very long conversation content
1515
+ "input",
1516
+ // evals.input - test input data
1517
+ "output",
1518
+ // evals.output - test output data
1519
+ "instructions",
1520
+ // evals.instructions - evaluation instructions
1521
+ "other"
1522
+ // traces.other - additional trace data
1523
+ ];
1218
1524
  const columns = Object.entries(schema).map(([name, def]) => {
1219
1525
  const parsedName = parseSqlIdentifier(name, "column name");
1220
1526
  const constraints = [];
1221
1527
  if (def.primaryKey) constraints.push("PRIMARY KEY");
1222
1528
  if (!def.nullable) constraints.push("NOT NULL");
1223
1529
  const isIndexed = !!def.primaryKey || uniqueConstraintColumns.includes(name);
1224
- return `[${parsedName}] ${this.getSqlType(def.type, isIndexed)} ${constraints.join(" ")}`.trim();
1530
+ const useLargeStorage = largeDataColumns.includes(name);
1531
+ return `[${parsedName}] ${this.getSqlType(def.type, isIndexed, useLargeStorage)} ${constraints.join(" ")}`.trim();
1225
1532
  }).join(",\n");
1226
1533
  if (this.schemaName) {
1227
1534
  await this.setupSchema();
@@ -1308,7 +1615,19 @@ ${columns}
1308
1615
  const columnExists = Array.isArray(checkResult.recordset) && checkResult.recordset.length > 0;
1309
1616
  if (!columnExists) {
1310
1617
  const columnDef = schema[columnName];
1311
- const sqlType = this.getSqlType(columnDef.type);
1618
+ const largeDataColumns = [
1619
+ "workingMemory",
1620
+ "snapshot",
1621
+ "metadata",
1622
+ "content",
1623
+ "input",
1624
+ "output",
1625
+ "instructions",
1626
+ "other"
1627
+ ];
1628
+ const useLargeStorage = largeDataColumns.includes(columnName);
1629
+ const isIndexed = !!columnDef.primaryKey;
1630
+ const sqlType = this.getSqlType(columnDef.type, isIndexed, useLargeStorage);
1312
1631
  const nullable = columnDef.nullable === false ? "NOT NULL" : "";
1313
1632
  const defaultValue = columnDef.nullable === false ? this.getDefaultValue(columnDef.type) : "";
1314
1633
  const parsedColumnName = parseSqlIdentifier(columnName, "column name");
@@ -1336,13 +1655,17 @@ ${columns}
1336
1655
  try {
1337
1656
  const keyEntries = Object.entries(keys).map(([key, value]) => [parseSqlIdentifier(key, "column name"), value]);
1338
1657
  const conditions = keyEntries.map(([key], i) => `[${key}] = @param${i}`).join(" AND ");
1339
- const values = keyEntries.map(([_, value]) => value);
1340
- const sql7 = `SELECT * FROM ${getTableName({ indexName: tableName, schemaName: getSchemaName(this.schemaName) })} WHERE ${conditions}`;
1658
+ const sql5 = `SELECT * FROM ${getTableName({ indexName: tableName, schemaName: getSchemaName(this.schemaName) })} WHERE ${conditions}`;
1341
1659
  const request = this.pool.request();
1342
- values.forEach((value, i) => {
1343
- request.input(`param${i}`, value);
1660
+ keyEntries.forEach(([key, value], i) => {
1661
+ const preparedValue = this.prepareValue(value, key, tableName);
1662
+ if (preparedValue === null || preparedValue === void 0) {
1663
+ request.input(`param${i}`, this.getMssqlType(tableName, key), null);
1664
+ } else {
1665
+ request.input(`param${i}`, preparedValue);
1666
+ }
1344
1667
  });
1345
- const resultSet = await request.query(sql7);
1668
+ const resultSet = await request.query(sql5);
1346
1669
  const result = resultSet.recordset[0] || null;
1347
1670
  if (!result) {
1348
1671
  return null;
@@ -1374,63 +1697,599 @@ ${columns}
1374
1697
  try {
1375
1698
  await transaction.begin();
1376
1699
  for (const record of records) {
1377
- await this.insert({ tableName, record });
1700
+ await this.insert({ tableName, record, transaction });
1701
+ }
1702
+ await transaction.commit();
1703
+ } catch (error) {
1704
+ await transaction.rollback();
1705
+ throw new MastraError(
1706
+ {
1707
+ id: "MASTRA_STORAGE_MSSQL_STORE_BATCH_INSERT_FAILED",
1708
+ domain: ErrorDomain.STORAGE,
1709
+ category: ErrorCategory.THIRD_PARTY,
1710
+ details: {
1711
+ tableName,
1712
+ numberOfRecords: records.length
1713
+ }
1714
+ },
1715
+ error
1716
+ );
1717
+ }
1718
+ }
1719
+ async dropTable({ tableName }) {
1720
+ try {
1721
+ const tableNameWithSchema = getTableName({ indexName: tableName, schemaName: getSchemaName(this.schemaName) });
1722
+ await this.pool.request().query(`DROP TABLE IF EXISTS ${tableNameWithSchema}`);
1723
+ } catch (error) {
1724
+ throw new MastraError(
1725
+ {
1726
+ id: "MASTRA_STORAGE_MSSQL_STORE_DROP_TABLE_FAILED",
1727
+ domain: ErrorDomain.STORAGE,
1728
+ category: ErrorCategory.THIRD_PARTY,
1729
+ details: {
1730
+ tableName
1731
+ }
1732
+ },
1733
+ error
1734
+ );
1735
+ }
1736
+ }
1737
+ /**
1738
+ * Prepares a value for database operations, handling Date objects and JSON serialization
1739
+ */
1740
+ prepareValue(value, columnName, tableName) {
1741
+ if (value === null || value === void 0) {
1742
+ return value;
1743
+ }
1744
+ if (value instanceof Date) {
1745
+ return value;
1746
+ }
1747
+ const schema = TABLE_SCHEMAS[tableName];
1748
+ const columnSchema = schema?.[columnName];
1749
+ if (columnSchema?.type === "boolean") {
1750
+ return value ? 1 : 0;
1751
+ }
1752
+ if (columnSchema?.type === "jsonb") {
1753
+ return JSON.stringify(value);
1754
+ }
1755
+ if (typeof value === "object") {
1756
+ return JSON.stringify(value);
1757
+ }
1758
+ return value;
1759
+ }
1760
+ /**
1761
+ * Maps TABLE_SCHEMAS types to mssql param types (used when value is null)
1762
+ */
1763
+ getMssqlType(tableName, columnName) {
1764
+ const col = TABLE_SCHEMAS[tableName]?.[columnName];
1765
+ switch (col?.type) {
1766
+ case "text":
1767
+ return sql2.NVarChar;
1768
+ case "timestamp":
1769
+ return sql2.DateTime2;
1770
+ case "uuid":
1771
+ return sql2.UniqueIdentifier;
1772
+ case "jsonb":
1773
+ return sql2.NVarChar;
1774
+ case "integer":
1775
+ return sql2.Int;
1776
+ case "bigint":
1777
+ return sql2.BigInt;
1778
+ case "float":
1779
+ return sql2.Float;
1780
+ case "boolean":
1781
+ return sql2.Bit;
1782
+ default:
1783
+ return sql2.NVarChar;
1784
+ }
1785
+ }
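prepareValue and getMssqlType work as a pair: non-null values are coerced before binding (booleans to 0/1, jsonb columns and plain objects to JSON strings, Dates passed through), while null/undefined values are bound with an explicit mssql type looked up from TABLE_SCHEMAS so the driver is not left to infer one. An illustrative sketch, assuming the threads schema declares metadata as a jsonb column:

// Orientation only - these are internal helpers, normally reached through insert/update below.
const request = pool.request();

const prepared = ops.prepareValue({ tags: ['a', 'b'] }, 'metadata', TABLE_THREADS);
request.input('metadata', prepared); // bound as the JSON string '{"tags":["a","b"]}'

const preparedNull = ops.prepareValue(null, 'metadata', TABLE_THREADS);
request.input('metadataNull', ops.getMssqlType(TABLE_THREADS, 'metadata'), preparedNull); // sql.NVarChar, null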
1786
+ /**
1787
+ * Update a single record in the database
1788
+ */
1789
+ async update({
1790
+ tableName,
1791
+ keys,
1792
+ data,
1793
+ transaction
1794
+ }) {
1795
+ try {
1796
+ if (!data || Object.keys(data).length === 0) {
1797
+ throw new MastraError({
1798
+ id: "MASTRA_STORAGE_MSSQL_UPDATE_EMPTY_DATA",
1799
+ domain: ErrorDomain.STORAGE,
1800
+ category: ErrorCategory.USER,
1801
+ text: "Cannot update with empty data payload"
1802
+ });
1803
+ }
1804
+ if (!keys || Object.keys(keys).length === 0) {
1805
+ throw new MastraError({
1806
+ id: "MASTRA_STORAGE_MSSQL_UPDATE_EMPTY_KEYS",
1807
+ domain: ErrorDomain.STORAGE,
1808
+ category: ErrorCategory.USER,
1809
+ text: "Cannot update without keys to identify records"
1810
+ });
1811
+ }
1812
+ const setClauses = [];
1813
+ const request = transaction ? transaction.request() : this.pool.request();
1814
+ let paramIndex = 0;
1815
+ Object.entries(data).forEach(([key, value]) => {
1816
+ const parsedKey = parseSqlIdentifier(key, "column name");
1817
+ const paramName = `set${paramIndex++}`;
1818
+ setClauses.push(`[${parsedKey}] = @${paramName}`);
1819
+ const preparedValue = this.prepareValue(value, key, tableName);
1820
+ if (preparedValue === null || preparedValue === void 0) {
1821
+ request.input(paramName, this.getMssqlType(tableName, key), null);
1822
+ } else {
1823
+ request.input(paramName, preparedValue);
1824
+ }
1825
+ });
1826
+ const whereConditions = [];
1827
+ Object.entries(keys).forEach(([key, value]) => {
1828
+ const parsedKey = parseSqlIdentifier(key, "column name");
1829
+ const paramName = `where${paramIndex++}`;
1830
+ whereConditions.push(`[${parsedKey}] = @${paramName}`);
1831
+ const preparedValue = this.prepareValue(value, key, tableName);
1832
+ if (preparedValue === null || preparedValue === void 0) {
1833
+ request.input(paramName, this.getMssqlType(tableName, key), null);
1834
+ } else {
1835
+ request.input(paramName, preparedValue);
1836
+ }
1837
+ });
1838
+ const tableName_ = getTableName({
1839
+ indexName: tableName,
1840
+ schemaName: getSchemaName(this.schemaName)
1841
+ });
1842
+ const updateSql = `UPDATE ${tableName_} SET ${setClauses.join(", ")} WHERE ${whereConditions.join(" AND ")}`;
1843
+ await request.query(updateSql);
1844
+ } catch (error) {
1845
+ throw new MastraError(
1846
+ {
1847
+ id: "MASTRA_STORAGE_MSSQL_STORE_UPDATE_FAILED",
1848
+ domain: ErrorDomain.STORAGE,
1849
+ category: ErrorCategory.THIRD_PARTY,
1850
+ details: {
1851
+ tableName
1852
+ }
1853
+ },
1854
+ error
1855
+ );
1856
+ }
1857
+ }
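update assembles a fully parameterised UPDATE from the data and keys objects, sharing one parameter counter across the SET and WHERE clauses and reusing prepareValue/getMssqlType for every binding; passing a transaction lets it join an outer batch. A usage sketch with illustrative column names:

// Hedged example: rename a thread by primary key.
await operations.update({
  tableName: TABLE_THREADS,
  data: { title: 'Renamed', updatedAt: new Date() }, // SET [title] = @set0, [updatedAt] = @set1
  keys: { id: 'thread-1' },                          // WHERE [id] = @where2 (the counter continues)
});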
1858
+ /**
1859
+ * Update multiple records in a single batch transaction
1860
+ */
1861
+ async batchUpdate({
1862
+ tableName,
1863
+ updates
1864
+ }) {
1865
+ const transaction = this.pool.transaction();
1866
+ try {
1867
+ await transaction.begin();
1868
+ for (const { keys, data } of updates) {
1869
+ await this.update({ tableName, keys, data, transaction });
1870
+ }
1871
+ await transaction.commit();
1872
+ } catch (error) {
1873
+ await transaction.rollback();
1874
+ throw new MastraError(
1875
+ {
1876
+ id: "MASTRA_STORAGE_MSSQL_STORE_BATCH_UPDATE_FAILED",
1877
+ domain: ErrorDomain.STORAGE,
1878
+ category: ErrorCategory.THIRD_PARTY,
1879
+ details: {
1880
+ tableName,
1881
+ numberOfRecords: updates.length
1882
+ }
1883
+ },
1884
+ error
1885
+ );
1886
+ }
1887
+ }
1888
+ /**
1889
+ * Delete multiple records by keys
1890
+ */
1891
+ async batchDelete({ tableName, keys }) {
1892
+ if (keys.length === 0) {
1893
+ return;
1894
+ }
1895
+ const tableName_ = getTableName({
1896
+ indexName: tableName,
1897
+ schemaName: getSchemaName(this.schemaName)
1898
+ });
1899
+ const transaction = this.pool.transaction();
1900
+ try {
1901
+ await transaction.begin();
1902
+ for (const keySet of keys) {
1903
+ const conditions = [];
1904
+ const request = transaction.request();
1905
+ let paramIndex = 0;
1906
+ Object.entries(keySet).forEach(([key, value]) => {
1907
+ const parsedKey = parseSqlIdentifier(key, "column name");
1908
+ const paramName = `p${paramIndex++}`;
1909
+ conditions.push(`[${parsedKey}] = @${paramName}`);
1910
+ const preparedValue = this.prepareValue(value, key, tableName);
1911
+ if (preparedValue === null || preparedValue === void 0) {
1912
+ request.input(paramName, this.getMssqlType(tableName, key), null);
1913
+ } else {
1914
+ request.input(paramName, preparedValue);
1915
+ }
1916
+ });
1917
+ const deleteSql = `DELETE FROM ${tableName_} WHERE ${conditions.join(" AND ")}`;
1918
+ await request.query(deleteSql);
1919
+ }
1920
+ await transaction.commit();
1921
+ } catch (error) {
1922
+ await transaction.rollback();
1923
+ throw new MastraError(
1924
+ {
1925
+ id: "MASTRA_STORAGE_MSSQL_STORE_BATCH_DELETE_FAILED",
1926
+ domain: ErrorDomain.STORAGE,
1927
+ category: ErrorCategory.THIRD_PARTY,
1928
+ details: {
1929
+ tableName,
1930
+ numberOfRecords: keys.length
1931
+ }
1932
+ },
1933
+ error
1934
+ );
1935
+ }
1936
+ }
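batchUpdate and batchDelete mirror batchInsert: one transaction per call, one statement per entry, and a rollback plus a wrapped MastraError if anything inside the loop throws. A usage sketch:

// Hedged example: edit two messages atomically, then delete a third in its own batch.
await operations.batchUpdate({
  tableName: TABLE_MESSAGES,
  updates: [
    { keys: { id: 'msg-1' }, data: { content: 'edited' } },
    { keys: { id: 'msg-2' }, data: { content: 'also edited' } },
  ],
});

await operations.batchDelete({
  tableName: TABLE_MESSAGES,
  keys: [{ id: 'msg-3' }], // one DELETE per key set, all inside a single transaction
});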
1937
+ /**
1938
+ * Create a new index on a table
1939
+ */
1940
+ async createIndex(options) {
1941
+ try {
1942
+ const { name, table, columns, unique = false, where } = options;
1943
+ const schemaName = this.schemaName || "dbo";
1944
+ const fullTableName = getTableName({
1945
+ indexName: table,
1946
+ schemaName: getSchemaName(this.schemaName)
1947
+ });
1948
+ const indexNameSafe = parseSqlIdentifier(name, "index name");
1949
+ const checkRequest = this.pool.request();
1950
+ checkRequest.input("indexName", indexNameSafe);
1951
+ checkRequest.input("schemaName", schemaName);
1952
+ checkRequest.input("tableName", table);
1953
+ const indexExists = await checkRequest.query(`
1954
+ SELECT 1 as found
1955
+ FROM sys.indexes i
1956
+ INNER JOIN sys.tables t ON i.object_id = t.object_id
1957
+ INNER JOIN sys.schemas s ON t.schema_id = s.schema_id
1958
+ WHERE i.name = @indexName
1959
+ AND s.name = @schemaName
1960
+ AND t.name = @tableName
1961
+ `);
1962
+ if (indexExists.recordset && indexExists.recordset.length > 0) {
1963
+ return;
1964
+ }
1965
+ const uniqueStr = unique ? "UNIQUE " : "";
1966
+ const columnsStr = columns.map((col) => {
1967
+ if (col.includes(" DESC") || col.includes(" ASC")) {
1968
+ const [colName, ...modifiers] = col.split(" ");
1969
+ if (!colName) {
1970
+ throw new Error(`Invalid column specification: ${col}`);
1971
+ }
1972
+ return `[${parseSqlIdentifier(colName, "column name")}] ${modifiers.join(" ")}`;
1973
+ }
1974
+ return `[${parseSqlIdentifier(col, "column name")}]`;
1975
+ }).join(", ");
1976
+ const whereStr = where ? ` WHERE ${where}` : "";
1977
+ const createIndexSql = `CREATE ${uniqueStr}INDEX [${indexNameSafe}] ON ${fullTableName} (${columnsStr})${whereStr}`;
1978
+ await this.pool.request().query(createIndexSql);
1979
+ } catch (error) {
1980
+ throw new MastraError(
1981
+ {
1982
+ id: "MASTRA_STORAGE_MSSQL_INDEX_CREATE_FAILED",
1983
+ domain: ErrorDomain.STORAGE,
1984
+ category: ErrorCategory.THIRD_PARTY,
1985
+ details: {
1986
+ indexName: options.name,
1987
+ tableName: options.table
1988
+ }
1989
+ },
1990
+ error
1991
+ );
1992
+ }
1993
+ }
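createIndex is idempotent: it first checks sys.indexes for an index of the same name on the target table and returns quietly if one exists, then emits CREATE [UNIQUE] INDEX, keeping any per-column ASC/DESC modifier and appending the optional filtered-index WHERE clause verbatim (so that string must be trusted SQL, never user input). A usage sketch:

// Hedged example: a filtered composite index; names are illustrative.
await operations.createIndex({
  name: 'my_threads_resourceid_seqid_idx',
  table: TABLE_THREADS,
  columns: ['resourceId', 'seq_id DESC'], // DESC is preserved per column
  unique: false,
  where: '[resourceId] IS NOT NULL',      // becomes a filtered index
});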
1994
+ /**
1995
+ * Drop an existing index
1996
+ */
1997
+ async dropIndex(indexName) {
1998
+ try {
1999
+ const schemaName = this.schemaName || "dbo";
2000
+ const indexNameSafe = parseSqlIdentifier(indexName, "index name");
2001
+ const checkRequest = this.pool.request();
2002
+ checkRequest.input("indexName", indexNameSafe);
2003
+ checkRequest.input("schemaName", schemaName);
2004
+ const result = await checkRequest.query(`
2005
+ SELECT t.name as table_name
2006
+ FROM sys.indexes i
2007
+ INNER JOIN sys.tables t ON i.object_id = t.object_id
2008
+ INNER JOIN sys.schemas s ON t.schema_id = s.schema_id
2009
+ WHERE i.name = @indexName
2010
+ AND s.name = @schemaName
2011
+ `);
2012
+ if (!result.recordset || result.recordset.length === 0) {
2013
+ return;
2014
+ }
2015
+ if (result.recordset.length > 1) {
2016
+ const tables = result.recordset.map((r) => r.table_name).join(", ");
2017
+ throw new MastraError({
2018
+ id: "MASTRA_STORAGE_MSSQL_INDEX_AMBIGUOUS",
2019
+ domain: ErrorDomain.STORAGE,
2020
+ category: ErrorCategory.USER,
2021
+ text: `Index "${indexNameSafe}" exists on multiple tables (${tables}) in schema "${schemaName}". Please drop indexes manually or ensure unique index names.`
2022
+ });
2023
+ }
2024
+ const tableName = result.recordset[0].table_name;
2025
+ const fullTableName = getTableName({
2026
+ indexName: tableName,
2027
+ schemaName: getSchemaName(this.schemaName)
2028
+ });
2029
+ const dropSql = `DROP INDEX [${indexNameSafe}] ON ${fullTableName}`;
2030
+ await this.pool.request().query(dropSql);
2031
+ } catch (error) {
2032
+ throw new MastraError(
2033
+ {
2034
+ id: "MASTRA_STORAGE_MSSQL_INDEX_DROP_FAILED",
2035
+ domain: ErrorDomain.STORAGE,
2036
+ category: ErrorCategory.THIRD_PARTY,
2037
+ details: {
2038
+ indexName
2039
+ }
2040
+ },
2041
+ error
2042
+ );
2043
+ }
2044
+ }
2045
+ /**
2046
+ * List indexes for a specific table or all tables
2047
+ */
2048
+ async listIndexes(tableName) {
2049
+ try {
2050
+ const schemaName = this.schemaName || "dbo";
2051
+ let query;
2052
+ const request = this.pool.request();
2053
+ request.input("schemaName", schemaName);
2054
+ if (tableName) {
2055
+ query = `
2056
+ SELECT
2057
+ i.name as name,
2058
+ o.name as [table],
2059
+ i.is_unique as is_unique,
2060
+ CAST(SUM(s.used_page_count) * 8 / 1024.0 AS VARCHAR(50)) + ' MB' as size
2061
+ FROM sys.indexes i
2062
+ INNER JOIN sys.objects o ON i.object_id = o.object_id
2063
+ INNER JOIN sys.schemas sch ON o.schema_id = sch.schema_id
2064
+ LEFT JOIN sys.dm_db_partition_stats s ON i.object_id = s.object_id AND i.index_id = s.index_id
2065
+ WHERE sch.name = @schemaName
2066
+ AND o.name = @tableName
2067
+ AND i.name IS NOT NULL
2068
+ GROUP BY i.name, o.name, i.is_unique
2069
+ `;
2070
+ request.input("tableName", tableName);
2071
+ } else {
2072
+ query = `
2073
+ SELECT
2074
+ i.name as name,
2075
+ o.name as [table],
2076
+ i.is_unique as is_unique,
2077
+ CAST(SUM(s.used_page_count) * 8 / 1024.0 AS VARCHAR(50)) + ' MB' as size
2078
+ FROM sys.indexes i
2079
+ INNER JOIN sys.objects o ON i.object_id = o.object_id
2080
+ INNER JOIN sys.schemas sch ON o.schema_id = sch.schema_id
2081
+ LEFT JOIN sys.dm_db_partition_stats s ON i.object_id = s.object_id AND i.index_id = s.index_id
2082
+ WHERE sch.name = @schemaName
2083
+ AND i.name IS NOT NULL
2084
+ GROUP BY i.name, o.name, i.is_unique
2085
+ `;
2086
+ }
2087
+ const result = await request.query(query);
2088
+ const indexes = [];
2089
+ for (const row of result.recordset) {
2090
+ const colRequest = this.pool.request();
2091
+ colRequest.input("indexName", row.name);
2092
+ colRequest.input("schemaName", schemaName);
2093
+ const colResult = await colRequest.query(`
2094
+ SELECT c.name as column_name
2095
+ FROM sys.indexes i
2096
+ INNER JOIN sys.index_columns ic ON i.object_id = ic.object_id AND i.index_id = ic.index_id
2097
+ INNER JOIN sys.columns c ON ic.object_id = c.object_id AND ic.column_id = c.column_id
2098
+ INNER JOIN sys.objects o ON i.object_id = o.object_id
2099
+ INNER JOIN sys.schemas s ON o.schema_id = s.schema_id
2100
+ WHERE i.name = @indexName
2101
+ AND s.name = @schemaName
2102
+ ORDER BY ic.key_ordinal
2103
+ `);
2104
+ indexes.push({
2105
+ name: row.name,
2106
+ table: row.table,
2107
+ columns: colResult.recordset.map((c) => c.column_name),
2108
+ unique: row.is_unique || false,
2109
+ size: row.size || "0 MB",
2110
+ definition: ""
2111
+ // MSSQL doesn't store definition like PG
2112
+ });
2113
+ }
2114
+ return indexes;
2115
+ } catch (error) {
2116
+ throw new MastraError(
2117
+ {
2118
+ id: "MASTRA_STORAGE_MSSQL_INDEX_LIST_FAILED",
2119
+ domain: ErrorDomain.STORAGE,
2120
+ category: ErrorCategory.THIRD_PARTY,
2121
+ details: tableName ? {
2122
+ tableName
2123
+ } : {}
2124
+ },
2125
+ error
2126
+ );
2127
+ }
2128
+ }
2129
+ /**
2130
+ * Get detailed statistics for a specific index
2131
+ */
2132
+ async describeIndex(indexName) {
2133
+ try {
2134
+ const schemaName = this.schemaName || "dbo";
2135
+ const request = this.pool.request();
2136
+ request.input("indexName", indexName);
2137
+ request.input("schemaName", schemaName);
2138
+ const query = `
2139
+ SELECT
2140
+ i.name as name,
2141
+ o.name as [table],
2142
+ i.is_unique as is_unique,
2143
+ CAST(SUM(s.used_page_count) * 8 / 1024.0 AS VARCHAR(50)) + ' MB' as size,
2144
+ i.type_desc as method,
2145
+ ISNULL(us.user_scans, 0) as scans,
2146
+ ISNULL(us.user_seeks + us.user_scans, 0) as tuples_read,
2147
+ ISNULL(us.user_lookups, 0) as tuples_fetched
2148
+ FROM sys.indexes i
2149
+ INNER JOIN sys.objects o ON i.object_id = o.object_id
2150
+ INNER JOIN sys.schemas sch ON o.schema_id = sch.schema_id
2151
+ LEFT JOIN sys.dm_db_partition_stats s ON i.object_id = s.object_id AND i.index_id = s.index_id
2152
+ LEFT JOIN sys.dm_db_index_usage_stats us ON i.object_id = us.object_id AND i.index_id = us.index_id
2153
+ WHERE i.name = @indexName
2154
+ AND sch.name = @schemaName
2155
+ GROUP BY i.name, o.name, i.is_unique, i.type_desc, us.user_seeks, us.user_scans, us.user_lookups
2156
+ `;
2157
+ const result = await request.query(query);
2158
+ if (!result.recordset || result.recordset.length === 0) {
2159
+ throw new Error(`Index "${indexName}" not found in schema "${schemaName}"`);
1378
2160
  }
1379
- await transaction.commit();
2161
+ const row = result.recordset[0];
2162
+ const colRequest = this.pool.request();
2163
+ colRequest.input("indexName", indexName);
2164
+ colRequest.input("schemaName", schemaName);
2165
+ const colResult = await colRequest.query(`
2166
+ SELECT c.name as column_name
2167
+ FROM sys.indexes i
2168
+ INNER JOIN sys.index_columns ic ON i.object_id = ic.object_id AND i.index_id = ic.index_id
2169
+ INNER JOIN sys.columns c ON ic.object_id = c.object_id AND ic.column_id = c.column_id
2170
+ INNER JOIN sys.objects o ON i.object_id = o.object_id
2171
+ INNER JOIN sys.schemas s ON o.schema_id = s.schema_id
2172
+ WHERE i.name = @indexName
2173
+ AND s.name = @schemaName
2174
+ ORDER BY ic.key_ordinal
2175
+ `);
2176
+ return {
2177
+ name: row.name,
2178
+ table: row.table,
2179
+ columns: colResult.recordset.map((c) => c.column_name),
2180
+ unique: row.is_unique || false,
2181
+ size: row.size || "0 MB",
2182
+ definition: "",
2183
+ method: row.method?.toLowerCase() || "nonclustered",
2184
+ scans: Number(row.scans) || 0,
2185
+ tuples_read: Number(row.tuples_read) || 0,
2186
+ tuples_fetched: Number(row.tuples_fetched) || 0
2187
+ };
1380
2188
  } catch (error) {
1381
- await transaction.rollback();
1382
2189
  throw new MastraError(
1383
2190
  {
1384
- id: "MASTRA_STORAGE_MSSQL_STORE_BATCH_INSERT_FAILED",
2191
+ id: "MASTRA_STORAGE_MSSQL_INDEX_DESCRIBE_FAILED",
1385
2192
  domain: ErrorDomain.STORAGE,
1386
2193
  category: ErrorCategory.THIRD_PARTY,
1387
2194
  details: {
1388
- tableName,
1389
- numberOfRecords: records.length
2195
+ indexName
1390
2196
  }
1391
2197
  },
1392
2198
  error
1393
2199
  );
1394
2200
  }
1395
2201
  }
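listIndexes and describeIndex read their answers from sys.indexes joined to sys.dm_db_partition_stats and sys.dm_db_index_usage_stats: sizes come back as page counts converted to MB, usage counters default to 0 when the DMVs have no row, and describeIndex throws if the named index is missing from the schema. A usage sketch:

// Hedged example: inspect indexes on the threads table, then one index in detail.
const indexes = await operations.listIndexes(TABLE_THREADS);
for (const idx of indexes) {
  console.log(idx.name, idx.columns, idx.unique, idx.size); // definition is always '' on MSSQL
}

const stats = await operations.describeIndex('my_threads_resourceid_seqid_idx');
console.log(stats.method, stats.scans, stats.tuples_read, stats.tuples_fetched);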
1396
- async dropTable({ tableName }) {
2202
+ /**
2203
+ * Returns definitions for automatic performance indexes
2204
+ * IMPORTANT: Uses seq_id DESC instead of createdAt DESC for MSSQL due to millisecond accuracy limitations
2205
+ * NOTE: Using NVARCHAR(400) for text columns (800 bytes) leaves room for composite indexes
2206
+ */
2207
+ getAutomaticIndexDefinitions() {
2208
+ const schemaPrefix = this.schemaName ? `${this.schemaName}_` : "";
2209
+ return [
2210
+ // Composite indexes for optimal filtering + sorting performance
2211
+ // NVARCHAR(400) = 800 bytes, plus BIGINT (8 bytes) = 808 bytes total (under 900-byte limit)
2212
+ {
2213
+ name: `${schemaPrefix}mastra_threads_resourceid_seqid_idx`,
2214
+ table: TABLE_THREADS,
2215
+ columns: ["resourceId", "seq_id DESC"]
2216
+ },
2217
+ {
2218
+ name: `${schemaPrefix}mastra_messages_thread_id_seqid_idx`,
2219
+ table: TABLE_MESSAGES,
2220
+ columns: ["thread_id", "seq_id DESC"]
2221
+ },
2222
+ {
2223
+ name: `${schemaPrefix}mastra_traces_name_seqid_idx`,
2224
+ table: TABLE_TRACES,
2225
+ columns: ["name", "seq_id DESC"]
2226
+ },
2227
+ {
2228
+ name: `${schemaPrefix}mastra_scores_trace_id_span_id_seqid_idx`,
2229
+ table: TABLE_SCORERS,
2230
+ columns: ["traceId", "spanId", "seq_id DESC"]
2231
+ },
2232
+ // AI Spans indexes for optimal trace querying
2233
+ {
2234
+ name: `${schemaPrefix}mastra_ai_spans_traceid_startedat_idx`,
2235
+ table: TABLE_AI_SPANS,
2236
+ columns: ["traceId", "startedAt DESC"]
2237
+ },
2238
+ {
2239
+ name: `${schemaPrefix}mastra_ai_spans_parentspanid_startedat_idx`,
2240
+ table: TABLE_AI_SPANS,
2241
+ columns: ["parentSpanId", "startedAt DESC"]
2242
+ },
2243
+ {
2244
+ name: `${schemaPrefix}mastra_ai_spans_name_idx`,
2245
+ table: TABLE_AI_SPANS,
2246
+ columns: ["name"]
2247
+ },
2248
+ {
2249
+ name: `${schemaPrefix}mastra_ai_spans_spantype_startedat_idx`,
2250
+ table: TABLE_AI_SPANS,
2251
+ columns: ["spanType", "startedAt DESC"]
2252
+ }
2253
+ ];
2254
+ }
2255
+ /**
2256
+ * Creates automatic indexes for optimal query performance
2257
+ * Uses getAutomaticIndexDefinitions() to determine which indexes to create
2258
+ */
2259
+ async createAutomaticIndexes() {
1397
2260
  try {
1398
- const tableNameWithSchema = getTableName({ indexName: tableName, schemaName: getSchemaName(this.schemaName) });
1399
- await this.pool.request().query(`DROP TABLE IF EXISTS ${tableNameWithSchema}`);
2261
+ const indexes = this.getAutomaticIndexDefinitions();
2262
+ for (const indexOptions of indexes) {
2263
+ try {
2264
+ await this.createIndex(indexOptions);
2265
+ } catch (error) {
2266
+ this.logger?.warn?.(`Failed to create index ${indexOptions.name}:`, error);
2267
+ }
2268
+ }
1400
2269
  } catch (error) {
1401
2270
  throw new MastraError(
1402
2271
  {
1403
- id: "MASTRA_STORAGE_MSSQL_STORE_DROP_TABLE_FAILED",
2272
+ id: "MASTRA_STORAGE_MSSQL_STORE_CREATE_PERFORMANCE_INDEXES_FAILED",
1404
2273
  domain: ErrorDomain.STORAGE,
1405
- category: ErrorCategory.THIRD_PARTY,
1406
- details: {
1407
- tableName
1408
- }
2274
+ category: ErrorCategory.THIRD_PARTY
1409
2275
  },
1410
2276
  error
1411
2277
  );
1412
2278
  }
1413
2279
  }
1414
2280
  };
1415
- function parseJSON(jsonString) {
1416
- try {
1417
- return JSON.parse(jsonString);
1418
- } catch {
1419
- return jsonString;
1420
- }
1421
- }
1422
2281
  function transformScoreRow(row) {
1423
2282
  return {
1424
2283
  ...row,
1425
- input: parseJSON(row.input),
1426
- scorer: parseJSON(row.scorer),
1427
- preprocessStepResult: parseJSON(row.preprocessStepResult),
1428
- analyzeStepResult: parseJSON(row.analyzeStepResult),
1429
- metadata: parseJSON(row.metadata),
1430
- output: parseJSON(row.output),
1431
- additionalContext: parseJSON(row.additionalContext),
1432
- runtimeContext: parseJSON(row.runtimeContext),
1433
- entity: parseJSON(row.entity),
2284
+ input: safelyParseJSON(row.input),
2285
+ scorer: safelyParseJSON(row.scorer),
2286
+ preprocessStepResult: safelyParseJSON(row.preprocessStepResult),
2287
+ analyzeStepResult: safelyParseJSON(row.analyzeStepResult),
2288
+ metadata: safelyParseJSON(row.metadata),
2289
+ output: safelyParseJSON(row.output),
2290
+ additionalContext: safelyParseJSON(row.additionalContext),
2291
+ requestContext: safelyParseJSON(row.requestContext),
2292
+ entity: safelyParseJSON(row.entity),
1434
2293
  createdAt: row.createdAt,
1435
2294
  updatedAt: row.updatedAt
1436
2295
  };
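The local parseJSON helper is replaced by safelyParseJSON from @mastra/core/storage; this call site only relies on it returning the parsed object for valid JSON strings and degrading gracefully otherwise. A rough behavioural sketch, not the actual core implementation:

// Assumption: approximately what transformScoreRow expects from safelyParseJSON.
function safelyParseJSONSketch(value) {
  if (typeof value !== 'string') return value; // already parsed (or null)
  try {
    return JSON.parse(value);
  } catch {
    return value; // leave unparseable text as-is instead of throwing
  }
}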
@@ -1473,8 +2332,21 @@ var ScoresMSSQL = class extends ScoresStorage {
1473
2332
  }
1474
2333
  }
1475
2334
  async saveScore(score) {
2335
+ let validatedScore;
2336
+ try {
2337
+ validatedScore = saveScorePayloadSchema.parse(score);
2338
+ } catch (error) {
2339
+ throw new MastraError(
2340
+ {
2341
+ id: "MASTRA_STORAGE_MSSQL_STORE_SAVE_SCORE_VALIDATION_FAILED",
2342
+ domain: ErrorDomain.STORAGE,
2343
+ category: ErrorCategory.THIRD_PARTY
2344
+ },
2345
+ error
2346
+ );
2347
+ }
1476
2348
  try {
1477
- const scoreId = crypto.randomUUID();
2349
+ const scoreId = randomUUID();
1478
2350
  const {
1479
2351
  scorer,
1480
2352
  preprocessStepResult,
@@ -1483,24 +2355,24 @@ var ScoresMSSQL = class extends ScoresStorage {
1483
2355
  input,
1484
2356
  output,
1485
2357
  additionalContext,
1486
- runtimeContext,
2358
+ requestContext,
1487
2359
  entity,
1488
2360
  ...rest
1489
- } = score;
2361
+ } = validatedScore;
1490
2362
  await this.operations.insert({
1491
2363
  tableName: TABLE_SCORERS,
1492
2364
  record: {
1493
2365
  id: scoreId,
1494
2366
  ...rest,
1495
- input: JSON.stringify(input) || "",
1496
- output: JSON.stringify(output) || "",
1497
- preprocessStepResult: preprocessStepResult ? JSON.stringify(preprocessStepResult) : null,
1498
- analyzeStepResult: analyzeStepResult ? JSON.stringify(analyzeStepResult) : null,
1499
- metadata: metadata ? JSON.stringify(metadata) : null,
1500
- additionalContext: additionalContext ? JSON.stringify(additionalContext) : null,
1501
- runtimeContext: runtimeContext ? JSON.stringify(runtimeContext) : null,
1502
- entity: entity ? JSON.stringify(entity) : null,
1503
- scorer: scorer ? JSON.stringify(scorer) : null,
2367
+ input: input || "",
2368
+ output: output || "",
2369
+ preprocessStepResult: preprocessStepResult || null,
2370
+ analyzeStepResult: analyzeStepResult || null,
2371
+ metadata: metadata || null,
2372
+ additionalContext: additionalContext || null,
2373
+ requestContext: requestContext || null,
2374
+ entity: entity || null,
2375
+ scorer: scorer || null,
1504
2376
  createdAt: (/* @__PURE__ */ new Date()).toISOString(),
1505
2377
  updatedAt: (/* @__PURE__ */ new Date()).toISOString()
1506
2378
  }
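saveScore now validates its payload against saveScorePayloadSchema before touching the database and stores the structured fields as plain objects: the JSON.stringify calls that used to live here are handled generically by prepareValue, since these columns are declared jsonb in TABLE_SCHEMAS. A hedged usage sketch; the exact required fields are defined by the schema in @mastra/core/evals, so treat this payload as illustrative:

// Illustrative call through the scores domain store.
await scores.saveScore({
  scorerId: 'toxicity',
  runId: 'run-42',
  score: 0.93,
  scorer: { name: 'toxicity' },
  input: [{ role: 'user', content: 'hello' }],
  output: { text: 'hi there' },
  source: 'LIVE',
  entityId: 'agent-1',
  entityType: 'AGENT',
});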
@@ -1518,41 +2390,70 @@ var ScoresMSSQL = class extends ScoresStorage {
1518
2390
  );
1519
2391
  }
1520
2392
  }
1521
- async getScoresByScorerId({
2393
+ async listScoresByScorerId({
1522
2394
  scorerId,
1523
- pagination
2395
+ pagination,
2396
+ entityId,
2397
+ entityType,
2398
+ source
1524
2399
  }) {
1525
2400
  try {
1526
- const request = this.pool.request();
1527
- request.input("p1", scorerId);
1528
- const totalResult = await request.query(
1529
- `SELECT COUNT(*) as count FROM ${getTableName({ indexName: TABLE_SCORERS, schemaName: getSchemaName(this.schema) })} WHERE [scorerId] = @p1`
1530
- );
2401
+ const conditions = ["[scorerId] = @p1"];
2402
+ const params = { p1: scorerId };
2403
+ let paramIndex = 2;
2404
+ if (entityId) {
2405
+ conditions.push(`[entityId] = @p${paramIndex}`);
2406
+ params[`p${paramIndex}`] = entityId;
2407
+ paramIndex++;
2408
+ }
2409
+ if (entityType) {
2410
+ conditions.push(`[entityType] = @p${paramIndex}`);
2411
+ params[`p${paramIndex}`] = entityType;
2412
+ paramIndex++;
2413
+ }
2414
+ if (source) {
2415
+ conditions.push(`[source] = @p${paramIndex}`);
2416
+ params[`p${paramIndex}`] = source;
2417
+ paramIndex++;
2418
+ }
2419
+ const whereClause = conditions.join(" AND ");
2420
+ const tableName = getTableName({ indexName: TABLE_SCORERS, schemaName: getSchemaName(this.schema) });
2421
+ const countRequest = this.pool.request();
2422
+ Object.entries(params).forEach(([key, value]) => {
2423
+ countRequest.input(key, value);
2424
+ });
2425
+ const totalResult = await countRequest.query(`SELECT COUNT(*) as count FROM ${tableName} WHERE ${whereClause}`);
1531
2426
  const total = totalResult.recordset[0]?.count || 0;
2427
+ const { page, perPage: perPageInput } = pagination;
1532
2428
  if (total === 0) {
1533
2429
  return {
1534
2430
  pagination: {
1535
2431
  total: 0,
1536
- page: pagination.page,
1537
- perPage: pagination.perPage,
2432
+ page,
2433
+ perPage: perPageInput,
1538
2434
  hasMore: false
1539
2435
  },
1540
2436
  scores: []
1541
2437
  };
1542
2438
  }
2439
+ const perPage = normalizePerPage(perPageInput, 100);
2440
+ const { offset: start, perPage: perPageForResponse } = calculatePagination(page, perPageInput, perPage);
2441
+ const limitValue = perPageInput === false ? total : perPage;
2442
+ const end = perPageInput === false ? total : start + perPage;
1543
2443
  const dataRequest = this.pool.request();
1544
- dataRequest.input("p1", scorerId);
1545
- dataRequest.input("p2", pagination.perPage);
1546
- dataRequest.input("p3", pagination.page * pagination.perPage);
1547
- const result = await dataRequest.query(
1548
- `SELECT * FROM ${getTableName({ indexName: TABLE_SCORERS, schemaName: getSchemaName(this.schema) })} WHERE [scorerId] = @p1 ORDER BY [createdAt] DESC OFFSET @p3 ROWS FETCH NEXT @p2 ROWS ONLY`
1549
- );
2444
+ Object.entries(params).forEach(([key, value]) => {
2445
+ dataRequest.input(key, value);
2446
+ });
2447
+ dataRequest.input("perPage", limitValue);
2448
+ dataRequest.input("offset", start);
2449
+ const dataQuery = `SELECT * FROM ${tableName} WHERE ${whereClause} ORDER BY [createdAt] DESC OFFSET @offset ROWS FETCH NEXT @perPage ROWS ONLY`;
2450
+ const result = await dataRequest.query(dataQuery);
1550
2451
  return {
1551
2452
  pagination: {
1552
2453
  total: Number(total),
1553
- page: pagination.page,
1554
- perPage: pagination.perPage,
1555
- hasMore: Number(total) > (pagination.page + 1) * pagination.perPage
2454
+ page,
2455
+ perPage: perPageForResponse,
2456
+ hasMore: end < total
1556
2457
  },
1557
2458
  scores: result.recordset.map((row) => transformScoreRow(row))
1558
2459
  };
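Pagination in the listScores* methods is now shared logic from @mastra/core/storage: normalizePerPage normalizes the requested page size (falling back to 100 here), calculatePagination yields the offset plus the perPage value echoed back in the response, and perPage: false means "return everything" (the FETCH limit becomes the total count, so hasMore stays false). A usage sketch exercising the new entity/source filters:

// Hedged example: second page (zero-based) of live scores for one scorer and entity.
const { scores: rows, pagination } = await scores.listScoresByScorerId({
  scorerId: 'toxicity',
  entityId: 'agent-1',
  entityType: 'AGENT',
  source: 'LIVE',
  pagination: { page: 1, perPage: 20 }, // perPage: false would fetch all matching rows
});
console.log(pagination.total, pagination.hasMore, rows.length);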
@@ -1568,7 +2469,7 @@ var ScoresMSSQL = class extends ScoresStorage {
1568
2469
  );
1569
2470
  }
1570
2471
  }
1571
- async getScoresByRunId({
2472
+ async listScoresByRunId({
1572
2473
  runId,
1573
2474
  pagination
1574
2475
  }) {
@@ -1579,30 +2480,35 @@ var ScoresMSSQL = class extends ScoresStorage {
1579
2480
  `SELECT COUNT(*) as count FROM ${getTableName({ indexName: TABLE_SCORERS, schemaName: getSchemaName(this.schema) })} WHERE [runId] = @p1`
1580
2481
  );
1581
2482
  const total = totalResult.recordset[0]?.count || 0;
2483
+ const { page, perPage: perPageInput } = pagination;
1582
2484
  if (total === 0) {
1583
2485
  return {
1584
2486
  pagination: {
1585
2487
  total: 0,
1586
- page: pagination.page,
1587
- perPage: pagination.perPage,
2488
+ page,
2489
+ perPage: perPageInput,
1588
2490
  hasMore: false
1589
2491
  },
1590
2492
  scores: []
1591
2493
  };
1592
2494
  }
2495
+ const perPage = normalizePerPage(perPageInput, 100);
2496
+ const { offset: start, perPage: perPageForResponse } = calculatePagination(page, perPageInput, perPage);
2497
+ const limitValue = perPageInput === false ? total : perPage;
2498
+ const end = perPageInput === false ? total : start + perPage;
1593
2499
  const dataRequest = this.pool.request();
1594
2500
  dataRequest.input("p1", runId);
1595
- dataRequest.input("p2", pagination.perPage);
1596
- dataRequest.input("p3", pagination.page * pagination.perPage);
2501
+ dataRequest.input("p2", limitValue);
2502
+ dataRequest.input("p3", start);
1597
2503
  const result = await dataRequest.query(
1598
2504
  `SELECT * FROM ${getTableName({ indexName: TABLE_SCORERS, schemaName: getSchemaName(this.schema) })} WHERE [runId] = @p1 ORDER BY [createdAt] DESC OFFSET @p3 ROWS FETCH NEXT @p2 ROWS ONLY`
1599
2505
  );
1600
2506
  return {
1601
2507
  pagination: {
1602
2508
  total: Number(total),
1603
- page: pagination.page,
1604
- perPage: pagination.perPage,
1605
- hasMore: Number(total) > (pagination.page + 1) * pagination.perPage
2509
+ page,
2510
+ perPage: perPageForResponse,
2511
+ hasMore: end < total
1606
2512
  },
1607
2513
  scores: result.recordset.map((row) => transformScoreRow(row))
1608
2514
  };
@@ -1618,7 +2524,7 @@ var ScoresMSSQL = class extends ScoresStorage {
1618
2524
  );
1619
2525
  }
1620
2526
  }
1621
- async getScoresByEntityId({
2527
+ async listScoresByEntityId({
1622
2528
  entityId,
1623
2529
  entityType,
1624
2530
  pagination
@@ -1631,31 +2537,36 @@ var ScoresMSSQL = class extends ScoresStorage {
1631
2537
  `SELECT COUNT(*) as count FROM ${getTableName({ indexName: TABLE_SCORERS, schemaName: getSchemaName(this.schema) })} WHERE [entityId] = @p1 AND [entityType] = @p2`
1632
2538
  );
1633
2539
  const total = totalResult.recordset[0]?.count || 0;
2540
+ const { page, perPage: perPageInput } = pagination;
2541
+ const perPage = normalizePerPage(perPageInput, 100);
2542
+ const { offset: start, perPage: perPageForResponse } = calculatePagination(page, perPageInput, perPage);
1634
2543
  if (total === 0) {
1635
2544
  return {
1636
2545
  pagination: {
1637
2546
  total: 0,
1638
- page: pagination.page,
1639
- perPage: pagination.perPage,
2547
+ page,
2548
+ perPage: perPageForResponse,
1640
2549
  hasMore: false
1641
2550
  },
1642
2551
  scores: []
1643
2552
  };
1644
2553
  }
2554
+ const limitValue = perPageInput === false ? total : perPage;
2555
+ const end = perPageInput === false ? total : start + perPage;
1645
2556
  const dataRequest = this.pool.request();
1646
2557
  dataRequest.input("p1", entityId);
1647
2558
  dataRequest.input("p2", entityType);
1648
- dataRequest.input("p3", pagination.perPage);
1649
- dataRequest.input("p4", pagination.page * pagination.perPage);
2559
+ dataRequest.input("p3", limitValue);
2560
+ dataRequest.input("p4", start);
1650
2561
  const result = await dataRequest.query(
1651
2562
  `SELECT * FROM ${getTableName({ indexName: TABLE_SCORERS, schemaName: getSchemaName(this.schema) })} WHERE [entityId] = @p1 AND [entityType] = @p2 ORDER BY [createdAt] DESC OFFSET @p4 ROWS FETCH NEXT @p3 ROWS ONLY`
1652
2563
  );
1653
2564
  return {
1654
2565
  pagination: {
1655
2566
  total: Number(total),
1656
- page: pagination.page,
1657
- perPage: pagination.perPage,
1658
- hasMore: Number(total) > (pagination.page + 1) * pagination.perPage
2567
+ page,
2568
+ perPage: perPageForResponse,
2569
+ hasMore: end < total
1659
2570
  },
1660
2571
  scores: result.recordset.map((row) => transformScoreRow(row))
1661
2572
  };
@@ -1671,8 +2582,66 @@ var ScoresMSSQL = class extends ScoresStorage {
1671
2582
  );
1672
2583
  }
1673
2584
  }
2585
+ async listScoresBySpan({
2586
+ traceId,
2587
+ spanId,
2588
+ pagination
2589
+ }) {
2590
+ try {
2591
+ const request = this.pool.request();
2592
+ request.input("p1", traceId);
2593
+ request.input("p2", spanId);
2594
+ const totalResult = await request.query(
2595
+ `SELECT COUNT(*) as count FROM ${getTableName({ indexName: TABLE_SCORERS, schemaName: getSchemaName(this.schema) })} WHERE [traceId] = @p1 AND [spanId] = @p2`
2596
+ );
2597
+ const total = totalResult.recordset[0]?.count || 0;
2598
+ const { page, perPage: perPageInput } = pagination;
2599
+ const perPage = normalizePerPage(perPageInput, 100);
2600
+ const { offset: start, perPage: perPageForResponse } = calculatePagination(page, perPageInput, perPage);
2601
+ if (total === 0) {
2602
+ return {
2603
+ pagination: {
2604
+ total: 0,
2605
+ page,
2606
+ perPage: perPageForResponse,
2607
+ hasMore: false
2608
+ },
2609
+ scores: []
2610
+ };
2611
+ }
2612
+ const limitValue = perPageInput === false ? total : perPage;
2613
+ const end = perPageInput === false ? total : start + perPage;
2614
+ const dataRequest = this.pool.request();
2615
+ dataRequest.input("p1", traceId);
2616
+ dataRequest.input("p2", spanId);
2617
+ dataRequest.input("p3", limitValue);
2618
+ dataRequest.input("p4", start);
2619
+ const result = await dataRequest.query(
2620
+ `SELECT * FROM ${getTableName({ indexName: TABLE_SCORERS, schemaName: getSchemaName(this.schema) })} WHERE [traceId] = @p1 AND [spanId] = @p2 ORDER BY [createdAt] DESC OFFSET @p4 ROWS FETCH NEXT @p3 ROWS ONLY`
2621
+ );
2622
+ return {
2623
+ pagination: {
2624
+ total: Number(total),
2625
+ page,
2626
+ perPage: perPageForResponse,
2627
+ hasMore: end < total
2628
+ },
2629
+ scores: result.recordset.map((row) => transformScoreRow(row))
2630
+ };
2631
+ } catch (error) {
2632
+ throw new MastraError(
2633
+ {
2634
+ id: "MASTRA_STORAGE_MSSQL_STORE_GET_SCORES_BY_SPAN_FAILED",
2635
+ domain: ErrorDomain.STORAGE,
2636
+ category: ErrorCategory.THIRD_PARTY,
2637
+ details: { traceId, spanId }
2638
+ },
2639
+ error
2640
+ );
2641
+ }
2642
+ }
1674
2643
  };
1675
- var TracesMSSQL = class extends TracesStorage {
2644
+ var WorkflowsMSSQL = class extends WorkflowsStorage {
1676
2645
  pool;
1677
2646
  operations;
1678
2647
  schema;
@@ -1686,207 +2655,164 @@ var TracesMSSQL = class extends TracesStorage {
1686
2655
  this.operations = operations;
1687
2656
  this.schema = schema;
1688
2657
  }
1689
- /** @deprecated use getTracesPaginated instead*/
1690
- async getTraces(args) {
1691
- if (args.fromDate || args.toDate) {
1692
- args.dateRange = {
1693
- start: args.fromDate,
1694
- end: args.toDate
1695
- };
1696
- }
1697
- const result = await this.getTracesPaginated(args);
1698
- return result.traces;
1699
- }
1700
- async getTracesPaginated(args) {
1701
- const { name, scope, page = 0, perPage: perPageInput, attributes, filters, dateRange } = args;
1702
- const fromDate = dateRange?.start;
1703
- const toDate = dateRange?.end;
1704
- const perPage = perPageInput !== void 0 ? perPageInput : 100;
1705
- const currentOffset = page * perPage;
1706
- const paramMap = {};
1707
- const conditions = [];
1708
- let paramIndex = 1;
1709
- if (name) {
1710
- const paramName = `p${paramIndex++}`;
1711
- conditions.push(`[name] LIKE @${paramName}`);
1712
- paramMap[paramName] = `${name}%`;
1713
- }
1714
- if (scope) {
1715
- const paramName = `p${paramIndex++}`;
1716
- conditions.push(`[scope] = @${paramName}`);
1717
- paramMap[paramName] = scope;
1718
- }
1719
- if (attributes) {
1720
- Object.entries(attributes).forEach(([key, value]) => {
1721
- const parsedKey = parseFieldKey(key);
1722
- const paramName = `p${paramIndex++}`;
1723
- conditions.push(`JSON_VALUE([attributes], '$.${parsedKey}') = @${paramName}`);
1724
- paramMap[paramName] = value;
1725
- });
1726
- }
1727
- if (filters) {
1728
- Object.entries(filters).forEach(([key, value]) => {
1729
- const parsedKey = parseFieldKey(key);
1730
- const paramName = `p${paramIndex++}`;
1731
- conditions.push(`[${parsedKey}] = @${paramName}`);
1732
- paramMap[paramName] = value;
1733
- });
1734
- }
1735
- if (fromDate instanceof Date && !isNaN(fromDate.getTime())) {
1736
- const paramName = `p${paramIndex++}`;
1737
- conditions.push(`[createdAt] >= @${paramName}`);
1738
- paramMap[paramName] = fromDate.toISOString();
1739
- }
1740
- if (toDate instanceof Date && !isNaN(toDate.getTime())) {
1741
- const paramName = `p${paramIndex++}`;
1742
- conditions.push(`[createdAt] <= @${paramName}`);
1743
- paramMap[paramName] = toDate.toISOString();
2658
+ parseWorkflowRun(row) {
2659
+ let parsedSnapshot = row.snapshot;
2660
+ if (typeof parsedSnapshot === "string") {
2661
+ try {
2662
+ parsedSnapshot = JSON.parse(row.snapshot);
2663
+ } catch (e) {
2664
+ this.logger?.warn?.(`Failed to parse snapshot for workflow ${row.workflow_name}:`, e);
2665
+ }
1744
2666
  }
1745
- const whereClause = conditions.length > 0 ? `WHERE ${conditions.join(" AND ")}` : "";
1746
- const countQuery = `SELECT COUNT(*) as total FROM ${getTableName({ indexName: TABLE_TRACES, schemaName: getSchemaName(this.schema) })} ${whereClause}`;
1747
- let total = 0;
2667
+ return {
2668
+ workflowName: row.workflow_name,
2669
+ runId: row.run_id,
2670
+ snapshot: parsedSnapshot,
2671
+ createdAt: row.createdAt,
2672
+ updatedAt: row.updatedAt,
2673
+ resourceId: row.resourceId
2674
+ };
2675
+ }
2676
+ async updateWorkflowResults({
2677
+ workflowName,
2678
+ runId,
2679
+ stepId,
2680
+ result,
2681
+ requestContext
2682
+ }) {
2683
+ const table = getTableName({ indexName: TABLE_WORKFLOW_SNAPSHOT, schemaName: getSchemaName(this.schema) });
2684
+ const transaction = this.pool.transaction();
1748
2685
  try {
1749
- const countRequest = this.pool.request();
1750
- Object.entries(paramMap).forEach(([key, value]) => {
1751
- if (value instanceof Date) {
1752
- countRequest.input(key, sql2.DateTime, value);
1753
- } else {
1754
- countRequest.input(key, value);
1755
- }
1756
- });
1757
- const countResult = await countRequest.query(countQuery);
1758
- total = parseInt(countResult.recordset[0].total, 10);
2686
+ await transaction.begin();
2687
+ const selectRequest = new sql2.Request(transaction);
2688
+ selectRequest.input("workflow_name", workflowName);
2689
+ selectRequest.input("run_id", runId);
2690
+ const existingSnapshotResult = await selectRequest.query(
2691
+ `SELECT snapshot FROM ${table} WITH (UPDLOCK, HOLDLOCK) WHERE workflow_name = @workflow_name AND run_id = @run_id`
2692
+ );
2693
+ let snapshot;
2694
+ if (!existingSnapshotResult.recordset || existingSnapshotResult.recordset.length === 0) {
2695
+ snapshot = {
2696
+ context: {},
2697
+ activePaths: [],
2698
+ timestamp: Date.now(),
2699
+ suspendedPaths: {},
2700
+ resumeLabels: {},
2701
+ serializedStepGraph: [],
2702
+ value: {},
2703
+ waitingPaths: {},
2704
+ status: "pending",
2705
+ runId,
2706
+ requestContext: {}
2707
+ };
2708
+ } else {
2709
+ const existingSnapshot = existingSnapshotResult.recordset[0].snapshot;
2710
+ snapshot = typeof existingSnapshot === "string" ? JSON.parse(existingSnapshot) : existingSnapshot;
2711
+ }
2712
+ snapshot.context[stepId] = result;
2713
+ snapshot.requestContext = { ...snapshot.requestContext, ...requestContext };
2714
+ const upsertReq = new sql2.Request(transaction);
2715
+ upsertReq.input("workflow_name", workflowName);
2716
+ upsertReq.input("run_id", runId);
2717
+ upsertReq.input("snapshot", JSON.stringify(snapshot));
2718
+ upsertReq.input("createdAt", sql2.DateTime2, /* @__PURE__ */ new Date());
2719
+ upsertReq.input("updatedAt", sql2.DateTime2, /* @__PURE__ */ new Date());
2720
+ await upsertReq.query(
2721
+ `MERGE ${table} AS target
2722
+ USING (SELECT @workflow_name AS workflow_name, @run_id AS run_id) AS src
2723
+ ON target.workflow_name = src.workflow_name AND target.run_id = src.run_id
2724
+ WHEN MATCHED THEN UPDATE SET snapshot = @snapshot, [updatedAt] = @updatedAt
2725
+ WHEN NOT MATCHED THEN INSERT (workflow_name, run_id, snapshot, [createdAt], [updatedAt])
2726
+ VALUES (@workflow_name, @run_id, @snapshot, @createdAt, @updatedAt);`
2727
+ );
2728
+ await transaction.commit();
2729
+ return snapshot.context;
1759
2730
  } catch (error) {
2731
+ try {
2732
+ await transaction.rollback();
2733
+ } catch {
2734
+ }
1760
2735
  throw new MastraError(
1761
2736
  {
1762
- id: "MASTRA_STORAGE_MSSQL_STORE_GET_TRACES_PAGINATED_FAILED_TO_RETRIEVE_TOTAL_COUNT",
2737
+ id: "MASTRA_STORAGE_MSSQL_STORE_UPDATE_WORKFLOW_RESULTS_FAILED",
1763
2738
  domain: ErrorDomain.STORAGE,
1764
2739
  category: ErrorCategory.THIRD_PARTY,
1765
2740
  details: {
1766
- name: args.name ?? "",
1767
- scope: args.scope ?? ""
2741
+ workflowName,
2742
+ runId,
2743
+ stepId
1768
2744
  }
1769
2745
  },
1770
2746
  error
1771
2747
  );
1772
2748
  }
1773
- if (total === 0) {
1774
- return {
1775
- traces: [],
1776
- total: 0,
1777
- page,
1778
- perPage,
1779
- hasMore: false
1780
- };
1781
- }
1782
- const dataQuery = `SELECT * FROM ${getTableName({ indexName: TABLE_TRACES, schemaName: getSchemaName(this.schema) })} ${whereClause} ORDER BY [seq_id] DESC OFFSET @offset ROWS FETCH NEXT @limit ROWS ONLY`;
1783
- const dataRequest = this.pool.request();
1784
- Object.entries(paramMap).forEach(([key, value]) => {
1785
- if (value instanceof Date) {
1786
- dataRequest.input(key, sql2.DateTime, value);
1787
- } else {
1788
- dataRequest.input(key, value);
1789
- }
1790
- });
1791
- dataRequest.input("offset", currentOffset);
1792
- dataRequest.input("limit", perPage);
2749
+ }
2750
+ async updateWorkflowState({
2751
+ workflowName,
2752
+ runId,
2753
+ opts
2754
+ }) {
2755
+ const table = getTableName({ indexName: TABLE_WORKFLOW_SNAPSHOT, schemaName: getSchemaName(this.schema) });
2756
+ const transaction = this.pool.transaction();
1793
2757
  try {
1794
- const rowsResult = await dataRequest.query(dataQuery);
1795
- const rows = rowsResult.recordset;
1796
- const traces = rows.map((row) => ({
1797
- id: row.id,
1798
- parentSpanId: row.parentSpanId,
1799
- traceId: row.traceId,
1800
- name: row.name,
1801
- scope: row.scope,
1802
- kind: row.kind,
1803
- status: JSON.parse(row.status),
1804
- events: JSON.parse(row.events),
1805
- links: JSON.parse(row.links),
1806
- attributes: JSON.parse(row.attributes),
1807
- startTime: row.startTime,
1808
- endTime: row.endTime,
1809
- other: row.other,
1810
- createdAt: row.createdAt
1811
- }));
1812
- return {
1813
- traces,
1814
- total,
1815
- page,
1816
- perPage,
1817
- hasMore: currentOffset + traces.length < total
1818
- };
2758
+ await transaction.begin();
2759
+ const selectRequest = new sql2.Request(transaction);
2760
+ selectRequest.input("workflow_name", workflowName);
2761
+ selectRequest.input("run_id", runId);
2762
+ const existingSnapshotResult = await selectRequest.query(
2763
+ `SELECT snapshot FROM ${table} WITH (UPDLOCK, HOLDLOCK) WHERE workflow_name = @workflow_name AND run_id = @run_id`
2764
+ );
2765
+ if (!existingSnapshotResult.recordset || existingSnapshotResult.recordset.length === 0) {
2766
+ await transaction.rollback();
2767
+ return void 0;
2768
+ }
2769
+ const existingSnapshot = existingSnapshotResult.recordset[0].snapshot;
2770
+ const snapshot = typeof existingSnapshot === "string" ? JSON.parse(existingSnapshot) : existingSnapshot;
2771
+ if (!snapshot || !snapshot?.context) {
2772
+ await transaction.rollback();
2773
+ throw new MastraError(
2774
+ {
2775
+ id: "MASTRA_STORAGE_MSSQL_STORE_UPDATE_WORKFLOW_STATE_SNAPSHOT_NOT_FOUND",
2776
+ domain: ErrorDomain.STORAGE,
2777
+ category: ErrorCategory.SYSTEM,
2778
+ details: {
2779
+ workflowName,
2780
+ runId
2781
+ }
2782
+ },
2783
+ new Error(`Snapshot not found for runId ${runId}`)
2784
+ );
2785
+ }
2786
+ const updatedSnapshot = { ...snapshot, ...opts };
2787
+ const updateRequest = new sql2.Request(transaction);
2788
+ updateRequest.input("snapshot", JSON.stringify(updatedSnapshot));
2789
+ updateRequest.input("workflow_name", workflowName);
2790
+ updateRequest.input("run_id", runId);
2791
+ updateRequest.input("updatedAt", sql2.DateTime2, /* @__PURE__ */ new Date());
2792
+ await updateRequest.query(
2793
+ `UPDATE ${table} SET snapshot = @snapshot, [updatedAt] = @updatedAt WHERE workflow_name = @workflow_name AND run_id = @run_id`
2794
+ );
2795
+ await transaction.commit();
2796
+ return updatedSnapshot;
1819
2797
  } catch (error) {
2798
+ try {
2799
+ await transaction.rollback();
2800
+ } catch {
2801
+ }
1820
2802
  throw new MastraError(
1821
2803
  {
1822
- id: "MASTRA_STORAGE_MSSQL_STORE_GET_TRACES_PAGINATED_FAILED_TO_RETRIEVE_TRACES",
2804
+ id: "MASTRA_STORAGE_MSSQL_STORE_UPDATE_WORKFLOW_STATE_FAILED",
1823
2805
  domain: ErrorDomain.STORAGE,
1824
2806
  category: ErrorCategory.THIRD_PARTY,
1825
2807
  details: {
1826
- name: args.name ?? "",
1827
- scope: args.scope ?? ""
2808
+ workflowName,
2809
+ runId
1828
2810
  }
1829
2811
  },
1830
2812
  error
1831
2813
  );
1832
2814
  }
1833
2815
  }
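updateWorkflowResults and updateWorkflowState are no longer unimplemented stubs. Both read the snapshot row under WITH (UPDLOCK, HOLDLOCK) inside an explicit transaction so concurrent writers serialize on the same run; updateWorkflowResults then MERGE-upserts (seeding a default pending snapshot when none exists) and returns the merged step context, while updateWorkflowState plain-UPDATEs an existing snapshot and returns undefined when there is no row. A usage sketch through the store facade:

// Hedged example: record a step result, then patch top-level snapshot fields.
const context = await store.updateWorkflowResults({
  workflowName: 'order-flow',
  runId: 'run-42',
  stepId: 'charge-card',
  result: { status: 'success', output: { chargeId: 'ch_1' } },
  requestContext: { userId: 'user-1' },
});

const snapshot = await store.updateWorkflowState({
  workflowName: 'order-flow',
  runId: 'run-42',
  opts: { status: 'running' }, // shallow-merged over the stored snapshot
});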
1834
- async batchTraceInsert({ records }) {
1835
- this.logger.debug("Batch inserting traces", { count: records.length });
1836
- await this.operations.batchInsert({
1837
- tableName: TABLE_TRACES,
1838
- records
1839
- });
1840
- }
1841
- };
1842
- function parseWorkflowRun(row) {
1843
- let parsedSnapshot = row.snapshot;
1844
- if (typeof parsedSnapshot === "string") {
1845
- try {
1846
- parsedSnapshot = JSON.parse(row.snapshot);
1847
- } catch (e) {
1848
- console.warn(`Failed to parse snapshot for workflow ${row.workflow_name}: ${e}`);
1849
- }
1850
- }
1851
- return {
1852
- workflowName: row.workflow_name,
1853
- runId: row.run_id,
1854
- snapshot: parsedSnapshot,
1855
- createdAt: row.createdAt,
1856
- updatedAt: row.updatedAt,
1857
- resourceId: row.resourceId
1858
- };
1859
- }
1860
- var WorkflowsMSSQL = class extends WorkflowsStorage {
1861
- pool;
1862
- operations;
1863
- schema;
1864
- constructor({
1865
- pool,
1866
- operations,
1867
- schema
1868
- }) {
1869
- super();
1870
- this.pool = pool;
1871
- this.operations = operations;
1872
- this.schema = schema;
1873
- }
1874
- updateWorkflowResults({
1875
- // workflowName,
1876
- // runId,
1877
- // stepId,
1878
- // result,
1879
- // runtimeContext,
1880
- }) {
1881
- throw new Error("Method not implemented.");
1882
- }
1883
- updateWorkflowState({
1884
- // workflowName,
1885
- // runId,
1886
- // opts,
1887
- }) {
1888
- throw new Error("Method not implemented.");
1889
- }
1890
2816
  async persistWorkflowSnapshot({
1891
2817
  workflowName,
1892
2818
  runId,
@@ -1983,7 +2909,7 @@ var WorkflowsMSSQL = class extends WorkflowsStorage {
1983
2909
  if (!result.recordset || result.recordset.length === 0) {
1984
2910
  return null;
1985
2911
  }
1986
- return parseWorkflowRun(result.recordset[0]);
2912
+ return this.parseWorkflowRun(result.recordset[0]);
1987
2913
  } catch (error) {
1988
2914
  throw new MastraError(
1989
2915
  {
@@ -1999,12 +2925,12 @@ var WorkflowsMSSQL = class extends WorkflowsStorage {
1999
2925
  );
2000
2926
  }
2001
2927
  }
2002
- async getWorkflowRuns({
2928
+ async listWorkflowRuns({
2003
2929
  workflowName,
2004
2930
  fromDate,
2005
2931
  toDate,
2006
- limit,
2007
- offset,
2932
+ page,
2933
+ perPage,
2008
2934
  resourceId
2009
2935
  } = {}) {
2010
2936
  try {
@@ -2020,7 +2946,7 @@ var WorkflowsMSSQL = class extends WorkflowsStorage {
2020
2946
  conditions.push(`[resourceId] = @resourceId`);
2021
2947
  paramMap["resourceId"] = resourceId;
2022
2948
  } else {
2023
- console.warn(`[${TABLE_WORKFLOW_SNAPSHOT}] resourceId column not found. Skipping resourceId filter.`);
2949
+ this.logger?.warn?.(`[${TABLE_WORKFLOW_SNAPSHOT}] resourceId column not found. Skipping resourceId filter.`);
2024
2950
  }
2025
2951
  }
2026
2952
  if (fromDate instanceof Date && !isNaN(fromDate.getTime())) {
@@ -2042,24 +2968,27 @@ var WorkflowsMSSQL = class extends WorkflowsStorage {
2042
2968
  request.input(key, value);
2043
2969
  }
2044
2970
  });
2045
- if (limit !== void 0 && offset !== void 0) {
2971
+ const usePagination = typeof perPage === "number" && typeof page === "number";
2972
+ if (usePagination) {
2046
2973
  const countQuery = `SELECT COUNT(*) as count FROM ${tableName} ${whereClause}`;
2047
2974
  const countResult = await request.query(countQuery);
2048
2975
  total = Number(countResult.recordset[0]?.count || 0);
2049
2976
  }
2050
2977
  let query = `SELECT * FROM ${tableName} ${whereClause} ORDER BY [seq_id] DESC`;
2051
- if (limit !== void 0 && offset !== void 0) {
2052
- query += ` OFFSET @offset ROWS FETCH NEXT @limit ROWS ONLY`;
2053
- request.input("limit", limit);
2978
+ if (usePagination) {
2979
+ const normalizedPerPage = normalizePerPage(perPage, Number.MAX_SAFE_INTEGER);
2980
+ const offset = page * normalizedPerPage;
2981
+ query += ` OFFSET @offset ROWS FETCH NEXT @perPage ROWS ONLY`;
2982
+ request.input("perPage", normalizedPerPage);
2054
2983
  request.input("offset", offset);
2055
2984
  }
2056
2985
  const result = await request.query(query);
2057
- const runs = (result.recordset || []).map((row) => parseWorkflowRun(row));
2986
+ const runs = (result.recordset || []).map((row) => this.parseWorkflowRun(row));
2058
2987
  return { runs, total: total || runs.length };
2059
2988
  } catch (error) {
2060
2989
  throw new MastraError(
2061
2990
  {
2062
- id: "MASTRA_STORAGE_MSSQL_STORE_GET_WORKFLOW_RUNS_FAILED",
2991
+ id: "MASTRA_STORAGE_MSSQL_STORE_LIST_WORKFLOW_RUNS_FAILED",
2063
2992
  domain: ErrorDomain.STORAGE,
2064
2993
  category: ErrorCategory.THIRD_PARTY,
2065
2994
  details: {
@@ -2102,19 +3031,17 @@ var MSSQLStore = class extends MastraStorage {
2102
3031
  port: config.port,
2103
3032
  options: config.options || { encrypt: true, trustServerCertificate: true }
2104
3033
  });
2105
- const legacyEvals = new LegacyEvalsMSSQL({ pool: this.pool, schema: this.schema });
2106
3034
  const operations = new StoreOperationsMSSQL({ pool: this.pool, schemaName: this.schema });
2107
3035
  const scores = new ScoresMSSQL({ pool: this.pool, operations, schema: this.schema });
2108
- const traces = new TracesMSSQL({ pool: this.pool, operations, schema: this.schema });
2109
3036
  const workflows = new WorkflowsMSSQL({ pool: this.pool, operations, schema: this.schema });
2110
3037
  const memory = new MemoryMSSQL({ pool: this.pool, schema: this.schema, operations });
3038
+ const observability = new ObservabilityMSSQL({ pool: this.pool, operations, schema: this.schema });
2111
3039
  this.stores = {
2112
3040
  operations,
2113
3041
  scores,
2114
- traces,
2115
3042
  workflows,
2116
- legacyEvals,
2117
- memory
3043
+ memory,
3044
+ observability
2118
3045
  };
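The composed domain stores change shape here: legacy evals and the traces store are dropped, and a new ObservabilityMSSQL store (defined elsewhere in this file) carries AI tracing alongside operations, scores, workflows and memory. A construction sketch; the config keys are assumptions based on the ConnectionPool options used just above, not verified against the package's config type:

// Hedged example: create and initialise the store.
const store = new MSSQLStore({
  server: 'localhost',      // assumed key names; the package's MSSQLConfig type is the real contract
  port: 1433,
  database: 'mastra',
  user: 'sa',
  password: process.env.MSSQL_PASSWORD,
  options: { encrypt: true, trustServerCertificate: true },
});
await store.init(); // in this version, init() also attempts to create the automatic performance indexes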
2119
3046
  } catch (e) {
2120
3047
  throw new MastraError(
@@ -2134,6 +3061,11 @@ var MSSQLStore = class extends MastraStorage {
2134
3061
  try {
2135
3062
  await this.isConnected;
2136
3063
  await super.init();
3064
+ try {
3065
+ await this.stores.operations.createAutomaticIndexes();
3066
+ } catch (indexError) {
3067
+ this.logger?.warn?.("Failed to create indexes:", indexError);
3068
+ }
2137
3069
  } catch (error) {
2138
3070
  this.isConnected = null;
2139
3071
  throw new MastraError(
@@ -2160,28 +3092,12 @@ var MSSQLStore = class extends MastraStorage {
2160
3092
  resourceWorkingMemory: true,
2161
3093
  hasColumn: true,
2162
3094
  createTable: true,
2163
- deleteMessages: true
3095
+ deleteMessages: true,
3096
+ listScoresBySpan: true,
3097
+ aiTracing: true,
3098
+ indexManagement: true
2164
3099
  };
2165
3100
  }
2166
- /** @deprecated use getEvals instead */
2167
- async getEvalsByAgentName(agentName, type) {
2168
- return this.stores.legacyEvals.getEvalsByAgentName(agentName, type);
2169
- }
2170
- async getEvals(options = {}) {
2171
- return this.stores.legacyEvals.getEvals(options);
2172
- }
2173
- /**
2174
- * @deprecated use getTracesPaginated instead
2175
- */
2176
- async getTraces(args) {
2177
- return this.stores.traces.getTraces(args);
2178
- }
2179
- async getTracesPaginated(args) {
2180
- return this.stores.traces.getTracesPaginated(args);
2181
- }
2182
- async batchTraceInsert({ records }) {
2183
- return this.stores.traces.batchTraceInsert({ records });
2184
- }
2185
3101
  async createTable({
2186
3102
  tableName,
2187
3103
  schema
@@ -2216,15 +3132,6 @@ var MSSQLStore = class extends MastraStorage {
2216
3132
  async getThreadById({ threadId }) {
2217
3133
  return this.stores.memory.getThreadById({ threadId });
2218
3134
  }
2219
- /**
2220
- * @deprecated use getThreadsByResourceIdPaginated instead
2221
- */
2222
- async getThreadsByResourceId(args) {
2223
- return this.stores.memory.getThreadsByResourceId(args);
2224
- }
2225
- async getThreadsByResourceIdPaginated(args) {
2226
- return this.stores.memory.getThreadsByResourceIdPaginated(args);
2227
- }
2228
3135
  async saveThread({ thread }) {
2229
3136
  return this.stores.memory.saveThread({ thread });
2230
3137
  }
@@ -2238,17 +3145,14 @@ var MSSQLStore = class extends MastraStorage {
2238
3145
  async deleteThread({ threadId }) {
2239
3146
  return this.stores.memory.deleteThread({ threadId });
2240
3147
  }
3148
+ /**
3149
+ * @deprecated use listMessages instead
3150
+ */
2241
3151
  async getMessages(args) {
2242
3152
  return this.stores.memory.getMessages(args);
2243
3153
  }
2244
- async getMessagesById({
2245
- messageIds,
2246
- format
2247
- }) {
2248
- return this.stores.memory.getMessagesById({ messageIds, format });
2249
- }
2250
- async getMessagesPaginated(args) {
2251
- return this.stores.memory.getMessagesPaginated(args);
3154
+ async listMessagesById({ messageIds }) {
3155
+ return this.stores.memory.listMessagesById({ messageIds });
2252
3156
  }
2253
3157
  async saveMessages(args) {
2254
3158
  return this.stores.memory.saveMessages(args);
@@ -2282,9 +3186,9 @@ var MSSQLStore = class extends MastraStorage {
2282
3186
  runId,
2283
3187
  stepId,
2284
3188
  result,
2285
- runtimeContext
3189
+ requestContext
2286
3190
  }) {
2287
- return this.stores.workflows.updateWorkflowResults({ workflowName, runId, stepId, result, runtimeContext });
3191
+ return this.stores.workflows.updateWorkflowResults({ workflowName, runId, stepId, result, requestContext });
2288
3192
  }
2289
3193
  async updateWorkflowState({
2290
3194
  workflowName,
@@ -2307,15 +3211,15 @@ var MSSQLStore = class extends MastraStorage {
2307
3211
  }) {
2308
3212
  return this.stores.workflows.loadWorkflowSnapshot({ workflowName, runId });
2309
3213
  }
2310
- async getWorkflowRuns({
3214
+ async listWorkflowRuns({
2311
3215
  workflowName,
2312
3216
  fromDate,
2313
3217
  toDate,
2314
- limit,
2315
- offset,
3218
+ perPage,
3219
+ page,
2316
3220
  resourceId
2317
3221
  } = {}) {
2318
- return this.stores.workflows.getWorkflowRuns({ workflowName, fromDate, toDate, limit, offset, resourceId });
3222
+ return this.stores.workflows.listWorkflowRuns({ workflowName, fromDate, toDate, perPage, page, resourceId });
2319
3223
  }
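Workflow-run listing swaps limit/offset for zero-based page/perPage: when both are numbers the store issues an extra COUNT query so total covers the whole filtered set, otherwise every matching run is returned and total falls back to the number of rows. Results stay ordered by seq_id DESC (newest first). A usage sketch:

// Hedged example: first page of recent runs for one workflow and resource.
const { runs, total } = await store.listWorkflowRuns({
  workflowName: 'order-flow',
  resourceId: 'user-1',
  fromDate: new Date(Date.now() - 24 * 60 * 60 * 1000), // last 24h
  page: 0,
  perPage: 25,
});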
2320
3224
  async getWorkflowRunById({
2321
3225
  runId,
@@ -2326,38 +3230,108 @@ var MSSQLStore = class extends MastraStorage {
2326
3230
  async close() {
2327
3231
  await this.pool.close();
2328
3232
  }
3233
+ /**
3234
+ * Index Management
3235
+ */
3236
+ async createIndex(options) {
3237
+ return this.stores.operations.createIndex(options);
3238
+ }
3239
+ async listIndexes(tableName) {
3240
+ return this.stores.operations.listIndexes(tableName);
3241
+ }
3242
+ async describeIndex(indexName) {
3243
+ return this.stores.operations.describeIndex(indexName);
3244
+ }
3245
+ async dropIndex(indexName) {
3246
+ return this.stores.operations.dropIndex(indexName);
3247
+ }
3248
+ /**
3249
+ * AI Tracing / Observability
3250
+ */
3251
+ getObservabilityStore() {
3252
+ if (!this.stores.observability) {
3253
+ throw new MastraError({
3254
+ id: "MSSQL_STORE_OBSERVABILITY_NOT_INITIALIZED",
3255
+ domain: ErrorDomain.STORAGE,
3256
+ category: ErrorCategory.SYSTEM,
3257
+ text: "Observability storage is not initialized"
3258
+ });
3259
+ }
3260
+ return this.stores.observability;
3261
+ }
3262
+ async createAISpan(span) {
3263
+ return this.getObservabilityStore().createAISpan(span);
3264
+ }
3265
+ async updateAISpan({
3266
+ spanId,
3267
+ traceId,
3268
+ updates
3269
+ }) {
3270
+ return this.getObservabilityStore().updateAISpan({ spanId, traceId, updates });
3271
+ }
3272
+ async getAITrace(traceId) {
3273
+ return this.getObservabilityStore().getAITrace(traceId);
3274
+ }
3275
+ async getAITracesPaginated(args) {
3276
+ return this.getObservabilityStore().getAITracesPaginated(args);
3277
+ }
3278
+ async batchCreateAISpans(args) {
3279
+ return this.getObservabilityStore().batchCreateAISpans(args);
3280
+ }
3281
+ async batchUpdateAISpans(args) {
3282
+ return this.getObservabilityStore().batchUpdateAISpans(args);
3283
+ }
3284
+ async batchDeleteAITraces(args) {
3285
+ return this.getObservabilityStore().batchDeleteAITraces(args);
3286
+ }
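The AI-tracing surface on MSSQLStore is a thin facade: every call forwards to the ObservabilityMSSQL store, and getObservabilityStore raises a SYSTEM-category MastraError if observability was never wired up in the constructor. A usage sketch; the updates payload shape is an assumption, as it is not defined in this diff:

// Hedged example: read one trace, then patch a span on it.
const trace = await store.getAITrace('trace-abc');
await store.updateAISpan({
  traceId: 'trace-abc',
  spanId: 'span-1',
  updates: { attributes: { verdict: 'ok' } }, // assumed shape; see the span types in @mastra/core
});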
2329
3287
  /**
2330
3288
  * Scorers
2331
3289
  */
2332
3290
  async getScoreById({ id: _id }) {
2333
3291
  return this.stores.scores.getScoreById({ id: _id });
2334
3292
  }
2335
- async getScoresByScorerId({
3293
+ async listScoresByScorerId({
2336
3294
  scorerId: _scorerId,
2337
- pagination: _pagination
3295
+ pagination: _pagination,
3296
+ entityId: _entityId,
3297
+ entityType: _entityType,
3298
+ source: _source
2338
3299
  }) {
2339
- return this.stores.scores.getScoresByScorerId({ scorerId: _scorerId, pagination: _pagination });
3300
+ return this.stores.scores.listScoresByScorerId({
3301
+ scorerId: _scorerId,
3302
+ pagination: _pagination,
3303
+ entityId: _entityId,
3304
+ entityType: _entityType,
3305
+ source: _source
3306
+ });
2340
3307
  }
2341
3308
  async saveScore(_score) {
2342
3309
  return this.stores.scores.saveScore(_score);
2343
3310
  }
2344
- async getScoresByRunId({
3311
+ async listScoresByRunId({
2345
3312
  runId: _runId,
2346
3313
  pagination: _pagination
2347
3314
  }) {
2348
- return this.stores.scores.getScoresByRunId({ runId: _runId, pagination: _pagination });
3315
+ return this.stores.scores.listScoresByRunId({ runId: _runId, pagination: _pagination });
2349
3316
  }
2350
- async getScoresByEntityId({
3317
+ async listScoresByEntityId({
2351
3318
  entityId: _entityId,
2352
3319
  entityType: _entityType,
2353
3320
  pagination: _pagination
2354
3321
  }) {
2355
- return this.stores.scores.getScoresByEntityId({
3322
+ return this.stores.scores.listScoresByEntityId({
2356
3323
  entityId: _entityId,
2357
3324
  entityType: _entityType,
2358
3325
  pagination: _pagination
2359
3326
  });
2360
3327
  }
3328
+ async listScoresBySpan({
3329
+ traceId,
3330
+ spanId,
3331
+ pagination: _pagination
3332
+ }) {
3333
+ return this.stores.scores.listScoresBySpan({ traceId, spanId, pagination: _pagination });
3334
+ }
2361
3335
  };
2362
3336
 
2363
3337
  export { MSSQLStore };