@mastra/mssql 0.0.0-fix-multi-modal-for-cloud-20251028082043 → 0.0.0-fix-persist-session-cache-option-mcp-server-20251030161352
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/CHANGELOG.md +11 -3
- package/dist/index.cjs +61 -204
- package/dist/index.cjs.map +1 -1
- package/dist/index.js +61 -204
- package/dist/index.js.map +1 -1
- package/dist/storage/domains/memory/index.d.ts +6 -1
- package/dist/storage/domains/memory/index.d.ts.map +1 -1
- package/dist/storage/domains/operations/index.d.ts.map +1 -1
- package/dist/storage/domains/workflows/index.d.ts +4 -2
- package/dist/storage/domains/workflows/index.d.ts.map +1 -1
- package/dist/storage/index.d.ts +3 -11
- package/dist/storage/index.d.ts.map +1 -1
- package/package.json +5 -5
- package/dist/storage/domains/legacy-evals/index.d.ts +0 -20
- package/dist/storage/domains/legacy-evals/index.d.ts.map +0 -1
package/dist/index.js
CHANGED
@@ -1,8 +1,8 @@
 import { MastraError, ErrorCategory, ErrorDomain } from '@mastra/core/error';
-import { MastraStorage,
-import
-import { parseSqlIdentifier } from '@mastra/core/utils';
+import { MastraStorage, StoreOperations, TABLE_WORKFLOW_SNAPSHOT, TABLE_SCHEMAS, TABLE_THREADS, TABLE_MESSAGES, TABLE_TRACES, TABLE_SCORERS, TABLE_AI_SPANS, ScoresStorage, WorkflowsStorage, MemoryStorage, resolveMessageLimit, TABLE_RESOURCES, ObservabilityStorage, safelyParseJSON } from '@mastra/core/storage';
+import sql2 from 'mssql';
 import { MessageList } from '@mastra/core/agent';
+import { parseSqlIdentifier } from '@mastra/core/utils';
 import { randomUUID } from 'crypto';
 import { saveScorePayloadSchema } from '@mastra/core/scores';
 
@@ -80,153 +80,7 @@ function transformFromSqlRow({
 return result;
 }
 
-// src/storage/domains/
-function transformEvalRow(row) {
-let testInfoValue = null, resultValue = null;
-if (row.test_info) {
-try {
-testInfoValue = typeof row.test_info === "string" ? JSON.parse(row.test_info) : row.test_info;
-} catch {
-}
-}
-if (row.result) {
-try {
-resultValue = typeof row.result === "string" ? JSON.parse(row.result) : row.result;
-} catch {
-}
-}
-return {
-agentName: row.agent_name,
-input: row.input,
-output: row.output,
-result: resultValue,
-metricName: row.metric_name,
-instructions: row.instructions,
-testInfo: testInfoValue,
-globalRunId: row.global_run_id,
-runId: row.run_id,
-createdAt: row.created_at
-};
-}
-var LegacyEvalsMSSQL = class extends LegacyEvalsStorage {
-pool;
-schema;
-constructor({ pool, schema }) {
-super();
-this.pool = pool;
-this.schema = schema;
-}
-/** @deprecated use getEvals instead */
-async getEvalsByAgentName(agentName, type) {
-try {
-let query = `SELECT * FROM ${getTableName({ indexName: TABLE_EVALS, schemaName: getSchemaName(this.schema) })} WHERE agent_name = @p1`;
-if (type === "test") {
-query += " AND test_info IS NOT NULL AND JSON_VALUE(test_info, '$.testPath') IS NOT NULL";
-} else if (type === "live") {
-query += " AND (test_info IS NULL OR JSON_VALUE(test_info, '$.testPath') IS NULL)";
-}
-query += " ORDER BY created_at DESC";
-const request = this.pool.request();
-request.input("p1", agentName);
-const result = await request.query(query);
-const rows = result.recordset;
-return typeof transformEvalRow === "function" ? rows?.map((row) => transformEvalRow(row)) ?? [] : rows ?? [];
-} catch (error) {
-if (error && error.number === 208 && error.message && error.message.includes("Invalid object name")) {
-return [];
-}
-this.logger?.error?.("Failed to get evals for the specified agent:", error);
-throw error;
-}
-}
-async getEvals(options = {}) {
-const { agentName, type, page = 0, perPage = 100, dateRange } = options;
-const fromDate = dateRange?.start;
-const toDate = dateRange?.end;
-const where = [];
-const params = {};
-if (agentName) {
-where.push("agent_name = @agentName");
-params["agentName"] = agentName;
-}
-if (type === "test") {
-where.push("test_info IS NOT NULL AND JSON_VALUE(test_info, '$.testPath') IS NOT NULL");
-} else if (type === "live") {
-where.push("(test_info IS NULL OR JSON_VALUE(test_info, '$.testPath') IS NULL)");
-}
-if (fromDate instanceof Date && !isNaN(fromDate.getTime())) {
-where.push(`[created_at] >= @fromDate`);
-params[`fromDate`] = fromDate.toISOString();
-}
-if (toDate instanceof Date && !isNaN(toDate.getTime())) {
-where.push(`[created_at] <= @toDate`);
-params[`toDate`] = toDate.toISOString();
-}
-const whereClause = where.length > 0 ? `WHERE ${where.join(" AND ")}` : "";
-const tableName = getTableName({ indexName: TABLE_EVALS, schemaName: getSchemaName(this.schema) });
-const offset = page * perPage;
-const countQuery = `SELECT COUNT(*) as total FROM ${tableName} ${whereClause}`;
-const dataQuery = `SELECT * FROM ${tableName} ${whereClause} ORDER BY seq_id DESC OFFSET @offset ROWS FETCH NEXT @perPage ROWS ONLY`;
-try {
-const countReq = this.pool.request();
-Object.entries(params).forEach(([key, value]) => {
-if (value instanceof Date) {
-countReq.input(key, sql3.DateTime, value);
-} else {
-countReq.input(key, value);
-}
-});
-const countResult = await countReq.query(countQuery);
-const total = countResult.recordset[0]?.total || 0;
-if (total === 0) {
-return {
-evals: [],
-total: 0,
-page,
-perPage,
-hasMore: false
-};
-}
-const req = this.pool.request();
-Object.entries(params).forEach(([key, value]) => {
-if (value instanceof Date) {
-req.input(key, sql3.DateTime, value);
-} else {
-req.input(key, value);
-}
-});
-req.input("offset", offset);
-req.input("perPage", perPage);
-const result = await req.query(dataQuery);
-const rows = result.recordset;
-return {
-evals: rows?.map((row) => transformEvalRow(row)) ?? [],
-total,
-page,
-perPage,
-hasMore: offset + (rows?.length ?? 0) < total
-};
-} catch (error) {
-const mastraError = new MastraError(
-{
-id: "MASTRA_STORAGE_MSSQL_STORE_GET_EVALS_FAILED",
-domain: ErrorDomain.STORAGE,
-category: ErrorCategory.THIRD_PARTY,
-details: {
-agentName: agentName || "all",
-type: type || "all",
-page,
-perPage
-}
-},
-error
-);
-this.logger?.error?.(mastraError.toString());
-this.logger?.trackException?.(mastraError);
-throw mastraError;
-}
-}
-};
+// src/storage/domains/memory/index.ts
 var MemoryMSSQL = class extends MemoryStorage {
 pool;
 schema;
@@ -258,7 +112,7 @@ var MemoryMSSQL = class extends MemoryStorage {
 }
 async getThreadById({ threadId }) {
 try {
-const
+const sql5 = `SELECT
 id,
 [resourceId],
 title,
@@ -269,7 +123,7 @@ var MemoryMSSQL = class extends MemoryStorage {
 WHERE id = @threadId`;
 const request = this.pool.request();
 request.input("threadId", threadId);
-const resultSet = await request.query(
+const resultSet = await request.query(sql5);
 const thread = resultSet.recordset[0] || null;
 if (!thread) {
 return null;
@@ -375,12 +229,12 @@ var MemoryMSSQL = class extends MemoryStorage {
 req.input("title", thread.title);
 const metadata = thread.metadata ? JSON.stringify(thread.metadata) : null;
 if (metadata === null) {
-req.input("metadata",
+req.input("metadata", sql2.NVarChar, null);
 } else {
 req.input("metadata", metadata);
 }
-req.input("createdAt",
-req.input("updatedAt",
+req.input("createdAt", sql2.DateTime2, thread.createdAt);
+req.input("updatedAt", sql2.DateTime2, thread.updatedAt);
 await req.query(mergeSql);
 return thread;
 } catch (error) {
@@ -449,7 +303,7 @@ var MemoryMSSQL = class extends MemoryStorage {
 };
 try {
 const table = getTableName({ indexName: TABLE_THREADS, schemaName: getSchemaName(this.schema) });
-const
+const sql5 = `UPDATE ${table}
 SET title = @title,
 metadata = @metadata,
 [updatedAt] = @updatedAt
@@ -460,7 +314,7 @@ var MemoryMSSQL = class extends MemoryStorage {
 req.input("title", title);
 req.input("metadata", JSON.stringify(mergedMetadata));
 req.input("updatedAt", /* @__PURE__ */ new Date());
-const result = await req.query(
+const result = await req.query(sql5);
 let thread = result.recordset && result.recordset[0];
 if (thread && "seq_id" in thread) {
 const { seq_id, ...rest } = thread;
@@ -699,6 +553,20 @@ var MemoryMSSQL = class extends MemoryStorage {
 return [];
 }
 }
+async listMessages(_args) {
+throw new Error(
+`listMessages is not yet implemented by this storage adapter (${this.constructor.name}). This method is currently being rolled out across all storage adapters. Please use getMessages or getMessagesPaginated as an alternative, or wait for the implementation.`
+);
+}
+async listMessagesById({ messageIds }) {
+return this.getMessagesById({ messageIds, format: "v2" });
+}
+async listThreadsByResourceId(args) {
+const { resourceId, limit, offset, orderBy, sortDirection } = args;
+const page = Math.floor(offset / limit);
+const perPage = limit;
+return this.getThreadsByResourceIdPaginated({ resourceId, page, perPage, orderBy, sortDirection });
+}
 async getMessagesPaginated(args) {
 const { threadId, resourceId, format, selectBy } = args;
 const { page = 0, perPage: perPageInput, dateRange } = selectBy?.pagination || {};
@@ -829,7 +697,7 @@ var MemoryMSSQL = class extends MemoryStorage {
 "content",
 typeof message.content === "string" ? message.content : JSON.stringify(message.content)
 );
-request.input("createdAt",
+request.input("createdAt", sql2.DateTime2, message.createdAt);
 request.input("role", message.role);
 request.input("type", message.type || "v2");
 request.input("resourceId", message.resourceId);
@@ -848,7 +716,7 @@ var MemoryMSSQL = class extends MemoryStorage {
 await request.query(mergeSql);
 }
 const threadReq = transaction.request();
-threadReq.input("updatedAt",
+threadReq.input("updatedAt", sql2.DateTime2, /* @__PURE__ */ new Date());
 threadReq.input("id", threadId);
 await threadReq.query(`UPDATE ${tableThreads} SET [updatedAt] = @updatedAt WHERE id = @id`);
 await transaction.commit();
@@ -1550,7 +1418,7 @@ var StoreOperationsMSSQL = class extends StoreOperations {
 const value = record[col];
 const preparedValue = this.prepareValue(value, col, tableName);
 if (preparedValue instanceof Date) {
-request.input(`param${i}`,
+request.input(`param${i}`, sql2.DateTime2, preparedValue);
 } else if (preparedValue === null || preparedValue === void 0) {
 request.input(`param${i}`, this.getMssqlType(tableName, col), null);
 } else {
@@ -1765,7 +1633,7 @@ ${columns}
 try {
 const keyEntries = Object.entries(keys).map(([key, value]) => [parseSqlIdentifier(key, "column name"), value]);
 const conditions = keyEntries.map(([key], i) => `[${key}] = @param${i}`).join(" AND ");
-const
+const sql5 = `SELECT * FROM ${getTableName({ indexName: tableName, schemaName: getSchemaName(this.schemaName) })} WHERE ${conditions}`;
 const request = this.pool.request();
 keyEntries.forEach(([key, value], i) => {
 const preparedValue = this.prepareValue(value, key, tableName);
@@ -1775,7 +1643,7 @@ ${columns}
 request.input(`param${i}`, preparedValue);
 }
 });
-const resultSet = await request.query(
+const resultSet = await request.query(sql5);
 const result = resultSet.recordset[0] || null;
 if (!result) {
 return null;
@@ -1874,23 +1742,23 @@ ${columns}
 const col = TABLE_SCHEMAS[tableName]?.[columnName];
 switch (col?.type) {
 case "text":
-return
+return sql2.NVarChar;
 case "timestamp":
-return
+return sql2.DateTime2;
 case "uuid":
-return
+return sql2.UniqueIdentifier;
 case "jsonb":
-return
+return sql2.NVarChar;
 case "integer":
-return
+return sql2.Int;
 case "bigint":
-return
+return sql2.BigInt;
 case "float":
-return
+return sql2.Float;
 case "boolean":
-return
+return sql2.Bit;
 default:
-return
+return sql2.NVarChar;
 }
 }
 /**
@@ -2334,11 +2202,6 @@ ${columns}
 table: TABLE_TRACES,
 columns: ["name", "seq_id DESC"]
 },
-{
-name: `${schemaPrefix}mastra_evals_agent_name_seqid_idx`,
-table: TABLE_EVALS,
-columns: ["agent_name", "seq_id DESC"]
-},
 {
 name: `${schemaPrefix}mastra_scores_trace_id_span_id_seqid_idx`,
 table: TABLE_SCORERS,
@@ -2403,7 +2266,7 @@ function transformScoreRow(row) {
 metadata: safelyParseJSON(row.metadata),
 output: safelyParseJSON(row.output),
 additionalContext: safelyParseJSON(row.additionalContext),
-
+requestContext: safelyParseJSON(row.requestContext),
 entity: safelyParseJSON(row.entity),
 createdAt: row.createdAt,
 updatedAt: row.updatedAt
@@ -2470,7 +2333,7 @@ var ScoresMSSQL = class extends ScoresStorage {
 input,
 output,
 additionalContext,
-
+requestContext,
 entity,
 ...rest
 } = validatedScore;
@@ -2485,7 +2348,7 @@ var ScoresMSSQL = class extends ScoresStorage {
 analyzeStepResult: analyzeStepResult || null,
 metadata: metadata || null,
 additionalContext: additionalContext || null,
-
+requestContext: requestContext || null,
 entity: entity || null,
 scorer: scorer || null,
 createdAt: (/* @__PURE__ */ new Date()).toISOString(),
@@ -2774,13 +2637,13 @@ var WorkflowsMSSQL = class extends WorkflowsStorage {
 runId,
 stepId,
 result,
-
+requestContext
 }) {
 const table = getTableName({ indexName: TABLE_WORKFLOW_SNAPSHOT, schemaName: getSchemaName(this.schema) });
 const transaction = this.pool.transaction();
 try {
 await transaction.begin();
-const selectRequest = new
+const selectRequest = new sql2.Request(transaction);
 selectRequest.input("workflow_name", workflowName);
 selectRequest.input("run_id", runId);
 const existingSnapshotResult = await selectRequest.query(
@@ -2799,20 +2662,20 @@ var WorkflowsMSSQL = class extends WorkflowsStorage {
 waitingPaths: {},
 status: "pending",
 runId,
-
+requestContext: {}
 };
 } else {
 const existingSnapshot = existingSnapshotResult.recordset[0].snapshot;
 snapshot = typeof existingSnapshot === "string" ? JSON.parse(existingSnapshot) : existingSnapshot;
 }
 snapshot.context[stepId] = result;
-snapshot.
-const upsertReq = new
+snapshot.requestContext = { ...snapshot.requestContext, ...requestContext };
+const upsertReq = new sql2.Request(transaction);
 upsertReq.input("workflow_name", workflowName);
 upsertReq.input("run_id", runId);
 upsertReq.input("snapshot", JSON.stringify(snapshot));
-upsertReq.input("createdAt",
-upsertReq.input("updatedAt",
+upsertReq.input("createdAt", sql2.DateTime2, /* @__PURE__ */ new Date());
+upsertReq.input("updatedAt", sql2.DateTime2, /* @__PURE__ */ new Date());
 await upsertReq.query(
 `MERGE ${table} AS target
 USING (SELECT @workflow_name AS workflow_name, @run_id AS run_id) AS src
@@ -2852,7 +2715,7 @@ var WorkflowsMSSQL = class extends WorkflowsStorage {
 const transaction = this.pool.transaction();
 try {
 await transaction.begin();
-const selectRequest = new
+const selectRequest = new sql2.Request(transaction);
 selectRequest.input("workflow_name", workflowName);
 selectRequest.input("run_id", runId);
 const existingSnapshotResult = await selectRequest.query(
@@ -2880,11 +2743,11 @@ var WorkflowsMSSQL = class extends WorkflowsStorage {
 );
 }
 const updatedSnapshot = { ...snapshot, ...opts };
-const updateRequest = new
+const updateRequest = new sql2.Request(transaction);
 updateRequest.input("snapshot", JSON.stringify(updatedSnapshot));
 updateRequest.input("workflow_name", workflowName);
 updateRequest.input("run_id", runId);
-updateRequest.input("updatedAt",
+updateRequest.input("updatedAt", sql2.DateTime2, /* @__PURE__ */ new Date());
 await updateRequest.query(
 `UPDATE ${table} SET snapshot = @snapshot, [updatedAt] = @updatedAt WHERE workflow_name = @workflow_name AND run_id = @run_id`
 );
@@ -2923,8 +2786,8 @@ var WorkflowsMSSQL = class extends WorkflowsStorage {
 request.input("run_id", runId);
 request.input("resourceId", resourceId);
 request.input("snapshot", JSON.stringify(snapshot));
-request.input("createdAt",
-request.input("updatedAt",
+request.input("createdAt", sql2.DateTime2, new Date(now));
+request.input("updatedAt", sql2.DateTime2, new Date(now));
 const mergeSql = `MERGE INTO ${table} AS target
 USING (SELECT @workflow_name AS workflow_name, @run_id AS run_id) AS src
 ON target.workflow_name = src.workflow_name AND target.run_id = src.run_id
@@ -3059,7 +2922,7 @@ var WorkflowsMSSQL = class extends WorkflowsStorage {
 const request = this.pool.request();
 Object.entries(paramMap).forEach(([key, value]) => {
 if (value instanceof Date) {
-request.input(key,
+request.input(key, sql2.DateTime, value);
 } else {
 request.input(key, value);
 }
@@ -3092,6 +2955,9 @@ var WorkflowsMSSQL = class extends WorkflowsStorage {
 );
 }
 }
+async listWorkflowRuns(args) {
+return this.getWorkflowRuns(args);
+}
 };
 
 // src/storage/index.ts
@@ -3116,7 +2982,7 @@ var MSSQLStore = class extends MastraStorage {
 }
 }
 this.schema = config.schemaName || "dbo";
-this.pool = "connectionString" in config ? new
+this.pool = "connectionString" in config ? new sql2.ConnectionPool(config.connectionString) : new sql2.ConnectionPool({
 server: config.server,
 database: config.database,
 user: config.user,
@@ -3124,7 +2990,6 @@ var MSSQLStore = class extends MastraStorage {
 port: config.port,
 options: config.options || { encrypt: true, trustServerCertificate: true }
 });
-const legacyEvals = new LegacyEvalsMSSQL({ pool: this.pool, schema: this.schema });
 const operations = new StoreOperationsMSSQL({ pool: this.pool, schemaName: this.schema });
 const scores = new ScoresMSSQL({ pool: this.pool, operations, schema: this.schema });
 const workflows = new WorkflowsMSSQL({ pool: this.pool, operations, schema: this.schema });
@@ -3134,7 +2999,6 @@ var MSSQLStore = class extends MastraStorage {
 operations,
 scores,
 workflows,
-legacyEvals,
 memory,
 observability
 };
@@ -3193,13 +3057,6 @@ var MSSQLStore = class extends MastraStorage {
 indexManagement: true
 };
 }
-/** @deprecated use getEvals instead */
-async getEvalsByAgentName(agentName, type) {
-return this.stores.legacyEvals.getEvalsByAgentName(agentName, type);
-}
-async getEvals(options = {}) {
-return this.stores.legacyEvals.getEvals(options);
-}
 async createTable({
 tableName,
 schema
@@ -3300,9 +3157,9 @@ var MSSQLStore = class extends MastraStorage {
 runId,
 stepId,
 result,
-
+requestContext
 }) {
-return this.stores.workflows.updateWorkflowResults({ workflowName, runId, stepId, result,
+return this.stores.workflows.updateWorkflowResults({ workflowName, runId, stepId, result, requestContext });
 }
 async updateWorkflowState({
 workflowName,