@mastra/upstash 0.12.1 → 0.12.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.turbo/turbo-build.log +7 -7
- package/CHANGELOG.md +53 -0
- package/dist/_tsup-dts-rollup.d.cts +342 -40
- package/dist/_tsup-dts-rollup.d.ts +342 -40
- package/dist/index.cjs +1133 -612
- package/dist/index.js +1134 -613
- package/docker-compose.yaml +1 -1
- package/package.json +5 -5
- package/src/storage/domains/legacy-evals/index.ts +279 -0
- package/src/storage/domains/memory/index.ts +902 -0
- package/src/storage/domains/operations/index.ts +168 -0
- package/src/storage/domains/scores/index.ts +216 -0
- package/src/storage/domains/traces/index.ts +172 -0
- package/src/storage/domains/utils.ts +57 -0
- package/src/storage/domains/workflows/index.ts +243 -0
- package/src/storage/index.test.ts +13 -0
- package/src/storage/index.ts +143 -1416
- package/src/storage/upstash.test.ts +0 -1461
package/dist/index.cjs
CHANGED
@@ -1,159 +1,51 @@
 'use strict';
 
-var agent = require('@mastra/core/agent');
-var error = require('@mastra/core/error');
 var storage = require('@mastra/core/storage');
 var redis = require('@upstash/redis');
+var error = require('@mastra/core/error');
+var agent = require('@mastra/core/agent');
 var vector = require('@mastra/core/vector');
 var vector$1 = require('@upstash/vector');
 var filter = require('@mastra/core/vector/filter');
 
 // src/storage/index.ts
-
-
-
-
-
-
-
-});
-}
-get supports() {
-return {
-selectByIncludeResourceScope: true,
-resourceWorkingMemory: true
-};
-}
-transformEvalRecord(record) {
-let result = record.result;
-if (typeof result === "string") {
-try {
-result = JSON.parse(result);
-} catch {
-console.warn("Failed to parse result JSON:");
-}
-}
-let testInfo = record.test_info;
-if (typeof testInfo === "string") {
-try {
-testInfo = JSON.parse(testInfo);
-} catch {
-console.warn("Failed to parse test_info JSON:");
-}
-}
-return {
-agentName: record.agent_name,
-input: record.input,
-output: record.output,
-result,
-metricName: record.metric_name,
-instructions: record.instructions,
-testInfo,
-globalRunId: record.global_run_id,
-runId: record.run_id,
-createdAt: typeof record.created_at === "string" ? record.created_at : record.created_at instanceof Date ? record.created_at.toISOString() : (/* @__PURE__ */ new Date()).toISOString()
-};
-}
-parseJSON(value) {
-if (typeof value === "string") {
-try {
-return JSON.parse(value);
-} catch {
-return value;
-}
+function transformEvalRecord(record) {
+let result = record.result;
+if (typeof result === "string") {
+try {
+result = JSON.parse(result);
+} catch {
+console.warn("Failed to parse result JSON:");
 }
-return value;
-}
-getKey(tableName, keys) {
-const keyParts = Object.entries(keys).filter(([_, value]) => value !== void 0).map(([key, value]) => `${key}:${value}`);
-return `${tableName}:${keyParts.join(":")}`;
-}
-/**
-* Scans for keys matching the given pattern using SCAN and returns them as an array.
-* @param pattern Redis key pattern, e.g. "table:*"
-* @param batchSize Number of keys to scan per batch (default: 1000)
-*/
-async scanKeys(pattern, batchSize = 1e4) {
-let cursor = "0";
-let keys = [];
-do {
-const [nextCursor, batch] = await this.redis.scan(cursor, {
-match: pattern,
-count: batchSize
-});
-keys.push(...batch);
-cursor = nextCursor;
-} while (cursor !== "0");
-return keys;
-}
-/**
-* Deletes all keys matching the given pattern using SCAN and DEL in batches.
-* @param pattern Redis key pattern, e.g. "table:*"
-* @param batchSize Number of keys to delete per batch (default: 1000)
-*/
-async scanAndDelete(pattern, batchSize = 1e4) {
-let cursor = "0";
-let totalDeleted = 0;
-do {
-const [nextCursor, keys] = await this.redis.scan(cursor, {
-match: pattern,
-count: batchSize
-});
-if (keys.length > 0) {
-await this.redis.del(...keys);
-totalDeleted += keys.length;
-}
-cursor = nextCursor;
-} while (cursor !== "0");
-return totalDeleted;
-}
-getMessageKey(threadId, messageId) {
-const key = this.getKey(storage.TABLE_MESSAGES, { threadId, id: messageId });
-return key;
 }
-
-
-
-
-
-
-try {
-parsedSnapshot = JSON.parse(row.snapshot);
-} catch (e) {
-console.warn(`Failed to parse snapshot for workflow ${row.workflow_name}: ${e}`);
-}
+let testInfo = record.test_info;
+if (typeof testInfo === "string") {
+try {
+testInfo = JSON.parse(testInfo);
+} catch {
+console.warn("Failed to parse test_info JSON:");
 }
-return {
-workflowName: row.workflow_name,
-runId: row.run_id,
-snapshot: parsedSnapshot,
-createdAt: this.ensureDate(row.createdAt),
-updatedAt: this.ensureDate(row.updatedAt),
-resourceId: row.resourceId
-};
 }
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-};
-return { key, processedRecord };
+return {
+agentName: record.agent_name,
+input: record.input,
+output: record.output,
+result,
+metricName: record.metric_name,
+instructions: record.instructions,
+testInfo,
+globalRunId: record.global_run_id,
+runId: record.run_id,
+createdAt: typeof record.created_at === "string" ? record.created_at : record.created_at instanceof Date ? record.created_at.toISOString() : (/* @__PURE__ */ new Date()).toISOString()
+};
+}
+var StoreLegacyEvalsUpstash = class extends storage.LegacyEvalsStorage {
+client;
+operations;
+constructor({ client, operations }) {
+super();
+this.client = client;
+this.operations = operations;
 }
 /**
 * @deprecated Use getEvals instead
@@ -161,11 +53,11 @@ var UpstashStore = class extends storage.MastraStorage {
 async getEvalsByAgentName(agentName, type) {
 try {
 const pattern = `${storage.TABLE_EVALS}:*`;
-const keys = await this.scanKeys(pattern);
+const keys = await this.operations.scanKeys(pattern);
 if (keys.length === 0) {
 return [];
 }
-const pipeline = this.
+const pipeline = this.client.pipeline();
 keys.forEach((key) => pipeline.get(key));
 const results = await pipeline.exec();
 const nonNullRecords = results.filter(
@@ -199,7 +91,7 @@ var UpstashStore = class extends storage.MastraStorage {
 }
 });
 }
-return filteredEvals.map((record) =>
+return filteredEvals.map((record) => transformEvalRecord(record));
 } catch (error$1) {
 const mastraError = new error.MastraError(
 {
@@ -216,148 +108,198 @@ var UpstashStore = class extends storage.MastraStorage {
 }
 }
 /**
-*
+* Get all evaluations with pagination and total count
+* @param options Pagination and filtering options
+* @returns Object with evals array and total count
 */
-async
-if (args.fromDate || args.toDate) {
-args.dateRange = {
-start: args.fromDate,
-end: args.toDate
-};
-}
-try {
-const { traces } = await this.getTracesPaginated(args);
-return traces;
-} catch (error$1) {
-throw new error.MastraError(
-{
-id: "STORAGE_UPSTASH_STORAGE_GET_TRACES_FAILED",
-domain: error.ErrorDomain.STORAGE,
-category: error.ErrorCategory.THIRD_PARTY
-},
-error$1
-);
-}
-}
-async getTracesPaginated(args) {
-const { name, scope, page = 0, perPage = 100, attributes, filters, dateRange } = args;
-const fromDate = dateRange?.start;
-const toDate = dateRange?.end;
+async getEvals(options) {
 try {
-const
-const
+const { agentName, type, page = 0, perPage = 100, dateRange } = options || {};
+const fromDate = dateRange?.start;
+const toDate = dateRange?.end;
+const pattern = `${storage.TABLE_EVALS}:*`;
+const keys = await this.operations.scanKeys(pattern);
 if (keys.length === 0) {
 return {
-
+evals: [],
 total: 0,
 page,
-perPage
+perPage,
 hasMore: false
 };
 }
-const pipeline = this.
+const pipeline = this.client.pipeline();
 keys.forEach((key) => pipeline.get(key));
 const results = await pipeline.exec();
-let
-
-
-if (name) {
-filteredTraces = filteredTraces.filter((record) => record.name?.toLowerCase().startsWith(name.toLowerCase()));
-}
-if (scope) {
-filteredTraces = filteredTraces.filter((record) => record.scope === scope);
+let filteredEvals = results.map((result) => result).filter((record) => record !== null && typeof record === "object");
+if (agentName) {
+filteredEvals = filteredEvals.filter((record) => record.agent_name === agentName);
 }
-if (
-
-
-
-
-
+if (type === "test") {
+filteredEvals = filteredEvals.filter((record) => {
+if (!record.test_info) return false;
+try {
+if (typeof record.test_info === "string") {
+const parsedTestInfo = JSON.parse(record.test_info);
+return parsedTestInfo && typeof parsedTestInfo === "object" && "testPath" in parsedTestInfo;
+}
+return typeof record.test_info === "object" && "testPath" in record.test_info;
+} catch {
+return false;
+}
+});
+} else if (type === "live") {
+filteredEvals = filteredEvals.filter((record) => {
+if (!record.test_info) return true;
+try {
+if (typeof record.test_info === "string") {
+const parsedTestInfo = JSON.parse(record.test_info);
+return !(parsedTestInfo && typeof parsedTestInfo === "object" && "testPath" in parsedTestInfo);
+}
+return !(typeof record.test_info === "object" && "testPath" in record.test_info);
+} catch {
+return true;
+}
 });
-}
-if (filters) {
-filteredTraces = filteredTraces.filter(
-(record) => Object.entries(filters).every(([key, value]) => record[key] === value)
-);
 }
 if (fromDate) {
-
-
-
+filteredEvals = filteredEvals.filter((record) => {
+const createdAt = new Date(record.created_at || record.createdAt || 0);
+return createdAt.getTime() >= fromDate.getTime();
+});
 }
 if (toDate) {
-
-
-
+filteredEvals = filteredEvals.filter((record) => {
+const createdAt = new Date(record.created_at || record.createdAt || 0);
+return createdAt.getTime() <= toDate.getTime();
+});
 }
-
-
-
-
-
-
-
-
-
-events: this.parseJSON(record.events),
-links: this.parseJSON(record.links),
-attributes: this.parseJSON(record.attributes),
-startTime: record.startTime,
-endTime: record.endTime,
-other: this.parseJSON(record.other),
-createdAt: this.ensureDate(record.createdAt)
-}));
-const total = transformedTraces.length;
-const resolvedPerPage = perPage || 100;
-const start = page * resolvedPerPage;
-const end = start + resolvedPerPage;
-const paginatedTraces = transformedTraces.slice(start, end);
+filteredEvals.sort((a, b) => {
+const dateA = new Date(a.created_at || a.createdAt || 0).getTime();
+const dateB = new Date(b.created_at || b.createdAt || 0).getTime();
+return dateB - dateA;
+});
+const total = filteredEvals.length;
+const start = page * perPage;
+const end = start + perPage;
+const paginatedEvals = filteredEvals.slice(start, end);
 const hasMore = end < total;
+const evals = paginatedEvals.map((record) => transformEvalRecord(record));
 return {
-
+evals,
 total,
 page,
-perPage
+perPage,
 hasMore
 };
 } catch (error$1) {
+const { page = 0, perPage = 100 } = options || {};
 const mastraError = new error.MastraError(
 {
-id: "
+id: "STORAGE_UPSTASH_STORAGE_GET_EVALS_FAILED",
 domain: error.ErrorDomain.STORAGE,
 category: error.ErrorCategory.THIRD_PARTY,
 details: {
-
-
+page,
+perPage
 }
 },
 error$1
 );
-this.logger?.trackException(mastraError);
 this.logger.error(mastraError.toString());
+this.logger?.trackException(mastraError);
 return {
-
+evals: [],
 total: 0,
 page,
-perPage
+perPage,
 hasMore: false
 };
 }
 }
-
-
-
-
+};
+function ensureDate(value) {
+if (!value) return null;
+if (value instanceof Date) return value;
+if (typeof value === "string") return new Date(value);
+if (typeof value === "number") return new Date(value);
+return null;
+}
+function parseJSON(value) {
+if (typeof value === "string") {
+try {
+return JSON.parse(value);
+} catch {
+return value;
+}
+}
+return value;
+}
+function getKey(tableName, keys) {
+const keyParts = Object.entries(keys).filter(([_, value]) => value !== void 0).map(([key, value]) => `${key}:${value}`);
+return `${tableName}:${keyParts.join(":")}`;
+}
+function processRecord(tableName, record) {
+let key;
+if (tableName === storage.TABLE_MESSAGES) {
+key = getKey(tableName, { threadId: record.threadId, id: record.id });
+} else if (tableName === storage.TABLE_WORKFLOW_SNAPSHOT) {
+key = getKey(tableName, {
+namespace: record.namespace || "workflows",
+workflow_name: record.workflow_name,
+run_id: record.run_id,
+...record.resourceId ? { resourceId: record.resourceId } : {}
+});
+} else if (tableName === storage.TABLE_EVALS) {
+key = getKey(tableName, { id: record.run_id });
+} else {
+key = getKey(tableName, { id: record.id });
+}
+const processedRecord = {
+...record,
+createdAt: storage.serializeDate(record.createdAt),
+updatedAt: storage.serializeDate(record.updatedAt)
+};
+return { key, processedRecord };
+}
+
+// src/storage/domains/memory/index.ts
+function getThreadMessagesKey(threadId) {
+return `thread:${threadId}:messages`;
+}
+function getMessageKey(threadId, messageId) {
+const key = getKey(storage.TABLE_MESSAGES, { threadId, id: messageId });
+return key;
+}
+var StoreMemoryUpstash = class extends storage.MemoryStorage {
+client;
+operations;
+constructor({ client, operations }) {
+super();
+this.client = client;
+this.operations = operations;
+}
+async getThreadById({ threadId }) {
 try {
-await this.
+const thread = await this.operations.load({
+tableName: storage.TABLE_THREADS,
+keys: { id: threadId }
+});
+if (!thread) return null;
+return {
+...thread,
+createdAt: ensureDate(thread.createdAt),
+updatedAt: ensureDate(thread.updatedAt),
+metadata: typeof thread.metadata === "string" ? JSON.parse(thread.metadata) : thread.metadata
+};
 } catch (error$1) {
 throw new error.MastraError(
 {
-id: "
+id: "STORAGE_UPSTASH_STORAGE_GET_THREAD_BY_ID_FAILED",
 domain: error.ErrorDomain.STORAGE,
 category: error.ErrorCategory.THIRD_PARTY,
 details: {
-
+threadId
 }
 },
 error$1
@@ -365,135 +307,17 @@ var UpstashStore = class extends storage.MastraStorage {
 }
 }
 /**
-*
-* @param tableName Name of the table
-* @param schema Schema of the table
-* @param ifNotExists Array of column names to add if they don't exist
+* @deprecated use getThreadsByResourceIdPaginated instead
 */
-async
-}
-async clearTable({ tableName }) {
-const pattern = `${tableName}:*`;
-try {
-await this.scanAndDelete(pattern);
-} catch (error$1) {
-throw new error.MastraError(
-{
-id: "STORAGE_UPSTASH_STORAGE_CLEAR_TABLE_FAILED",
-domain: error.ErrorDomain.STORAGE,
-category: error.ErrorCategory.THIRD_PARTY,
-details: {
-tableName
-}
-},
-error$1
-);
-}
-}
-async insert({ tableName, record }) {
-const { key, processedRecord } = this.processRecord(tableName, record);
-try {
-await this.redis.set(key, processedRecord);
-} catch (error$1) {
-throw new error.MastraError(
-{
-id: "STORAGE_UPSTASH_STORAGE_INSERT_FAILED",
-domain: error.ErrorDomain.STORAGE,
-category: error.ErrorCategory.THIRD_PARTY,
-details: {
-tableName
-}
-},
-error$1
-);
-}
-}
-async batchInsert(input) {
-const { tableName, records } = input;
-if (!records.length) return;
-const batchSize = 1e3;
-try {
-for (let i = 0; i < records.length; i += batchSize) {
-const batch = records.slice(i, i + batchSize);
-const pipeline = this.redis.pipeline();
-for (const record of batch) {
-const { key, processedRecord } = this.processRecord(tableName, record);
-pipeline.set(key, processedRecord);
-}
-await pipeline.exec();
-}
-} catch (error$1) {
-throw new error.MastraError(
-{
-id: "STORAGE_UPSTASH_STORAGE_BATCH_INSERT_FAILED",
-domain: error.ErrorDomain.STORAGE,
-category: error.ErrorCategory.THIRD_PARTY,
-details: {
-tableName
-}
-},
-error$1
-);
-}
-}
-async load({ tableName, keys }) {
-const key = this.getKey(tableName, keys);
-try {
-const data = await this.redis.get(key);
-return data || null;
-} catch (error$1) {
-throw new error.MastraError(
-{
-id: "STORAGE_UPSTASH_STORAGE_LOAD_FAILED",
-domain: error.ErrorDomain.STORAGE,
-category: error.ErrorCategory.THIRD_PARTY,
-details: {
-tableName
-}
-},
-error$1
-);
-}
-}
-async getThreadById({ threadId }) {
-try {
-const thread = await this.load({
-tableName: storage.TABLE_THREADS,
-keys: { id: threadId }
-});
-if (!thread) return null;
-return {
-...thread,
-createdAt: this.ensureDate(thread.createdAt),
-updatedAt: this.ensureDate(thread.updatedAt),
-metadata: typeof thread.metadata === "string" ? JSON.parse(thread.metadata) : thread.metadata
-};
-} catch (error$1) {
-throw new error.MastraError(
-{
-id: "STORAGE_UPSTASH_STORAGE_GET_THREAD_BY_ID_FAILED",
-domain: error.ErrorDomain.STORAGE,
-category: error.ErrorCategory.THIRD_PARTY,
-details: {
-threadId
-}
-},
-error$1
-);
-}
-}
-/**
-* @deprecated use getThreadsByResourceIdPaginated instead
-*/
-async getThreadsByResourceId({ resourceId }) {
+async getThreadsByResourceId({ resourceId }) {
 try {
 const pattern = `${storage.TABLE_THREADS}:*`;
-const keys = await this.scanKeys(pattern);
+const keys = await this.operations.scanKeys(pattern);
 if (keys.length === 0) {
 return [];
 }
 const allThreads = [];
-const pipeline = this.
+const pipeline = this.client.pipeline();
 keys.forEach((key) => pipeline.get(key));
 const results = await pipeline.exec();
 for (let i = 0; i < results.length; i++) {
@@ -501,8 +325,8 @@ var UpstashStore = class extends storage.MastraStorage {
 if (thread && thread.resourceId === resourceId) {
 allThreads.push({
 ...thread,
-createdAt:
-updatedAt:
+createdAt: ensureDate(thread.createdAt),
+updatedAt: ensureDate(thread.updatedAt),
 metadata: typeof thread.metadata === "string" ? JSON.parse(thread.metadata) : thread.metadata
 });
 }
@@ -569,7 +393,7 @@ var UpstashStore = class extends storage.MastraStorage {
 }
 async saveThread({ thread }) {
 try {
-await this.insert({
+await this.operations.insert({
 tableName: storage.TABLE_THREADS,
 record: thread
 });
@@ -634,20 +458,20 @@ var UpstashStore = class extends storage.MastraStorage {
 }
 }
 async deleteThread({ threadId }) {
-const threadKey =
-const threadMessagesKey =
+const threadKey = getKey(storage.TABLE_THREADS, { id: threadId });
+const threadMessagesKey = getThreadMessagesKey(threadId);
 try {
-const messageIds = await this.
-const pipeline = this.
+const messageIds = await this.client.zrange(threadMessagesKey, 0, -1);
+const pipeline = this.client.pipeline();
 pipeline.del(threadKey);
 pipeline.del(threadMessagesKey);
 for (let i = 0; i < messageIds.length; i++) {
 const messageId = messageIds[i];
-const messageKey =
+const messageKey = getMessageKey(threadId, messageId);
 pipeline.del(messageKey);
 }
 await pipeline.exec();
-await this.scanAndDelete(
+await this.operations.scanAndDelete(getMessageKey(threadId, "*"));
 } catch (error$1) {
 throw new error.MastraError(
 {
@@ -684,40 +508,52 @@ var UpstashStore = class extends storage.MastraStorage {
 error$1
 );
 }
-const messagesWithIndex = messages.map((message, index) =>
-
-
-
-
-
+const messagesWithIndex = messages.map((message, index) => {
+if (!message.threadId) {
+throw new Error(
+`Expected to find a threadId for message, but couldn't find one. An unexpected error has occurred.`
+);
+}
+if (!message.resourceId) {
+throw new Error(
+`Expected to find a resourceId for message, but couldn't find one. An unexpected error has occurred.`
+);
+}
+return {
+...message,
+_index: index
+};
+});
+const threadKey = getKey(storage.TABLE_THREADS, { id: threadId });
+const existingThread = await this.client.get(threadKey);
 try {
 const batchSize = 1e3;
 for (let i = 0; i < messagesWithIndex.length; i += batchSize) {
 const batch = messagesWithIndex.slice(i, i + batchSize);
-const pipeline = this.
+const pipeline = this.client.pipeline();
 for (const message of batch) {
-const key =
+const key = getMessageKey(message.threadId, message.id);
 const createdAtScore = new Date(message.createdAt).getTime();
 const score = message._index !== void 0 ? message._index : createdAtScore;
-const existingKeyPattern =
-const keys = await this.scanKeys(existingKeyPattern);
+const existingKeyPattern = getMessageKey("*", message.id);
+const keys = await this.operations.scanKeys(existingKeyPattern);
 if (keys.length > 0) {
-const pipeline2 = this.
+const pipeline2 = this.client.pipeline();
 keys.forEach((key2) => pipeline2.get(key2));
 const results = await pipeline2.exec();
 const existingMessages = results.filter(
 (msg) => msg !== null
 );
 for (const existingMessage of existingMessages) {
-const existingMessageKey =
+const existingMessageKey = getMessageKey(existingMessage.threadId, existingMessage.id);
 if (existingMessage && existingMessage.threadId !== message.threadId) {
 pipeline.del(existingMessageKey);
-pipeline.zrem(
+pipeline.zrem(getThreadMessagesKey(existingMessage.threadId), existingMessage.id);
 }
 }
 }
 pipeline.set(key, message);
-pipeline.zadd(
+pipeline.zadd(getThreadMessagesKey(message.threadId), {
 score,
 member: message.id
 });
@@ -727,7 +563,7 @@ var UpstashStore = class extends storage.MastraStorage {
 ...existingThread,
 updatedAt: /* @__PURE__ */ new Date()
 };
-pipeline.set(threadKey,
+pipeline.set(threadKey, processRecord(storage.TABLE_THREADS, updatedThread).processedRecord);
 }
 await pipeline.exec();
 }
@@ -756,29 +592,29 @@ var UpstashStore = class extends storage.MastraStorage {
 messageIds.add(item.id);
 const itemThreadId = item.threadId || threadId;
 messageIdToThreadIds[item.id] = itemThreadId;
-const itemThreadMessagesKey =
-const rank = await this.
+const itemThreadMessagesKey = getThreadMessagesKey(itemThreadId);
+const rank = await this.client.zrank(itemThreadMessagesKey, item.id);
 if (rank === null) continue;
 if (item.withPreviousMessages) {
 const start = Math.max(0, rank - item.withPreviousMessages);
-const prevIds = rank === 0 ? [] : await this.
+const prevIds = rank === 0 ? [] : await this.client.zrange(itemThreadMessagesKey, start, rank - 1);
 prevIds.forEach((id) => {
 messageIds.add(id);
 messageIdToThreadIds[id] = itemThreadId;
 });
 }
 if (item.withNextMessages) {
-const nextIds = await this.
+const nextIds = await this.client.zrange(itemThreadMessagesKey, rank + 1, rank + item.withNextMessages);
 nextIds.forEach((id) => {
 messageIds.add(id);
 messageIdToThreadIds[id] = itemThreadId;
 });
 }
 }
-const pipeline = this.
+const pipeline = this.client.pipeline();
 Array.from(messageIds).forEach((id) => {
 const tId = messageIdToThreadIds[id] || threadId;
-pipeline.get(
+pipeline.get(getMessageKey(tId, id));
 });
 const results = await pipeline.exec();
 return results.filter((result) => result !== null);
@@ -790,23 +626,23 @@ var UpstashStore = class extends storage.MastraStorage {
 selectBy,
 format
 }) {
-const threadMessagesKey =
+const threadMessagesKey = getThreadMessagesKey(threadId);
 try {
-const allMessageIds = await this.
-const limit =
+const allMessageIds = await this.client.zrange(threadMessagesKey, 0, -1);
+const limit = storage.resolveMessageLimit({ last: selectBy?.last, defaultLimit: Number.MAX_SAFE_INTEGER });
 const messageIds = /* @__PURE__ */ new Set();
 const messageIdToThreadIds = {};
 if (limit === 0 && !selectBy?.include) {
 return [];
 }
 if (limit === Number.MAX_SAFE_INTEGER) {
-const allIds = await this.
+const allIds = await this.client.zrange(threadMessagesKey, 0, -1);
 allIds.forEach((id) => {
 messageIds.add(id);
 messageIdToThreadIds[id] = threadId;
 });
 } else if (limit > 0) {
-const latestIds = await this.
+const latestIds = await this.client.zrange(threadMessagesKey, -limit, -1);
 latestIds.forEach((id) => {
 messageIds.add(id);
 messageIdToThreadIds[id] = threadId;
@@ -818,9 +654,7 @@ var UpstashStore = class extends storage.MastraStorage {
 ...(await Promise.all(
 Array.from(messageIds).map(async (id) => {
 const tId = messageIdToThreadIds[id] || threadId;
-const byThreadId = await this.
-this.getMessageKey(tId, id)
-);
+const byThreadId = await this.client.get(getMessageKey(tId, id));
 if (byThreadId) return byThreadId;
 return null;
 })
@@ -840,10 +674,14 @@ var UpstashStore = class extends storage.MastraStorage {
 if (format === "v2") {
 return prepared.map((msg) => ({
 ...msg,
+createdAt: new Date(msg.createdAt),
 content: msg.content || { format: 2, parts: [{ type: "text", text: "" }] }
 }));
 }
-return prepared
+return prepared.map((msg) => ({
+...msg,
+createdAt: new Date(msg.createdAt)
+}));
 } catch (error$1) {
 throw new error.MastraError(
 {
@@ -863,12 +701,16 @@ var UpstashStore = class extends storage.MastraStorage {
 const { page = 0, perPage = 40, dateRange } = selectBy?.pagination || {};
 const fromDate = dateRange?.start;
 const toDate = dateRange?.end;
-const threadMessagesKey =
+const threadMessagesKey = getThreadMessagesKey(threadId);
 const messages = [];
 try {
 const includedMessages = await this._getIncludedMessages(threadId, selectBy);
 messages.push(...includedMessages);
-const allMessageIds = await this.
+const allMessageIds = await this.client.zrange(
+threadMessagesKey,
+args?.selectBy?.last ? -args.selectBy.last : 0,
+-1
+);
 if (allMessageIds.length === 0) {
 return {
 messages: [],
@@ -878,8 +720,8 @@ var UpstashStore = class extends storage.MastraStorage {
 hasMore: false
 };
 }
-const pipeline = this.
-allMessageIds.forEach((id) => pipeline.get(
+const pipeline = this.client.pipeline();
+allMessageIds.forEach((id) => pipeline.get(getMessageKey(threadId, id)));
 const results = await pipeline.exec();
 let messagesData = results.filter((msg) => msg !== null);
 if (fromDate) {
@@ -927,206 +769,795 @@ var UpstashStore = class extends storage.MastraStorage {
|
|
|
927
769
|
};
|
|
928
770
|
}
|
|
929
771
|
}
|
|
930
|
-
async
|
|
931
|
-
const { namespace = "workflows", workflowName, runId, snapshot } = params;
|
|
772
|
+
async getResourceById({ resourceId }) {
|
|
932
773
|
try {
|
|
933
|
-
|
|
934
|
-
|
|
935
|
-
|
|
936
|
-
|
|
937
|
-
|
|
938
|
-
|
|
939
|
-
|
|
774
|
+
const key = `${storage.TABLE_RESOURCES}:${resourceId}`;
|
|
775
|
+
const data = await this.client.get(key);
|
|
776
|
+
if (!data) {
|
|
777
|
+
return null;
|
|
778
|
+
}
|
|
779
|
+
return {
|
|
780
|
+
...data,
|
|
781
|
+
createdAt: new Date(data.createdAt),
|
|
782
|
+
updatedAt: new Date(data.updatedAt),
|
|
783
|
+
// Ensure workingMemory is always returned as a string, regardless of automatic parsing
|
|
784
|
+
workingMemory: typeof data.workingMemory === "object" ? JSON.stringify(data.workingMemory) : data.workingMemory,
|
|
785
|
+
metadata: typeof data.metadata === "string" ? JSON.parse(data.metadata) : data.metadata
|
|
786
|
+
};
|
|
787
|
+
} catch (error) {
|
|
788
|
+
this.logger.error("Error getting resource by ID:", error);
|
|
789
|
+
throw error;
|
|
790
|
+
}
|
|
791
|
+
}
|
|
792
|
+
async saveResource({ resource }) {
|
|
793
|
+
try {
|
|
794
|
+
const key = `${storage.TABLE_RESOURCES}:${resource.id}`;
|
|
795
|
+
const serializedResource = {
|
|
796
|
+
...resource,
|
|
797
|
+
metadata: JSON.stringify(resource.metadata),
|
|
798
|
+
createdAt: resource.createdAt.toISOString(),
|
|
799
|
+
updatedAt: resource.updatedAt.toISOString()
|
|
800
|
+
};
|
|
801
|
+
await this.client.set(key, serializedResource);
|
|
802
|
+
return resource;
|
|
803
|
+
} catch (error) {
|
|
804
|
+
this.logger.error("Error saving resource:", error);
|
|
805
|
+
throw error;
|
|
806
|
+
}
|
|
807
|
+
}
|
|
808
|
+
async updateResource({
|
|
809
|
+
resourceId,
|
|
810
|
+
workingMemory,
|
|
811
|
+
metadata
|
|
812
|
+
}) {
|
|
813
|
+
try {
|
|
814
|
+
const existingResource = await this.getResourceById({ resourceId });
|
|
815
|
+
if (!existingResource) {
|
|
816
|
+
const newResource = {
|
|
817
|
+
id: resourceId,
|
|
818
|
+
workingMemory,
|
|
819
|
+
metadata: metadata || {},
|
|
940
820
|
createdAt: /* @__PURE__ */ new Date(),
|
|
941
821
|
updatedAt: /* @__PURE__ */ new Date()
|
|
822
|
+
};
|
|
823
|
+
return this.saveResource({ resource: newResource });
|
|
824
|
+
}
|
|
825
|
+
const updatedResource = {
|
|
826
|
+
...existingResource,
|
|
827
|
+
workingMemory: workingMemory !== void 0 ? workingMemory : existingResource.workingMemory,
|
|
828
|
+
metadata: {
|
|
829
|
+
...existingResource.metadata,
|
|
830
|
+
...metadata
|
|
831
|
+
},
|
|
832
|
+
updatedAt: /* @__PURE__ */ new Date()
|
|
833
|
+
};
|
|
834
|
+
await this.saveResource({ resource: updatedResource });
|
|
835
|
+
return updatedResource;
|
|
836
|
+
} catch (error) {
|
|
837
|
+
this.logger.error("Error updating resource:", error);
|
|
838
|
+
throw error;
|
|
839
|
+
}
|
|
840
|
+
}
|
|
841
|
+
async updateMessages(args) {
|
|
842
|
+
const { messages } = args;
|
|
843
|
+
if (messages.length === 0) {
|
|
844
|
+
return [];
|
|
845
|
+
}
|
|
846
|
+
try {
|
|
847
|
+
const messageIds = messages.map((m) => m.id);
|
|
848
|
+
const existingMessages = [];
|
|
849
|
+
const messageIdToKey = {};
|
|
850
|
+
for (const messageId of messageIds) {
|
|
851
|
+
const pattern = getMessageKey("*", messageId);
|
|
852
|
+
const keys = await this.operations.scanKeys(pattern);
|
|
853
|
+
for (const key of keys) {
|
|
854
|
+
const message = await this.client.get(key);
|
|
855
|
+
if (message && message.id === messageId) {
|
|
856
|
+
existingMessages.push(message);
|
|
857
|
+
messageIdToKey[messageId] = key;
|
|
858
|
+
break;
|
|
859
|
+
}
|
|
942
860
|
}
|
|
943
|
-
}
|
|
861
|
+
}
|
|
862
|
+
if (existingMessages.length === 0) {
|
|
863
|
+
return [];
|
|
864
|
+
}
|
|
865
|
+
const threadIdsToUpdate = /* @__PURE__ */ new Set();
|
|
866
|
+
const pipeline = this.client.pipeline();
|
|
867
|
+
for (const existingMessage of existingMessages) {
|
|
868
|
+
const updatePayload = messages.find((m) => m.id === existingMessage.id);
|
|
869
|
+
if (!updatePayload) continue;
|
|
870
|
+
const { id, ...fieldsToUpdate } = updatePayload;
|
|
871
|
+
if (Object.keys(fieldsToUpdate).length === 0) continue;
|
|
872
|
+
threadIdsToUpdate.add(existingMessage.threadId);
|
|
873
|
+
if (updatePayload.threadId && updatePayload.threadId !== existingMessage.threadId) {
|
|
874
|
+
threadIdsToUpdate.add(updatePayload.threadId);
|
|
875
|
+
}
|
|
876
|
+
const updatedMessage = { ...existingMessage };
|
|
877
|
+
if (fieldsToUpdate.content) {
|
|
878
|
+
const existingContent = existingMessage.content;
|
|
879
|
+
const newContent = {
|
|
880
|
+
...existingContent,
|
|
881
|
+
...fieldsToUpdate.content,
|
|
882
|
+
// Deep merge metadata if it exists on both
|
|
883
|
+
...existingContent?.metadata && fieldsToUpdate.content.metadata ? {
|
|
884
|
+
metadata: {
|
|
885
|
+
...existingContent.metadata,
|
|
886
|
+
...fieldsToUpdate.content.metadata
|
|
887
|
+
}
|
|
888
|
+
} : {}
|
|
889
|
+
};
|
|
890
|
+
updatedMessage.content = newContent;
|
|
891
|
+
}
|
|
892
|
+
for (const key2 in fieldsToUpdate) {
|
|
893
|
+
if (Object.prototype.hasOwnProperty.call(fieldsToUpdate, key2) && key2 !== "content") {
|
|
894
|
+
updatedMessage[key2] = fieldsToUpdate[key2];
|
|
895
|
+
}
|
|
896
|
+
}
|
|
897
|
+
const key = messageIdToKey[id];
|
|
898
|
+
if (key) {
|
|
899
|
+
if (updatePayload.threadId && updatePayload.threadId !== existingMessage.threadId) {
|
|
900
|
+
const oldThreadMessagesKey = getThreadMessagesKey(existingMessage.threadId);
|
|
901
|
+
pipeline.zrem(oldThreadMessagesKey, id);
|
|
902
|
+
pipeline.del(key);
|
|
903
|
+
const newKey = getMessageKey(updatePayload.threadId, id);
|
|
904
|
+
pipeline.set(newKey, updatedMessage);
|
|
905
|
+
const newThreadMessagesKey = getThreadMessagesKey(updatePayload.threadId);
|
|
906
|
+
const score = updatedMessage._index !== void 0 ? updatedMessage._index : new Date(updatedMessage.createdAt).getTime();
|
|
907
|
+
pipeline.zadd(newThreadMessagesKey, { score, member: id });
|
|
908
|
+
} else {
|
|
909
|
+
pipeline.set(key, updatedMessage);
|
|
910
|
+
}
|
|
911
|
+
}
|
|
912
|
+
}
|
|
913
|
+
const now = /* @__PURE__ */ new Date();
|
|
914
|
+
for (const threadId of threadIdsToUpdate) {
|
|
915
|
+
if (threadId) {
|
|
916
|
+
const threadKey = getKey(storage.TABLE_THREADS, { id: threadId });
|
|
917
|
+
const existingThread = await this.client.get(threadKey);
|
|
918
|
+
if (existingThread) {
|
|
919
|
+
const updatedThread = {
|
|
920
|
+
...existingThread,
|
|
921
|
+
updatedAt: now
|
|
922
|
+
};
|
|
923
|
+
pipeline.set(threadKey, processRecord(storage.TABLE_THREADS, updatedThread).processedRecord);
|
|
924
|
+
}
|
|
925
|
+
}
|
|
926
|
+
}
|
|
927
|
+
await pipeline.exec();
|
|
928
|
+
const updatedMessages = [];
|
|
929
|
+
for (const messageId of messageIds) {
|
|
930
|
+
const key = messageIdToKey[messageId];
|
|
931
|
+
if (key) {
|
|
932
|
+
const updatedMessage = await this.client.get(key);
|
|
933
|
+
if (updatedMessage) {
|
|
934
|
+
const v2e = updatedMessage;
|
|
935
|
+
updatedMessages.push(v2e);
|
|
936
|
+
}
|
|
937
|
+
}
|
|
938
|
+
}
|
|
939
|
+
return updatedMessages;
|
|
944
940
|
} catch (error$1) {
|
|
945
941
|
throw new error.MastraError(
|
|
946
942
|
{
|
|
947
|
-
id: "
|
|
943
|
+
id: "STORAGE_UPSTASH_STORAGE_UPDATE_MESSAGES_FAILED",
|
|
948
944
|
domain: error.ErrorDomain.STORAGE,
|
|
949
945
|
category: error.ErrorCategory.THIRD_PARTY,
|
|
950
946
|
details: {
|
|
951
|
-
|
|
952
|
-
workflowName,
|
|
953
|
-
runId
|
|
947
|
+
messageIds: messages.map((m) => m.id).join(",")
|
|
954
948
|
}
|
|
955
949
|
},
|
|
956
950
|
error$1
|
|
957
951
|
);
|
|
958
952
|
}
|
|
959
953
|
}
|
|
960
|
-
|
|
961
|
-
|
|
962
|
-
|
|
963
|
-
|
|
964
|
-
|
|
965
|
-
|
|
966
|
-
|
|
954
|
+
};
|
|
955
|
+
var StoreOperationsUpstash = class extends storage.StoreOperations {
|
|
956
|
+
client;
|
|
957
|
+
constructor({ client }) {
|
|
958
|
+
super();
|
|
959
|
+
this.client = client;
|
|
960
|
+
}
|
|
961
|
+
async createTable({
|
|
962
|
+
tableName: _tableName,
|
|
963
|
+
schema: _schema
|
|
964
|
+
}) {
|
|
965
|
+
}
|
|
966
|
+
async alterTable({
|
|
967
|
+
tableName: _tableName,
|
|
968
|
+
schema: _schema,
|
|
969
|
+
ifNotExists: _ifNotExists
|
|
970
|
+
}) {
|
|
971
|
+
}
|
|
972
|
+
async clearTable({ tableName }) {
|
|
973
|
+
const pattern = `${tableName}:*`;
|
|
967
974
|
try {
|
|
968
|
-
|
|
969
|
-
if (!data) return null;
|
|
970
|
-
return data.snapshot;
|
|
975
|
+
await this.scanAndDelete(pattern);
|
|
971
976
|
} catch (error$1) {
|
|
972
977
|
throw new error.MastraError(
|
|
973
978
|
{
|
|
974
|
-
id: "
|
|
979
|
+
id: "STORAGE_UPSTASH_STORAGE_CLEAR_TABLE_FAILED",
|
|
975
980
|
domain: error.ErrorDomain.STORAGE,
|
|
976
981
|
category: error.ErrorCategory.THIRD_PARTY,
|
|
977
982
|
details: {
|
|
978
|
-
|
|
979
|
-
workflowName,
|
|
980
|
-
runId
|
|
983
|
+
tableName
|
|
981
984
|
}
|
|
982
985
|
},
|
|
983
986
|
error$1
|
|
984
987
|
);
|
|
985
988
|
}
|
|
986
989
|
}
|
|
987
|
-
|
|
988
|
-
|
|
989
|
-
|
|
990
|
-
|
|
991
|
-
|
|
992
|
-
async getEvals(options) {
|
|
990
|
+
async dropTable({ tableName }) {
|
|
991
|
+
return this.clearTable({ tableName });
|
|
992
|
+
}
|
|
993
|
+
async insert({ tableName, record }) {
|
|
994
|
+
const { key, processedRecord } = processRecord(tableName, record);
|
|
993
995
|
try {
|
|
994
|
-
|
|
995
|
-
|
|
996
|
-
|
|
997
|
-
|
|
998
|
-
|
|
999
|
-
|
|
1000
|
-
|
|
1001
|
-
|
|
1002
|
-
|
|
996
|
+
await this.client.set(key, processedRecord);
|
|
997
|
+
} catch (error$1) {
|
|
998
|
+
throw new error.MastraError(
|
|
999
|
+
{
|
|
1000
|
+
id: "STORAGE_UPSTASH_STORAGE_INSERT_FAILED",
|
|
1001
|
+
domain: error.ErrorDomain.STORAGE,
|
|
1002
|
+
category: error.ErrorCategory.THIRD_PARTY,
|
|
1003
|
+
details: {
|
|
1004
|
+
tableName
|
|
1005
|
+
}
|
|
1006
|
+
},
|
|
1007
|
+
error$1
|
|
1008
|
+
);
|
|
1009
|
+
}
|
|
1010
|
+
}
|
|
1011
|
+
async batchInsert(input) {
|
|
1012
|
+
const { tableName, records } = input;
|
|
1013
|
+
if (!records.length) return;
|
|
1014
|
+
const batchSize = 1e3;
|
|
1015
|
+
try {
|
|
1016
|
+
for (let i = 0; i < records.length; i += batchSize) {
|
|
1017
|
+
const batch = records.slice(i, i + batchSize);
|
|
1018
|
+
const pipeline = this.client.pipeline();
|
|
1019
|
+
for (const record of batch) {
|
|
1020
|
+
const { key, processedRecord } = processRecord(tableName, record);
|
|
1021
|
+
pipeline.set(key, processedRecord);
|
|
1022
|
+
}
|
|
1023
|
+
await pipeline.exec();
|
|
1024
|
+
}
|
|
1025
|
+
} catch (error$1) {
|
|
1026
|
+
throw new error.MastraError(
|
|
1027
|
+
{
|
|
1028
|
+
id: "STORAGE_UPSTASH_STORAGE_BATCH_INSERT_FAILED",
|
|
1029
|
+
domain: error.ErrorDomain.STORAGE,
|
|
1030
|
+
category: error.ErrorCategory.THIRD_PARTY,
|
|
1031
|
+
details: {
|
|
1032
|
+
tableName
|
|
1033
|
+
}
|
|
1034
|
+
},
|
|
1035
|
+
error$1
|
|
1036
|
+
);
|
|
1037
|
+
}
|
|
1038
|
+
}
|
|
1039
|
+
async load({ tableName, keys }) {
|
|
1040
|
+
const key = getKey(tableName, keys);
|
|
1041
|
+
try {
|
|
1042
|
+
const data = await this.client.get(key);
|
|
1043
|
+
return data || null;
|
|
1044
|
+
} catch (error$1) {
|
|
1045
|
+
throw new error.MastraError(
|
|
1046
|
+
{
|
|
1047
|
+
id: "STORAGE_UPSTASH_STORAGE_LOAD_FAILED",
|
|
1048
|
+
domain: error.ErrorDomain.STORAGE,
|
|
1049
|
+
category: error.ErrorCategory.THIRD_PARTY,
|
|
1050
|
+
details: {
|
|
1051
|
+
tableName
|
|
1052
|
+
}
|
|
1053
|
+
},
|
|
1054
|
+
error$1
|
|
1055
|
+
);
|
|
1056
|
+
}
|
|
1057
|
+
}
|
|
1058
|
+
async hasColumn(_tableName, _column) {
|
|
1059
|
+
return true;
|
|
1060
|
+
}
|
|
1061
|
+
async scanKeys(pattern, batchSize = 1e4) {
|
|
1062
|
+
let cursor = "0";
|
|
1063
|
+
let keys = [];
|
|
1064
|
+
do {
|
|
1065
|
+
const [nextCursor, batch] = await this.client.scan(cursor, {
|
|
1066
|
+
match: pattern,
|
|
1067
|
+
count: batchSize
|
|
1068
|
+
});
|
|
1069
|
+
keys.push(...batch);
|
|
1070
|
+
cursor = nextCursor;
|
|
1071
|
+
} while (cursor !== "0");
|
|
1072
|
+
return keys;
|
|
1073
|
+
}
|
|
1074
|
+
async scanAndDelete(pattern, batchSize = 1e4) {
|
|
1075
|
+
let cursor = "0";
|
|
1076
|
+
let totalDeleted = 0;
|
|
1077
|
+
do {
|
|
1078
|
+
const [nextCursor, keys] = await this.client.scan(cursor, {
|
|
1079
|
+
match: pattern,
|
|
1080
|
+
count: batchSize
|
|
1081
|
+
});
|
|
1082
|
+
if (keys.length > 0) {
|
|
1083
|
+
await this.client.del(...keys);
|
|
1084
|
+
totalDeleted += keys.length;
|
|
1085
|
+
}
|
|
1086
|
+
cursor = nextCursor;
|
|
1087
|
+
} while (cursor !== "0");
|
|
1088
|
+
return totalDeleted;
|
|
1089
|
+
}
|
|
1090
|
+
};
|
|
1091
|
+
function transformScoreRow(row) {
|
|
1092
|
+
const parseField = (v) => {
|
|
1093
|
+
if (typeof v === "string") {
|
|
1094
|
+
try {
|
|
1095
|
+
return JSON.parse(v);
|
|
1096
|
+
} catch {
|
|
1097
|
+
return v;
|
|
1098
|
+
}
|
|
1099
|
+
}
|
|
1100
|
+
return v;
|
|
1101
|
+
};
|
|
1102
|
+
return {
|
|
1103
|
+
...row,
|
|
1104
|
+
scorer: parseField(row.scorer),
|
|
1105
|
+
extractStepResult: parseField(row.extractStepResult),
|
|
1106
|
+
analyzeStepResult: parseField(row.analyzeStepResult),
|
|
1107
|
+
metadata: parseField(row.metadata),
|
|
1108
|
+
input: parseField(row.input),
|
|
1109
|
+
output: parseField(row.output),
|
|
1110
|
+
additionalContext: parseField(row.additionalContext),
|
|
1111
|
+
runtimeContext: parseField(row.runtimeContext),
|
|
1112
|
+
entity: parseField(row.entity),
|
|
1113
|
+
createdAt: row.createdAt,
|
|
1114
|
+
updatedAt: row.updatedAt
|
|
1115
|
+
};
|
|
1116
|
+
}
|
|
1117
|
+
var ScoresUpstash = class extends storage.ScoresStorage {
|
|
1118
|
+
client;
|
|
1119
|
+
operations;
|
|
1120
|
+
constructor({ client, operations }) {
|
|
1121
|
+
super();
|
|
1122
|
+
this.client = client;
|
|
1123
|
+
this.operations = operations;
|
|
1124
|
+
}
|
|
1125
|
+
async getScoreById({ id }) {
|
|
1126
|
+
try {
|
|
1127
|
+
const data = await this.operations.load({
|
|
1128
|
+
tableName: storage.TABLE_SCORERS,
|
|
1129
|
+
keys: { id }
|
|
1130
|
+
});
|
|
1131
|
+
if (!data) return null;
|
|
1132
|
+
return transformScoreRow(data);
|
|
1133
|
+
} catch (error$1) {
|
|
1134
|
+
throw new error.MastraError(
|
|
1135
|
+
{
|
|
1136
|
+
id: "STORAGE_UPSTASH_STORAGE_GET_SCORE_BY_ID_FAILED",
|
|
1137
|
+
domain: error.ErrorDomain.STORAGE,
|
|
1138
|
+
category: error.ErrorCategory.THIRD_PARTY,
|
|
1139
|
+
details: { id }
|
|
1140
|
+
},
|
|
1141
|
+
error$1
|
|
1142
|
+
);
|
|
1143
|
+
}
|
|
1144
|
+
}
|
|
1145
|
+
async getScoresByScorerId({
|
|
1146
|
+
scorerId,
|
|
1147
|
+
pagination = { page: 0, perPage: 20 }
|
|
1148
|
+
}) {
|
|
1149
|
+
const pattern = `${storage.TABLE_SCORERS}:*`;
|
|
1150
|
+
const keys = await this.operations.scanKeys(pattern);
|
|
1151
|
+
if (keys.length === 0) {
|
|
1152
|
+
return {
|
|
1153
|
+
scores: [],
|
|
1154
|
+
pagination: { total: 0, page: pagination.page, perPage: pagination.perPage, hasMore: false }
|
|
1155
|
+
};
|
|
1156
|
+
}
|
|
1157
|
+
const pipeline = this.client.pipeline();
|
|
1158
|
+
keys.forEach((key) => pipeline.get(key));
|
|
1159
|
+
const results = await pipeline.exec();
|
|
1160
|
+
const filtered = results.map((row) => row).filter((row) => !!row && typeof row === "object" && row.scorerId === scorerId);
|
|
1161
|
+
const total = filtered.length;
|
|
1162
|
+
const { page, perPage } = pagination;
|
|
1163
|
+
const start = page * perPage;
|
|
1164
|
+
const end = start + perPage;
|
|
1165
|
+
const paged = filtered.slice(start, end);
|
|
1166
|
+
const scores = paged.map((row) => transformScoreRow(row));
|
|
1167
|
+
return {
|
|
1168
|
+
scores,
|
|
1169
|
+
pagination: {
|
|
1170
|
+
total,
|
|
1171
|
+
page,
|
|
1172
|
+
perPage,
|
|
1173
|
+
hasMore: end < total
|
|
1174
|
+
}
|
|
1175
|
+
};
|
|
1176
|
+
}
|
|
1177
|
+
async saveScore(score) {
|
|
1178
|
+
const { key, processedRecord } = processRecord(storage.TABLE_SCORERS, score);
|
|
1179
|
+
try {
|
|
1180
|
+
await this.client.set(key, processedRecord);
|
|
1181
|
+
return { score };
|
|
1182
|
+
} catch (error$1) {
|
|
1183
|
+
throw new error.MastraError(
|
|
1184
|
+
{
|
|
1185
|
+
id: "STORAGE_UPSTASH_STORAGE_SAVE_SCORE_FAILED",
|
|
1186
|
+
domain: error.ErrorDomain.STORAGE,
|
|
1187
|
+
category: error.ErrorCategory.THIRD_PARTY,
|
|
1188
|
+
details: { id: score.id }
|
|
1189
|
+
},
|
|
1190
|
+
error$1
|
|
1191
|
+
);
|
|
1192
|
+
}
|
|
1193
|
+
}
|
|
1194
|
+
async getScoresByRunId({
|
|
1195
|
+
runId,
|
|
1196
|
+
pagination = { page: 0, perPage: 20 }
|
|
1197
|
+
}) {
|
|
1198
|
+
const pattern = `${storage.TABLE_SCORERS}:*`;
|
|
1199
|
+
const keys = await this.operations.scanKeys(pattern);
|
|
1200
|
+
if (keys.length === 0) {
|
|
1201
|
+
return {
|
|
1202
|
+
scores: [],
|
|
1203
|
+
pagination: { total: 0, page: pagination.page, perPage: pagination.perPage, hasMore: false }
|
|
1204
|
+
};
|
|
1205
|
+
}
|
|
1206
|
+
const pipeline = this.client.pipeline();
|
|
1207
|
+
keys.forEach((key) => pipeline.get(key));
|
|
1208
|
+
const results = await pipeline.exec();
|
|
1209
|
+
const filtered = results.map((row) => row).filter((row) => !!row && typeof row === "object" && row.runId === runId);
|
|
1210
|
+
const total = filtered.length;
|
|
1211
|
+
const { page, perPage } = pagination;
|
|
1212
|
+
const start = page * perPage;
|
|
1213
|
+
const end = start + perPage;
|
|
1214
|
+
const paged = filtered.slice(start, end);
|
|
1215
|
+
const scores = paged.map((row) => transformScoreRow(row));
|
|
1216
|
+
return {
|
|
1217
|
+
scores,
|
|
1218
|
+
pagination: {
|
|
1219
|
+
total,
|
|
1220
|
+
page,
|
|
1221
|
+
perPage,
|
|
1222
|
+
hasMore: end < total
|
|
1223
|
+
}
|
|
1224
|
+
};
|
|
1225
|
+
}
|
|
1226
|
+
async getScoresByEntityId({
|
|
1227
|
+
entityId,
|
|
1228
|
+
entityType,
|
|
1229
|
+
pagination = { page: 0, perPage: 20 }
|
|
1230
|
+
}) {
|
|
1231
|
+
const pattern = `${storage.TABLE_SCORERS}:*`;
|
|
1232
|
+
const keys = await this.operations.scanKeys(pattern);
|
|
1233
|
+
if (keys.length === 0) {
|
|
1234
|
+
return {
|
|
1235
|
+
scores: [],
|
|
1236
|
+
pagination: { total: 0, page: pagination.page, perPage: pagination.perPage, hasMore: false }
|
|
1237
|
+
};
|
|
1238
|
+
}
|
|
1239
|
+
const pipeline = this.client.pipeline();
|
|
1240
|
+
keys.forEach((key) => pipeline.get(key));
|
|
1241
|
+
const results = await pipeline.exec();
|
|
1242
|
+
const filtered = results.map((row) => row).filter((row) => {
|
|
1243
|
+
if (!row || typeof row !== "object") return false;
|
|
1244
|
+
if (row.entityId !== entityId) return false;
|
|
1245
|
+
if (entityType && row.entityType !== entityType) return false;
|
|
1246
|
+
return true;
|
|
1247
|
+
});
|
|
1248
|
+
const total = filtered.length;
|
|
1249
|
+
const { page, perPage } = pagination;
|
|
1250
|
+
const start = page * perPage;
|
|
1251
|
+
const end = start + perPage;
|
|
1252
|
+
const paged = filtered.slice(start, end);
|
|
1253
|
+
const scores = paged.map((row) => transformScoreRow(row));
|
|
1254
|
+
return {
|
|
1255
|
+
scores,
|
|
1256
|
+
pagination: {
|
|
1257
|
+
total,
|
|
1258
|
+
page,
|
|
1259
|
+
perPage,
|
|
1260
|
+
hasMore: end < total
|
|
1261
|
+
}
|
|
1262
|
+
};
|
|
1263
|
+
}
|
|
1264
|
+
};
|
+var TracesUpstash = class extends storage.TracesStorage {
+  client;
+  operations;
+  constructor({ client, operations }) {
+    super();
+    this.client = client;
+    this.operations = operations;
+  }
+  /**
+   * @deprecated use getTracesPaginated instead
+   */
+  async getTraces(args) {
+    if (args.fromDate || args.toDate) {
+      args.dateRange = {
+        start: args.fromDate,
+        end: args.toDate
+      };
+    }
+    try {
+      const { traces } = await this.getTracesPaginated(args);
+      return traces;
+    } catch (error$1) {
+      throw new error.MastraError(
+        {
+          id: "STORAGE_UPSTASH_STORAGE_GET_TRACES_FAILED",
+          domain: error.ErrorDomain.STORAGE,
+          category: error.ErrorCategory.THIRD_PARTY
+        },
+        error$1
+      );
+    }
+  }
+  async getTracesPaginated(args) {
+    const { name, scope, page = 0, perPage = 100, attributes, filters, dateRange } = args;
+    const fromDate = dateRange?.start;
+    const toDate = dateRange?.end;
+    try {
+      const pattern = `${storage.TABLE_TRACES}:*`;
+      const keys = await this.operations.scanKeys(pattern);
+      if (keys.length === 0) {
+        return {
+          traces: [],
+          total: 0,
           page,
-          perPage,
+          perPage: perPage || 100,
           hasMore: false
         };
       }
-      const pipeline = this.
+      const pipeline = this.client.pipeline();
       keys.forEach((key) => pipeline.get(key));
       const results = await pipeline.exec();
-      let
-
-
+      let filteredTraces = results.filter(
+        (record) => record !== null && typeof record === "object"
+      );
+      if (name) {
+        filteredTraces = filteredTraces.filter((record) => record.name?.toLowerCase().startsWith(name.toLowerCase()));
       }
-      if (
-
-
-
-
-
-
-
-
-        } catch {
-          return false;
-        }
-      });
-    } else if (type === "live") {
-      filteredEvals = filteredEvals.filter((record) => {
-        if (!record.test_info) return true;
-        try {
-          if (typeof record.test_info === "string") {
-            const parsedTestInfo = JSON.parse(record.test_info);
-            return !(parsedTestInfo && typeof parsedTestInfo === "object" && "testPath" in parsedTestInfo);
-          }
-          return !(typeof record.test_info === "object" && "testPath" in record.test_info);
-        } catch {
-          return true;
-        }
+      if (scope) {
+        filteredTraces = filteredTraces.filter((record) => record.scope === scope);
+      }
+      if (attributes) {
+        filteredTraces = filteredTraces.filter((record) => {
+          const recordAttributes = record.attributes;
+          if (!recordAttributes) return false;
+          const parsedAttributes = typeof recordAttributes === "string" ? JSON.parse(recordAttributes) : recordAttributes;
+          return Object.entries(attributes).every(([key, value]) => parsedAttributes[key] === value);
         });
       }
+      if (filters) {
+        filteredTraces = filteredTraces.filter(
+          (record) => Object.entries(filters).every(([key, value]) => record[key] === value)
+        );
+      }
       if (fromDate) {
-
-
-
-        });
+        filteredTraces = filteredTraces.filter(
+          (record) => new Date(record.createdAt).getTime() >= new Date(fromDate).getTime()
+        );
       }
       if (toDate) {
-
-
-
-        });
+        filteredTraces = filteredTraces.filter(
+          (record) => new Date(record.createdAt).getTime() <= new Date(toDate).getTime()
+        );
       }
-
-
-
-
-
-
-
-
-
+      filteredTraces.sort((a, b) => new Date(b.createdAt).getTime() - new Date(a.createdAt).getTime());
+      const transformedTraces = filteredTraces.map((record) => ({
+        id: record.id,
+        parentSpanId: record.parentSpanId,
+        traceId: record.traceId,
+        name: record.name,
+        scope: record.scope,
+        kind: record.kind,
+        status: parseJSON(record.status),
+        events: parseJSON(record.events),
+        links: parseJSON(record.links),
+        attributes: parseJSON(record.attributes),
+        startTime: record.startTime,
+        endTime: record.endTime,
+        other: parseJSON(record.other),
+        createdAt: ensureDate(record.createdAt)
+      }));
+      const total = transformedTraces.length;
+      const resolvedPerPage = perPage || 100;
+      const start = page * resolvedPerPage;
+      const end = start + resolvedPerPage;
+      const paginatedTraces = transformedTraces.slice(start, end);
       const hasMore = end < total;
-      const evals = paginatedEvals.map((record) => this.transformEvalRecord(record));
       return {
-
+        traces: paginatedTraces,
         total,
         page,
-        perPage,
+        perPage: resolvedPerPage,
         hasMore
       };
     } catch (error$1) {
-      const
-
+      const mastraError = new error.MastraError(
+        {
+          id: "STORAGE_UPSTASH_STORAGE_GET_TRACES_PAGINATED_FAILED",
+          domain: error.ErrorDomain.STORAGE,
+          category: error.ErrorCategory.THIRD_PARTY,
+          details: {
+            name: args.name || "",
+            scope: args.scope || ""
+          }
+        },
+        error$1
+      );
+      this.logger?.trackException(mastraError);
+      this.logger.error(mastraError.toString());
+      return {
+        traces: [],
+        total: 0,
+        page,
+        perPage: perPage || 100,
+        hasMore: false
+      };
+    }
+  }
+  async batchTraceInsert(args) {
+    return this.operations.batchInsert({
+      tableName: storage.TABLE_TRACES,
+      records: args.records
+    });
+  }
+};
+function parseWorkflowRun(row) {
+  let parsedSnapshot = row.snapshot;
+  if (typeof parsedSnapshot === "string") {
+    try {
+      parsedSnapshot = JSON.parse(row.snapshot);
+    } catch (e) {
+      console.warn(`Failed to parse snapshot for workflow ${row.workflow_name}: ${e}`);
+    }
+  }
+  return {
+    workflowName: row.workflow_name,
+    runId: row.run_id,
+    snapshot: parsedSnapshot,
+    createdAt: ensureDate(row.createdAt),
+    updatedAt: ensureDate(row.updatedAt),
+    resourceId: row.resourceId
+  };
+}
+var WorkflowsUpstash = class extends storage.WorkflowsStorage {
+  client;
+  operations;
+  constructor({ client, operations }) {
+    super();
+    this.client = client;
+    this.operations = operations;
+  }
+  async persistWorkflowSnapshot(params) {
+    const { namespace = "workflows", workflowName, runId, snapshot } = params;
+    try {
+      await this.operations.insert({
+        tableName: storage.TABLE_WORKFLOW_SNAPSHOT,
+        record: {
+          namespace,
+          workflow_name: workflowName,
+          run_id: runId,
+          snapshot,
+          createdAt: /* @__PURE__ */ new Date(),
+          updatedAt: /* @__PURE__ */ new Date()
+        }
+      });
+    } catch (error$1) {
+      throw new error.MastraError(
+        {
+          id: "STORAGE_UPSTASH_STORAGE_PERSIST_WORKFLOW_SNAPSHOT_FAILED",
+          domain: error.ErrorDomain.STORAGE,
+          category: error.ErrorCategory.THIRD_PARTY,
+          details: {
+            namespace,
+            workflowName,
+            runId
+          }
+        },
+        error$1
+      );
+    }
+  }
+  async loadWorkflowSnapshot(params) {
+    const { namespace = "workflows", workflowName, runId } = params;
+    const key = getKey(storage.TABLE_WORKFLOW_SNAPSHOT, {
+      namespace,
+      workflow_name: workflowName,
+      run_id: runId
+    });
+    try {
+      const data = await this.client.get(key);
+      if (!data) return null;
+      return data.snapshot;
+    } catch (error$1) {
+      throw new error.MastraError(
         {
-          id: "
+          id: "STORAGE_UPSTASH_STORAGE_LOAD_WORKFLOW_SNAPSHOT_FAILED",
           domain: error.ErrorDomain.STORAGE,
           category: error.ErrorCategory.THIRD_PARTY,
           details: {
-
-
+            namespace,
+            workflowName,
+            runId
+          }
+        },
+        error$1
+      );
+    }
+  }
+  async getWorkflowRunById({
+    runId,
+    workflowName
+  }) {
+    try {
+      const key = getKey(storage.TABLE_WORKFLOW_SNAPSHOT, { namespace: "workflows", workflow_name: workflowName, run_id: runId }) + "*";
+      const keys = await this.operations.scanKeys(key);
+      const workflows = await Promise.all(
+        keys.map(async (key2) => {
+          const data2 = await this.client.get(key2);
+          return data2;
+        })
+      );
+      const data = workflows.find((w) => w?.run_id === runId && w?.workflow_name === workflowName);
+      if (!data) return null;
+      return parseWorkflowRun(data);
+    } catch (error$1) {
+      throw new error.MastraError(
+        {
+          id: "STORAGE_UPSTASH_STORAGE_GET_WORKFLOW_RUN_BY_ID_FAILED",
+          domain: error.ErrorDomain.STORAGE,
+          category: error.ErrorCategory.THIRD_PARTY,
+          details: {
+            namespace: "workflows",
+            runId,
+            workflowName: workflowName || ""
           }
         },
         error$1
       );
-      this.logger.error(mastraError.toString());
-      this.logger?.trackException(mastraError);
-      return {
-        evals: [],
-        total: 0,
-        page,
-        perPage,
-        hasMore: false
-      };
     }
   }
   async getWorkflowRuns({
-    namespace,
     workflowName,
     fromDate,
     toDate,
     limit,
     offset,
     resourceId
-  }
+  }) {
     try {
-      let pattern =
+      let pattern = getKey(storage.TABLE_WORKFLOW_SNAPSHOT, { namespace: "workflows" }) + ":*";
       if (workflowName && resourceId) {
-        pattern =
-          namespace,
+        pattern = getKey(storage.TABLE_WORKFLOW_SNAPSHOT, {
+          namespace: "workflows",
           workflow_name: workflowName,
           run_id: "*",
           resourceId
         });
       } else if (workflowName) {
-        pattern =
+        pattern = getKey(storage.TABLE_WORKFLOW_SNAPSHOT, { namespace: "workflows", workflow_name: workflowName }) + ":*";
       } else if (resourceId) {
-        pattern =
+        pattern = getKey(storage.TABLE_WORKFLOW_SNAPSHOT, {
+          namespace: "workflows",
+          workflow_name: "*",
+          run_id: "*",
+          resourceId
+        });
       }
-      const keys = await this.scanKeys(pattern);
+      const keys = await this.operations.scanKeys(pattern);
       if (keys.length === 0) {
         return { runs: [], total: 0 };
       }
-      const pipeline = this.
+      const pipeline = this.client.pipeline();
       keys.forEach((key) => pipeline.get(key));
       const results = await pipeline.exec();
       let runs = results.map((result) => result).filter(
         (record) => record !== null && record !== void 0 && typeof record === "object" && "workflow_name" in record
-      ).filter((record) => !workflowName || record.workflow_name === workflowName).map((w) =>
+      ).filter((record) => !workflowName || record.workflow_name === workflowName).map((w) => parseWorkflowRun(w)).filter((w) => {
         if (fromDate && w.createdAt < fromDate) return false;
         if (toDate && w.createdAt > toDate) return false;
         return true;
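Illustrative sketch, not part of the package diff: TracesUpstash and WorkflowsUpstash above share one read pattern — scan the keyspace for a prefix, GET every key through a single pipeline, then filter, sort, and paginate the decoded records in memory. A simplified standalone version of that pattern is sketched below; the scanKeys helper here is a stand-in for the package's StoreOperationsUpstash.scanKeys, not its actual implementation, and the key pattern in the commented call is a placeholder:

// Simplified sketch of the scan -> pipeline -> filter -> paginate read path.
const { Redis } = require('@upstash/redis');

async function scanKeys(client, pattern) {
  const keys = [];
  let cursor = '0';
  do {
    // @upstash/redis SCAN returns [nextCursor, batchOfKeys]
    const [next, batch] = await client.scan(cursor, { match: pattern, count: 1000 });
    keys.push(...batch);
    cursor = String(next);
  } while (cursor !== '0');
  return keys;
}

async function readPage(client, pattern, predicate, page = 0, perPage = 100) {
  const keys = await scanKeys(client, pattern);
  if (keys.length === 0) return { rows: [], total: 0, hasMore: false };
  const pipeline = client.pipeline();
  keys.forEach((key) => pipeline.get(key)); // one round trip for all keys
  const results = await pipeline.exec();
  const rows = results.filter((row) => row && typeof row === 'object' && predicate(row));
  const start = page * perPage;
  const end = start + perPage;
  return { rows: rows.slice(start, end), total: rows.length, hasMore: end < rows.length };
}

// Example call (placeholders only):
// const client = new Redis({ url: '<url>', token: '<token>' });
// readPage(client, '<table>:*', (row) => row.scope === '<scope>').then(console.log);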
@@ -1143,7 +1574,7 @@ var UpstashStore = class extends storage.MastraStorage {
           domain: error.ErrorDomain.STORAGE,
           category: error.ErrorCategory.THIRD_PARTY,
           details: {
-            namespace,
+            namespace: "workflows",
             workflowName: workflowName || "",
             resourceId: resourceId || ""
           }
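Illustrative sketch, not part of the package diff: getWorkflowRuns no longer takes a namespace argument — the key pattern is now always built with namespace: "workflows" — so callers filter only by workflowName, resourceId, the date range, and limit/offset. A hedged usage sketch, assuming UpstashStore is exported from the package entry point and with placeholder credentials and names:

// Sketch only: credentials and the workflow name are placeholders.
const { UpstashStore } = require('@mastra/upstash');

const store = new UpstashStore({ url: 'https://<redis>.upstash.io', token: '<token>' });

async function recentRuns() {
  const { runs, total } = await store.getWorkflowRuns({
    workflowName: 'my-workflow', // hypothetical workflow name
    fromDate: new Date(Date.now() - 24 * 60 * 60 * 1000),
    toDate: new Date(),
    limit: 10,
    offset: 0,
  });
  // Each run is shaped by parseWorkflowRun: workflowName, runId, snapshot,
  // createdAt, updatedAt, resourceId.
  console.log(`showing ${runs.length} of ${total} runs`);
}

recentRuns().catch(console.error);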
@@ -1152,113 +1583,203 @@ var UpstashStore = class extends storage.MastraStorage {
       );
     }
   }
+};
+
+// src/storage/index.ts
+var UpstashStore = class extends storage.MastraStorage {
+  redis;
+  stores;
+  constructor(config) {
+    super({ name: "Upstash" });
+    this.redis = new redis.Redis({
+      url: config.url,
+      token: config.token
+    });
+    const operations = new StoreOperationsUpstash({ client: this.redis });
+    const traces = new TracesUpstash({ client: this.redis, operations });
+    const scores = new ScoresUpstash({ client: this.redis, operations });
+    const workflows = new WorkflowsUpstash({ client: this.redis, operations });
+    const memory = new StoreMemoryUpstash({ client: this.redis, operations });
+    const legacyEvals = new StoreLegacyEvalsUpstash({ client: this.redis, operations });
+    this.stores = {
+      operations,
+      traces,
+      scores,
+      workflows,
+      memory,
+      legacyEvals
+    };
+  }
+  get supports() {
+    return {
+      selectByIncludeResourceScope: true,
+      resourceWorkingMemory: true,
+      hasColumn: false,
+      createTable: false
+    };
+  }
+  /**
+   * @deprecated Use getEvals instead
+   */
+  async getEvalsByAgentName(agentName, type) {
+    return this.stores.legacyEvals.getEvalsByAgentName(agentName, type);
+  }
+  /**
+   * Get all evaluations with pagination and total count
+   * @param options Pagination and filtering options
+   * @returns Object with evals array and total count
+   */
+  async getEvals(options) {
+    return this.stores.legacyEvals.getEvals(options);
+  }
+  /**
+   * @deprecated use getTracesPaginated instead
+   */
+  async getTraces(args) {
+    return this.stores.traces.getTraces(args);
+  }
+  async getTracesPaginated(args) {
+    return this.stores.traces.getTracesPaginated(args);
+  }
+  async batchTraceInsert(args) {
+    return this.stores.traces.batchTraceInsert(args);
+  }
+  async createTable({
+    tableName,
+    schema
+  }) {
+    return this.stores.operations.createTable({ tableName, schema });
+  }
+  /**
+   * No-op: This backend is schemaless and does not require schema changes.
+   * @param tableName Name of the table
+   * @param schema Schema of the table
+   * @param ifNotExists Array of column names to add if they don't exist
+   */
+  async alterTable(args) {
+    return this.stores.operations.alterTable(args);
+  }
+  async clearTable({ tableName }) {
+    return this.stores.operations.clearTable({ tableName });
+  }
+  async dropTable({ tableName }) {
+    return this.stores.operations.dropTable({ tableName });
+  }
+  async insert({ tableName, record }) {
+    return this.stores.operations.insert({ tableName, record });
+  }
+  async batchInsert(input) {
+    return this.stores.operations.batchInsert(input);
+  }
+  async load({ tableName, keys }) {
+    return this.stores.operations.load({ tableName, keys });
+  }
+  async getThreadById({ threadId }) {
+    return this.stores.memory.getThreadById({ threadId });
+  }
+  /**
+   * @deprecated use getThreadsByResourceIdPaginated instead
+   */
+  async getThreadsByResourceId({ resourceId }) {
+    return this.stores.memory.getThreadsByResourceId({ resourceId });
+  }
+  async getThreadsByResourceIdPaginated(args) {
+    return this.stores.memory.getThreadsByResourceIdPaginated(args);
+  }
+  async saveThread({ thread }) {
+    return this.stores.memory.saveThread({ thread });
+  }
+  async updateThread({
+    id,
+    title,
+    metadata
+  }) {
+    return this.stores.memory.updateThread({ id, title, metadata });
+  }
+  async deleteThread({ threadId }) {
+    return this.stores.memory.deleteThread({ threadId });
+  }
+  async saveMessages(args) {
+    return this.stores.memory.saveMessages(args);
+  }
+  async getMessages({
+    threadId,
+    selectBy,
+    format
+  }) {
+    return this.stores.memory.getMessages({ threadId, selectBy, format });
+  }
+  async getMessagesPaginated(args) {
+    return this.stores.memory.getMessagesPaginated(args);
+  }
+  async persistWorkflowSnapshot(params) {
+    return this.stores.workflows.persistWorkflowSnapshot(params);
+  }
+  async loadWorkflowSnapshot(params) {
+    return this.stores.workflows.loadWorkflowSnapshot(params);
+  }
+  async getWorkflowRuns({
+    workflowName,
+    fromDate,
+    toDate,
+    limit,
+    offset,
+    resourceId
+  } = {}) {
+    return this.stores.workflows.getWorkflowRuns({ workflowName, fromDate, toDate, limit, offset, resourceId });
+  }
   async getWorkflowRunById({
-    namespace = "workflows",
     runId,
     workflowName
   }) {
-
-      const key = this.getKey(storage.TABLE_WORKFLOW_SNAPSHOT, { namespace, workflow_name: workflowName, run_id: runId }) + "*";
-      const keys = await this.scanKeys(key);
-      const workflows = await Promise.all(
-        keys.map(async (key2) => {
-          const data2 = await this.redis.get(key2);
-          return data2;
-        })
-      );
-      const data = workflows.find((w) => w?.run_id === runId && w?.workflow_name === workflowName);
-      if (!data) return null;
-      return this.parseWorkflowRun(data);
-    } catch (error$1) {
-      throw new error.MastraError(
-        {
-          id: "STORAGE_UPSTASH_STORAGE_GET_WORKFLOW_RUN_BY_ID_FAILED",
-          domain: error.ErrorDomain.STORAGE,
-          category: error.ErrorCategory.THIRD_PARTY,
-          details: {
-            namespace,
-            runId,
-            workflowName: workflowName || ""
-          }
-        },
-        error$1
-      );
-    }
+    return this.stores.workflows.getWorkflowRunById({ runId, workflowName });
   }
   async close() {
   }
-  async updateMessages(
-    this.
-    throw new Error("Method not implemented");
+  async updateMessages(args) {
+    return this.stores.memory.updateMessages(args);
   }
   async getResourceById({ resourceId }) {
-
-      const key = `${storage.TABLE_RESOURCES}:${resourceId}`;
-      const data = await this.redis.get(key);
-      if (!data) {
-        return null;
-      }
-      return {
-        ...data,
-        createdAt: new Date(data.createdAt),
-        updatedAt: new Date(data.updatedAt),
-        // Ensure workingMemory is always returned as a string, regardless of automatic parsing
-        workingMemory: typeof data.workingMemory === "object" ? JSON.stringify(data.workingMemory) : data.workingMemory,
-        metadata: typeof data.metadata === "string" ? JSON.parse(data.metadata) : data.metadata
-      };
-    } catch (error) {
-      this.logger.error("Error getting resource by ID:", error);
-      throw error;
-    }
+    return this.stores.memory.getResourceById({ resourceId });
   }
   async saveResource({ resource }) {
-
-      const key = `${storage.TABLE_RESOURCES}:${resource.id}`;
-      const serializedResource = {
-        ...resource,
-        metadata: JSON.stringify(resource.metadata),
-        createdAt: resource.createdAt.toISOString(),
-        updatedAt: resource.updatedAt.toISOString()
-      };
-      await this.redis.set(key, serializedResource);
-      return resource;
-    } catch (error) {
-      this.logger.error("Error saving resource:", error);
-      throw error;
-    }
+    return this.stores.memory.saveResource({ resource });
   }
   async updateResource({
     resourceId,
     workingMemory,
     metadata
   }) {
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-    }
-
-
-
+    return this.stores.memory.updateResource({ resourceId, workingMemory, metadata });
+  }
+  async getScoreById({ id: _id }) {
+    return this.stores.scores.getScoreById({ id: _id });
+  }
+  async saveScore(score) {
+    return this.stores.scores.saveScore(score);
+  }
+  async getScoresByRunId({
+    runId,
+    pagination
+  }) {
+    return this.stores.scores.getScoresByRunId({ runId, pagination });
+  }
+  async getScoresByEntityId({
+    entityId,
+    entityType,
+    pagination
+  }) {
+    return this.stores.scores.getScoresByEntityId({
+      entityId,
+      entityType,
+      pagination
+    });
+  }
+  async getScoresByScorerId({
+    scorerId,
+    pagination
+  }) {
+    return this.stores.scores.getScoresByScorerId({ scorerId, pagination });
   }
 };
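Illustrative sketch, not part of the package diff: the rewritten UpstashStore is a thin facade — its constructor creates one @upstash/redis client, wires it into the domain stores (operations, traces, scores, workflows, memory, legacyEvals), and every public method delegates to the matching domain store. A hedged sketch of how the refactored store is constructed and called, assuming UpstashStore is exported from @mastra/upstash and using placeholder credentials and ids:

// Sketch of the facade wiring shown in the bundle; credentials and ids are placeholders.
const { UpstashStore } = require('@mastra/upstash');

const store = new UpstashStore({
  url: 'https://<your-upstash-redis>.upstash.io',
  token: '<your-rest-token>',
});

async function demo() {
  // Each call below is a thin delegation to a domain store built in the constructor.
  const thread = await store.getThreadById({ threadId: 'thread-1' }); // -> stores.memory
  const { traces, hasMore } = await store.getTracesPaginated({ page: 0, perPage: 50 }); // -> stores.traces
  const snapshot = await store.loadWorkflowSnapshot({ // -> stores.workflows
    workflowName: 'my-workflow',
    runId: 'run-1',
  });
  console.log({ thread, traceCount: traces.length, hasMore, snapshot });
}

demo().catch(console.error);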
 var UpstashFilterTranslator = class extends filter.BaseFilterTranslator {