@mastra/upstash 0.0.0-working-memory-per-user-20250620163010 → 0.0.0-zod-v4-compat-part-2-20250822105954

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (57)
  1. package/CHANGELOG.md +268 -6
  2. package/LICENSE.md +12 -4
  3. package/README.md +98 -0
  4. package/dist/index.cjs +1629 -626
  5. package/dist/index.cjs.map +1 -0
  6. package/dist/index.d.ts +4 -4
  7. package/dist/index.d.ts.map +1 -0
  8. package/dist/index.js +1645 -642
  9. package/dist/index.js.map +1 -0
  10. package/dist/storage/domains/legacy-evals/index.d.ts +28 -0
  11. package/dist/storage/domains/legacy-evals/index.d.ts.map +1 -0
  12. package/dist/storage/domains/memory/index.d.ts +86 -0
  13. package/dist/storage/domains/memory/index.d.ts.map +1 -0
  14. package/dist/storage/domains/operations/index.d.ts +40 -0
  15. package/dist/storage/domains/operations/index.d.ts.map +1 -0
  16. package/dist/storage/domains/scores/index.d.ts +65 -0
  17. package/dist/storage/domains/scores/index.d.ts.map +1 -0
  18. package/dist/storage/domains/traces/index.d.ts +28 -0
  19. package/dist/storage/domains/traces/index.d.ts.map +1 -0
  20. package/dist/storage/domains/utils.d.ts +12 -0
  21. package/dist/storage/domains/utils.d.ts.map +1 -0
  22. package/dist/storage/domains/workflows/index.d.ts +36 -0
  23. package/dist/storage/domains/workflows/index.d.ts.map +1 -0
  24. package/dist/storage/index.d.ts +208 -0
  25. package/dist/storage/index.d.ts.map +1 -0
  26. package/dist/vector/filter.d.ts +21 -0
  27. package/dist/vector/filter.d.ts.map +1 -0
  28. package/dist/vector/index.d.ts +79 -0
  29. package/dist/vector/index.d.ts.map +1 -0
  30. package/dist/vector/prompt.d.ts +6 -0
  31. package/dist/vector/prompt.d.ts.map +1 -0
  32. package/dist/vector/types.d.ts +23 -0
  33. package/dist/vector/types.d.ts.map +1 -0
  34. package/docker-compose.yaml +1 -1
  35. package/package.json +12 -12
  36. package/src/storage/domains/legacy-evals/index.ts +279 -0
  37. package/src/storage/domains/memory/index.ts +972 -0
  38. package/src/storage/domains/operations/index.ts +168 -0
  39. package/src/storage/domains/scores/index.ts +216 -0
  40. package/src/storage/domains/traces/index.ts +172 -0
  41. package/src/storage/domains/utils.ts +57 -0
  42. package/src/storage/domains/workflows/index.ts +243 -0
  43. package/src/storage/index.test.ts +13 -0
  44. package/src/storage/index.ts +149 -1078
  45. package/src/vector/filter.test.ts +7 -6
  46. package/src/vector/filter.ts +10 -4
  47. package/src/vector/hybrid.test.ts +1455 -0
  48. package/src/vector/index.test.ts +4 -4
  49. package/src/vector/index.ts +155 -69
  50. package/src/vector/types.ts +26 -0
  51. package/tsconfig.build.json +9 -0
  52. package/tsconfig.json +1 -1
  53. package/tsup.config.ts +22 -0
  54. package/dist/_tsup-dts-rollup.d.cts +0 -318
  55. package/dist/_tsup-dts-rollup.d.ts +0 -318
  56. package/dist/index.d.cts +0 -4
  57. package/src/storage/upstash.test.ts +0 -1386
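The bulk of this release is a refactor of the storage layer: the monolithic UpstashStore methods from src/storage/index.ts are split into per-domain classes (StoreLegacyEvalsUpstash, StoreMemoryUpstash, StoreOperationsUpstash, ScoresUpstash, plus traces and workflows domains under src/storage/domains/), and console.error logging is replaced with structured MastraError handling. A minimal TypeScript sketch of how the new domain classes compose, inferred only from the constructors visible in the diff below — the import of the domain classes from '@mastra/upstash' is an assumption (the diff shows them compiled into dist/index.cjs but not their export surface), and UpstashStore remains the documented entry point:

import { Redis } from '@upstash/redis';
// Assumed export path; the diff only shows these classes inside the compiled bundle.
import { StoreOperationsUpstash, StoreMemoryUpstash, ScoresUpstash } from '@mastra/upstash';

const client = new Redis({
  url: process.env.UPSTASH_REDIS_REST_URL!,
  token: process.env.UPSTASH_REDIS_REST_TOKEN!,
});

// Shared low-level helper: key construction, SCAN-based key listing/deletion,
// and pipelined inserts. The other domain stores delegate to it.
const operations = new StoreOperationsUpstash({ client });

// Each domain store receives the same REST client plus the shared operations helper.
const memory = new StoreMemoryUpstash({ client, operations });
const scores = new ScoresUpstash({ client, operations });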
package/dist/index.cjs CHANGED
@@ -1,158 +1,51 @@
  'use strict';

- var agent = require('@mastra/core/agent');
  var storage = require('@mastra/core/storage');
  var redis = require('@upstash/redis');
+ var error = require('@mastra/core/error');
+ var agent = require('@mastra/core/agent');
  var vector = require('@mastra/core/vector');
  var vector$1 = require('@upstash/vector');
  var filter = require('@mastra/core/vector/filter');

  // src/storage/index.ts
- var UpstashStore = class extends storage.MastraStorage {
- redis;
- constructor(config) {
- super({ name: "Upstash" });
- this.redis = new redis.Redis({
- url: config.url,
- token: config.token
- });
- }
- get supports() {
- return {
- selectByIncludeResourceScope: true,
- resourceWorkingMemory: true
- };
- }
- transformEvalRecord(record) {
- let result = record.result;
- if (typeof result === "string") {
- try {
- result = JSON.parse(result);
- } catch {
- console.warn("Failed to parse result JSON:");
- }
- }
- let testInfo = record.test_info;
- if (typeof testInfo === "string") {
- try {
- testInfo = JSON.parse(testInfo);
- } catch {
- console.warn("Failed to parse test_info JSON:");
- }
- }
- return {
- agentName: record.agent_name,
- input: record.input,
- output: record.output,
- result,
- metricName: record.metric_name,
- instructions: record.instructions,
- testInfo,
- globalRunId: record.global_run_id,
- runId: record.run_id,
- createdAt: typeof record.created_at === "string" ? record.created_at : record.created_at instanceof Date ? record.created_at.toISOString() : (/* @__PURE__ */ new Date()).toISOString()
- };
- }
- parseJSON(value) {
- if (typeof value === "string") {
- try {
- return JSON.parse(value);
- } catch {
- return value;
- }
+ function transformEvalRecord(record) {
+ let result = record.result;
+ if (typeof result === "string") {
+ try {
+ result = JSON.parse(result);
+ } catch {
+ console.warn("Failed to parse result JSON:");
  }
- return value;
- }
- getKey(tableName, keys) {
- const keyParts = Object.entries(keys).filter(([_, value]) => value !== void 0).map(([key, value]) => `${key}:${value}`);
- return `${tableName}:${keyParts.join(":")}`;
- }
- /**
- * Scans for keys matching the given pattern using SCAN and returns them as an array.
- * @param pattern Redis key pattern, e.g. "table:*"
- * @param batchSize Number of keys to scan per batch (default: 1000)
- */
- async scanKeys(pattern, batchSize = 1e4) {
- let cursor = "0";
- let keys = [];
- do {
- const [nextCursor, batch] = await this.redis.scan(cursor, {
- match: pattern,
- count: batchSize
- });
- keys.push(...batch);
- cursor = nextCursor;
- } while (cursor !== "0");
- return keys;
- }
- /**
- * Deletes all keys matching the given pattern using SCAN and DEL in batches.
- * @param pattern Redis key pattern, e.g. "table:*"
- * @param batchSize Number of keys to delete per batch (default: 1000)
- */
- async scanAndDelete(pattern, batchSize = 1e4) {
- let cursor = "0";
- let totalDeleted = 0;
- do {
- const [nextCursor, keys] = await this.redis.scan(cursor, {
- match: pattern,
- count: batchSize
- });
- if (keys.length > 0) {
- await this.redis.del(...keys);
- totalDeleted += keys.length;
- }
- cursor = nextCursor;
- } while (cursor !== "0");
- return totalDeleted;
- }
- getMessageKey(threadId, messageId) {
- const key = this.getKey(storage.TABLE_MESSAGES, { threadId, id: messageId });
- return key;
  }
- getThreadMessagesKey(threadId) {
- return `thread:${threadId}:messages`;
- }
- parseWorkflowRun(row) {
- let parsedSnapshot = row.snapshot;
- if (typeof parsedSnapshot === "string") {
- try {
- parsedSnapshot = JSON.parse(row.snapshot);
- } catch (e) {
- console.warn(`Failed to parse snapshot for workflow ${row.workflow_name}: ${e}`);
- }
+ let testInfo = record.test_info;
+ if (typeof testInfo === "string") {
+ try {
+ testInfo = JSON.parse(testInfo);
+ } catch {
+ console.warn("Failed to parse test_info JSON:");
  }
- return {
- workflowName: row.workflow_name,
- runId: row.run_id,
- snapshot: parsedSnapshot,
- createdAt: this.ensureDate(row.createdAt),
- updatedAt: this.ensureDate(row.updatedAt),
- resourceId: row.resourceId
- };
  }
- processRecord(tableName, record) {
- let key;
- if (tableName === storage.TABLE_MESSAGES) {
- key = this.getKey(tableName, { threadId: record.threadId, id: record.id });
- } else if (tableName === storage.TABLE_WORKFLOW_SNAPSHOT) {
- key = this.getKey(tableName, {
- namespace: record.namespace || "workflows",
- workflow_name: record.workflow_name,
- run_id: record.run_id,
- ...record.resourceId ? { resourceId: record.resourceId } : {}
- });
- } else if (tableName === storage.TABLE_EVALS) {
- key = this.getKey(tableName, { id: record.run_id });
- } else {
- key = this.getKey(tableName, { id: record.id });
- }
- const processedRecord = {
- ...record,
- createdAt: this.serializeDate(record.createdAt),
- updatedAt: this.serializeDate(record.updatedAt)
- };
- return { key, processedRecord };
+ return {
+ agentName: record.agent_name,
+ input: record.input,
+ output: record.output,
+ result,
+ metricName: record.metric_name,
+ instructions: record.instructions,
+ testInfo,
+ globalRunId: record.global_run_id,
+ runId: record.run_id,
+ createdAt: typeof record.created_at === "string" ? record.created_at : record.created_at instanceof Date ? record.created_at.toISOString() : (/* @__PURE__ */ new Date()).toISOString()
+ };
+ }
+ var StoreLegacyEvalsUpstash = class extends storage.LegacyEvalsStorage {
+ client;
+ operations;
+ constructor({ client, operations }) {
+ super();
+ this.client = client;
+ this.operations = operations;
  }
  /**
  * @deprecated Use getEvals instead
@@ -160,11 +53,11 @@ var UpstashStore = class extends storage.MastraStorage {
  async getEvalsByAgentName(agentName, type) {
  try {
  const pattern = `${storage.TABLE_EVALS}:*`;
- const keys = await this.scanKeys(pattern);
+ const keys = await this.operations.scanKeys(pattern);
  if (keys.length === 0) {
  return [];
  }
- const pipeline = this.redis.pipeline();
+ const pipeline = this.client.pipeline();
  keys.forEach((key) => pipeline.get(key));
  const results = await pipeline.exec();
  const nonNullRecords = results.filter(
@@ -198,170 +91,220 @@ var UpstashStore = class extends storage.MastraStorage {
  }
  });
  }
- return filteredEvals.map((record) => this.transformEvalRecord(record));
- } catch (error) {
- console.error("Failed to get evals for the specified agent:", error);
+ return filteredEvals.map((record) => transformEvalRecord(record));
+ } catch (error$1) {
+ const mastraError = new error.MastraError(
+ {
+ id: "STORAGE_UPSTASH_STORAGE_GET_EVALS_BY_AGENT_NAME_FAILED",
+ domain: error.ErrorDomain.STORAGE,
+ category: error.ErrorCategory.THIRD_PARTY,
+ details: { agentName }
+ },
+ error$1
+ );
+ this.logger?.trackException(mastraError);
+ this.logger.error(mastraError.toString());
  return [];
  }
  }
  /**
- * @deprecated use getTracesPaginated instead
+ * Get all evaluations with pagination and total count
+ * @param options Pagination and filtering options
+ * @returns Object with evals array and total count
  */
- async getTraces(args) {
- if (args.fromDate || args.toDate) {
- args.dateRange = {
- start: args.fromDate,
- end: args.toDate
- };
- }
- const { traces } = await this.getTracesPaginated(args);
- return traces;
- }
- async getTracesPaginated(args) {
- const { name, scope, page = 0, perPage = 100, attributes, filters, dateRange } = args;
- const fromDate = dateRange?.start;
- const toDate = dateRange?.end;
+ async getEvals(options) {
  try {
- const pattern = `${storage.TABLE_TRACES}:*`;
- const keys = await this.scanKeys(pattern);
+ const { agentName, type, page = 0, perPage = 100, dateRange } = options || {};
+ const fromDate = dateRange?.start;
+ const toDate = dateRange?.end;
+ const pattern = `${storage.TABLE_EVALS}:*`;
+ const keys = await this.operations.scanKeys(pattern);
  if (keys.length === 0) {
  return {
- traces: [],
+ evals: [],
  total: 0,
  page,
- perPage: perPage || 100,
+ perPage,
  hasMore: false
  };
  }
- const pipeline = this.redis.pipeline();
+ const pipeline = this.client.pipeline();
  keys.forEach((key) => pipeline.get(key));
  const results = await pipeline.exec();
- let filteredTraces = results.filter(
- (record) => record !== null && typeof record === "object"
- );
- if (name) {
- filteredTraces = filteredTraces.filter((record) => record.name?.toLowerCase().startsWith(name.toLowerCase()));
- }
- if (scope) {
- filteredTraces = filteredTraces.filter((record) => record.scope === scope);
+ let filteredEvals = results.map((result) => result).filter((record) => record !== null && typeof record === "object");
+ if (agentName) {
+ filteredEvals = filteredEvals.filter((record) => record.agent_name === agentName);
  }
- if (attributes) {
- filteredTraces = filteredTraces.filter((record) => {
- const recordAttributes = record.attributes;
- if (!recordAttributes) return false;
- const parsedAttributes = typeof recordAttributes === "string" ? JSON.parse(recordAttributes) : recordAttributes;
- return Object.entries(attributes).every(([key, value]) => parsedAttributes[key] === value);
+ if (type === "test") {
+ filteredEvals = filteredEvals.filter((record) => {
+ if (!record.test_info) return false;
+ try {
+ if (typeof record.test_info === "string") {
+ const parsedTestInfo = JSON.parse(record.test_info);
+ return parsedTestInfo && typeof parsedTestInfo === "object" && "testPath" in parsedTestInfo;
+ }
+ return typeof record.test_info === "object" && "testPath" in record.test_info;
+ } catch {
+ return false;
+ }
+ });
+ } else if (type === "live") {
+ filteredEvals = filteredEvals.filter((record) => {
+ if (!record.test_info) return true;
+ try {
+ if (typeof record.test_info === "string") {
+ const parsedTestInfo = JSON.parse(record.test_info);
+ return !(parsedTestInfo && typeof parsedTestInfo === "object" && "testPath" in parsedTestInfo);
+ }
+ return !(typeof record.test_info === "object" && "testPath" in record.test_info);
+ } catch {
+ return true;
+ }
  });
- }
- if (filters) {
- filteredTraces = filteredTraces.filter(
- (record) => Object.entries(filters).every(([key, value]) => record[key] === value)
- );
  }
  if (fromDate) {
- filteredTraces = filteredTraces.filter(
- (record) => new Date(record.createdAt).getTime() >= new Date(fromDate).getTime()
- );
+ filteredEvals = filteredEvals.filter((record) => {
+ const createdAt = new Date(record.created_at || record.createdAt || 0);
+ return createdAt.getTime() >= fromDate.getTime();
+ });
  }
  if (toDate) {
- filteredTraces = filteredTraces.filter(
- (record) => new Date(record.createdAt).getTime() <= new Date(toDate).getTime()
- );
+ filteredEvals = filteredEvals.filter((record) => {
+ const createdAt = new Date(record.created_at || record.createdAt || 0);
+ return createdAt.getTime() <= toDate.getTime();
+ });
  }
- filteredTraces.sort((a, b) => new Date(b.createdAt).getTime() - new Date(a.createdAt).getTime());
- const transformedTraces = filteredTraces.map((record) => ({
- id: record.id,
- parentSpanId: record.parentSpanId,
- traceId: record.traceId,
- name: record.name,
- scope: record.scope,
- kind: record.kind,
- status: this.parseJSON(record.status),
- events: this.parseJSON(record.events),
- links: this.parseJSON(record.links),
- attributes: this.parseJSON(record.attributes),
- startTime: record.startTime,
- endTime: record.endTime,
- other: this.parseJSON(record.other),
- createdAt: this.ensureDate(record.createdAt)
- }));
- const total = transformedTraces.length;
- const resolvedPerPage = perPage || 100;
- const start = page * resolvedPerPage;
- const end = start + resolvedPerPage;
- const paginatedTraces = transformedTraces.slice(start, end);
+ filteredEvals.sort((a, b) => {
+ const dateA = new Date(a.created_at || a.createdAt || 0).getTime();
+ const dateB = new Date(b.created_at || b.createdAt || 0).getTime();
+ return dateB - dateA;
+ });
+ const total = filteredEvals.length;
+ const start = page * perPage;
+ const end = start + perPage;
+ const paginatedEvals = filteredEvals.slice(start, end);
  const hasMore = end < total;
+ const evals = paginatedEvals.map((record) => transformEvalRecord(record));
  return {
- traces: paginatedTraces,
+ evals,
  total,
  page,
- perPage: resolvedPerPage,
+ perPage,
  hasMore
  };
- } catch (error) {
- console.error("Failed to get traces:", error);
+ } catch (error$1) {
+ const { page = 0, perPage = 100 } = options || {};
+ const mastraError = new error.MastraError(
+ {
+ id: "STORAGE_UPSTASH_STORAGE_GET_EVALS_FAILED",
+ domain: error.ErrorDomain.STORAGE,
+ category: error.ErrorCategory.THIRD_PARTY,
+ details: {
+ page,
+ perPage
+ }
+ },
+ error$1
+ );
+ this.logger.error(mastraError.toString());
+ this.logger?.trackException(mastraError);
  return {
- traces: [],
+ evals: [],
  total: 0,
  page,
- perPage: perPage || 100,
+ perPage,
  hasMore: false
  };
  }
  }
- async createTable({
- tableName,
- schema
- }) {
- await this.redis.set(`schema:${tableName}`, schema);
- }
- /**
- * No-op: This backend is schemaless and does not require schema changes.
- * @param tableName Name of the table
- * @param schema Schema of the table
- * @param ifNotExists Array of column names to add if they don't exist
- */
- async alterTable(_args) {
- }
- async clearTable({ tableName }) {
- const pattern = `${tableName}:*`;
- await this.scanAndDelete(pattern);
- }
- async insert({ tableName, record }) {
- const { key, processedRecord } = this.processRecord(tableName, record);
- await this.redis.set(key, processedRecord);
- }
- async batchInsert(input) {
- const { tableName, records } = input;
- if (!records.length) return;
- const batchSize = 1e3;
- for (let i = 0; i < records.length; i += batchSize) {
- const batch = records.slice(i, i + batchSize);
- const pipeline = this.redis.pipeline();
- for (const record of batch) {
- const { key, processedRecord } = this.processRecord(tableName, record);
- pipeline.set(key, processedRecord);
- }
- await pipeline.exec();
+ };
+ function ensureDate(value) {
+ if (!value) return null;
+ if (value instanceof Date) return value;
+ if (typeof value === "string") return new Date(value);
+ if (typeof value === "number") return new Date(value);
+ return null;
+ }
+ function parseJSON(value) {
+ if (typeof value === "string") {
+ try {
+ return JSON.parse(value);
+ } catch {
+ return value;
  }
  }
- async load({ tableName, keys }) {
- const key = this.getKey(tableName, keys);
- const data = await this.redis.get(key);
- return data || null;
+ return value;
+ }
+ function getKey(tableName, keys) {
+ const keyParts = Object.entries(keys).filter(([_, value]) => value !== void 0).map(([key, value]) => `${key}:${value}`);
+ return `${tableName}:${keyParts.join(":")}`;
+ }
+ function processRecord(tableName, record) {
+ let key;
+ if (tableName === storage.TABLE_MESSAGES) {
+ key = getKey(tableName, { threadId: record.threadId, id: record.id });
+ } else if (tableName === storage.TABLE_WORKFLOW_SNAPSHOT) {
+ key = getKey(tableName, {
+ namespace: record.namespace || "workflows",
+ workflow_name: record.workflow_name,
+ run_id: record.run_id,
+ ...record.resourceId ? { resourceId: record.resourceId } : {}
+ });
+ } else if (tableName === storage.TABLE_EVALS) {
+ key = getKey(tableName, { id: record.run_id });
+ } else {
+ key = getKey(tableName, { id: record.id });
+ }
+ const processedRecord = {
+ ...record,
+ createdAt: storage.serializeDate(record.createdAt),
+ updatedAt: storage.serializeDate(record.updatedAt)
+ };
+ return { key, processedRecord };
+ }
+
+ // src/storage/domains/memory/index.ts
+ function getThreadMessagesKey(threadId) {
+ return `thread:${threadId}:messages`;
+ }
+ function getMessageKey(threadId, messageId) {
+ const key = getKey(storage.TABLE_MESSAGES, { threadId, id: messageId });
+ return key;
+ }
+ var StoreMemoryUpstash = class extends storage.MemoryStorage {
+ client;
+ operations;
+ constructor({ client, operations }) {
+ super();
+ this.client = client;
+ this.operations = operations;
  }
  async getThreadById({ threadId }) {
- const thread = await this.load({
- tableName: storage.TABLE_THREADS,
- keys: { id: threadId }
- });
- if (!thread) return null;
- return {
- ...thread,
- createdAt: this.ensureDate(thread.createdAt),
- updatedAt: this.ensureDate(thread.updatedAt),
- metadata: typeof thread.metadata === "string" ? JSON.parse(thread.metadata) : thread.metadata
- };
+ try {
+ const thread = await this.operations.load({
+ tableName: storage.TABLE_THREADS,
+ keys: { id: threadId }
+ });
+ if (!thread) return null;
+ return {
+ ...thread,
+ createdAt: ensureDate(thread.createdAt),
+ updatedAt: ensureDate(thread.updatedAt),
+ metadata: typeof thread.metadata === "string" ? JSON.parse(thread.metadata) : thread.metadata
+ };
+ } catch (error$1) {
+ throw new error.MastraError(
+ {
+ id: "STORAGE_UPSTASH_STORAGE_GET_THREAD_BY_ID_FAILED",
+ domain: error.ErrorDomain.STORAGE,
+ category: error.ErrorCategory.THIRD_PARTY,
+ details: {
+ threadId
+ }
+ },
+ error$1
+ );
+ }
  }
  /**
  * @deprecated use getThreadsByResourceIdPaginated instead
@@ -369,12 +312,12 @@ var UpstashStore = class extends storage.MastraStorage {
  async getThreadsByResourceId({ resourceId }) {
  try {
  const pattern = `${storage.TABLE_THREADS}:*`;
- const keys = await this.scanKeys(pattern);
+ const keys = await this.operations.scanKeys(pattern);
  if (keys.length === 0) {
  return [];
  }
  const allThreads = [];
- const pipeline = this.redis.pipeline();
+ const pipeline = this.client.pipeline();
  keys.forEach((key) => pipeline.get(key));
  const results = await pipeline.exec();
  for (let i = 0; i < results.length; i++) {
@@ -382,16 +325,28 @@ var UpstashStore = class extends storage.MastraStorage {
  if (thread && thread.resourceId === resourceId) {
  allThreads.push({
  ...thread,
- createdAt: this.ensureDate(thread.createdAt),
- updatedAt: this.ensureDate(thread.updatedAt),
+ createdAt: ensureDate(thread.createdAt),
+ updatedAt: ensureDate(thread.updatedAt),
  metadata: typeof thread.metadata === "string" ? JSON.parse(thread.metadata) : thread.metadata
  });
  }
  }
  allThreads.sort((a, b) => b.createdAt.getTime() - a.createdAt.getTime());
  return allThreads;
- } catch (error) {
- console.error("Error in getThreadsByResourceId:", error);
+ } catch (error$1) {
+ const mastraError = new error.MastraError(
+ {
+ id: "STORAGE_UPSTASH_STORAGE_GET_THREADS_BY_RESOURCE_ID_FAILED",
+ domain: error.ErrorDomain.STORAGE,
+ category: error.ErrorCategory.THIRD_PARTY,
+ details: {
+ resourceId
+ }
+ },
+ error$1
+ );
+ this.logger?.trackException(mastraError);
+ this.logger.error(mastraError.toString());
  return [];
  }
  }
@@ -411,8 +366,22 @@ var UpstashStore = class extends storage.MastraStorage {
  perPage,
  hasMore
  };
- } catch (error) {
- console.error("Error in getThreadsByResourceIdPaginated:", error);
+ } catch (error$1) {
+ const mastraError = new error.MastraError(
+ {
+ id: "STORAGE_UPSTASH_STORAGE_GET_THREADS_BY_RESOURCE_ID_PAGINATED_FAILED",
+ domain: error.ErrorDomain.STORAGE,
+ category: error.ErrorCategory.THIRD_PARTY,
+ details: {
+ resourceId,
+ page,
+ perPage
+ }
+ },
+ error$1
+ );
+ this.logger?.trackException(mastraError);
+ this.logger.error(mastraError.toString());
  return {
  threads: [],
  total: 0,
@@ -423,11 +392,28 @@ var UpstashStore = class extends storage.MastraStorage {
  }
  }
  async saveThread({ thread }) {
- await this.insert({
- tableName: storage.TABLE_THREADS,
- record: thread
- });
- return thread;
+ try {
+ await this.operations.insert({
+ tableName: storage.TABLE_THREADS,
+ record: thread
+ });
+ return thread;
+ } catch (error$1) {
+ const mastraError = new error.MastraError(
+ {
+ id: "STORAGE_UPSTASH_STORAGE_SAVE_THREAD_FAILED",
+ domain: error.ErrorDomain.STORAGE,
+ category: error.ErrorCategory.THIRD_PARTY,
+ details: {
+ threadId: thread.id
+ }
+ },
+ error$1
+ );
+ this.logger?.trackException(mastraError);
+ this.logger.error(mastraError.toString());
+ throw mastraError;
+ }
  }
  async updateThread({
  id,
@@ -436,7 +422,15 @@ var UpstashStore = class extends storage.MastraStorage {
  }) {
  const thread = await this.getThreadById({ threadId: id });
  if (!thread) {
- throw new Error(`Thread ${id} not found`);
+ throw new error.MastraError({
+ id: "STORAGE_UPSTASH_STORAGE_UPDATE_THREAD_FAILED",
+ domain: error.ErrorDomain.STORAGE,
+ category: error.ErrorCategory.USER,
+ text: `Thread ${id} not found`,
+ details: {
+ threadId: id
+ }
+ });
  }
  const updatedThread = {
  ...thread,
@@ -446,67 +440,149 @@ var UpstashStore = class extends storage.MastraStorage {
  ...metadata
  }
  };
- await this.saveThread({ thread: updatedThread });
- return updatedThread;
+ try {
+ await this.saveThread({ thread: updatedThread });
+ return updatedThread;
+ } catch (error$1) {
+ throw new error.MastraError(
+ {
+ id: "STORAGE_UPSTASH_STORAGE_UPDATE_THREAD_FAILED",
+ domain: error.ErrorDomain.STORAGE,
+ category: error.ErrorCategory.THIRD_PARTY,
+ details: {
+ threadId: id
+ }
+ },
+ error$1
+ );
+ }
  }
  async deleteThread({ threadId }) {
- const threadKey = this.getKey(storage.TABLE_THREADS, { id: threadId });
- const threadMessagesKey = this.getThreadMessagesKey(threadId);
- const messageIds = await this.redis.zrange(threadMessagesKey, 0, -1);
- const pipeline = this.redis.pipeline();
- pipeline.del(threadKey);
- pipeline.del(threadMessagesKey);
- for (let i = 0; i < messageIds.length; i++) {
- const messageId = messageIds[i];
- const messageKey = this.getMessageKey(threadId, messageId);
- pipeline.del(messageKey);
- }
- await pipeline.exec();
- await this.scanAndDelete(this.getMessageKey(threadId, "*"));
+ const threadKey = getKey(storage.TABLE_THREADS, { id: threadId });
+ const threadMessagesKey = getThreadMessagesKey(threadId);
+ try {
+ const messageIds = await this.client.zrange(threadMessagesKey, 0, -1);
+ const pipeline = this.client.pipeline();
+ pipeline.del(threadKey);
+ pipeline.del(threadMessagesKey);
+ for (let i = 0; i < messageIds.length; i++) {
+ const messageId = messageIds[i];
+ const messageKey = getMessageKey(threadId, messageId);
+ pipeline.del(messageKey);
+ }
+ await pipeline.exec();
+ await this.operations.scanAndDelete(getMessageKey(threadId, "*"));
+ } catch (error$1) {
+ throw new error.MastraError(
+ {
+ id: "STORAGE_UPSTASH_STORAGE_DELETE_THREAD_FAILED",
+ domain: error.ErrorDomain.STORAGE,
+ category: error.ErrorCategory.THIRD_PARTY,
+ details: {
+ threadId
+ }
+ },
+ error$1
+ );
+ }
  }
  async saveMessages(args) {
  const { messages, format = "v1" } = args;
  if (messages.length === 0) return [];
  const threadId = messages[0]?.threadId;
- if (!threadId) {
- throw new Error("Thread ID is required");
- }
- const thread = await this.getThreadById({ threadId });
- if (!thread) {
- throw new Error(`Thread ${threadId} not found`);
+ try {
+ if (!threadId) {
+ throw new Error("Thread ID is required");
+ }
+ const thread = await this.getThreadById({ threadId });
+ if (!thread) {
+ throw new Error(`Thread ${threadId} not found`);
+ }
+ } catch (error$1) {
+ throw new error.MastraError(
+ {
+ id: "STORAGE_UPSTASH_STORAGE_SAVE_MESSAGES_INVALID_ARGS",
+ domain: error.ErrorDomain.STORAGE,
+ category: error.ErrorCategory.USER
+ },
+ error$1
+ );
  }
- const messagesWithIndex = messages.map((message, index) => ({
- ...message,
- _index: index
- }));
- const threadKey = this.getKey(storage.TABLE_THREADS, { id: threadId });
- const existingThread = await this.redis.get(threadKey);
- const batchSize = 1e3;
- for (let i = 0; i < messagesWithIndex.length; i += batchSize) {
- const batch = messagesWithIndex.slice(i, i + batchSize);
- const pipeline = this.redis.pipeline();
- for (const message of batch) {
- const key = this.getMessageKey(message.threadId, message.id);
- const createdAtScore = new Date(message.createdAt).getTime();
- const score = message._index !== void 0 ? message._index : createdAtScore;
- pipeline.set(key, message);
- pipeline.zadd(this.getThreadMessagesKey(message.threadId), {
- score,
- member: message.id
- });
+ const messagesWithIndex = messages.map((message, index) => {
+ if (!message.threadId) {
+ throw new Error(
+ `Expected to find a threadId for message, but couldn't find one. An unexpected error has occurred.`
+ );
  }
- if (i === 0 && existingThread) {
- const updatedThread = {
- ...existingThread,
- updatedAt: /* @__PURE__ */ new Date()
- };
- pipeline.set(threadKey, this.processRecord(storage.TABLE_THREADS, updatedThread).processedRecord);
+ if (!message.resourceId) {
+ throw new Error(
+ `Expected to find a resourceId for message, but couldn't find one. An unexpected error has occurred.`
+ );
  }
- await pipeline.exec();
+ return {
+ ...message,
+ _index: index
+ };
+ });
+ const threadKey = getKey(storage.TABLE_THREADS, { id: threadId });
+ const existingThread = await this.client.get(threadKey);
+ try {
+ const batchSize = 1e3;
+ for (let i = 0; i < messagesWithIndex.length; i += batchSize) {
+ const batch = messagesWithIndex.slice(i, i + batchSize);
+ const pipeline = this.client.pipeline();
+ for (const message of batch) {
+ const key = getMessageKey(message.threadId, message.id);
+ const createdAtScore = new Date(message.createdAt).getTime();
+ const score = message._index !== void 0 ? message._index : createdAtScore;
+ const existingKeyPattern = getMessageKey("*", message.id);
+ const keys = await this.operations.scanKeys(existingKeyPattern);
+ if (keys.length > 0) {
+ const pipeline2 = this.client.pipeline();
+ keys.forEach((key2) => pipeline2.get(key2));
+ const results = await pipeline2.exec();
+ const existingMessages = results.filter(
+ (msg) => msg !== null
+ );
+ for (const existingMessage of existingMessages) {
+ const existingMessageKey = getMessageKey(existingMessage.threadId, existingMessage.id);
+ if (existingMessage && existingMessage.threadId !== message.threadId) {
+ pipeline.del(existingMessageKey);
+ pipeline.zrem(getThreadMessagesKey(existingMessage.threadId), existingMessage.id);
+ }
+ }
+ }
+ pipeline.set(key, message);
+ pipeline.zadd(getThreadMessagesKey(message.threadId), {
+ score,
+ member: message.id
+ });
+ }
+ if (i === 0 && existingThread) {
+ const updatedThread = {
+ ...existingThread,
+ updatedAt: /* @__PURE__ */ new Date()
+ };
+ pipeline.set(threadKey, processRecord(storage.TABLE_THREADS, updatedThread).processedRecord);
+ }
+ await pipeline.exec();
+ }
+ const list = new agent.MessageList().add(messages, "memory");
+ if (format === `v2`) return list.get.all.v2();
+ return list.get.all.v1();
+ } catch (error$1) {
+ throw new error.MastraError(
+ {
+ id: "STORAGE_UPSTASH_STORAGE_SAVE_MESSAGES_FAILED",
+ domain: error.ErrorDomain.STORAGE,
+ category: error.ErrorCategory.THIRD_PARTY,
+ details: {
+ threadId
+ }
+ },
+ error$1
+ );
  }
- const list = new agent.MessageList().add(messages, "memory");
- if (format === `v2`) return list.get.all.v2();
- return list.get.all.v1();
  }
  async _getIncludedMessages(threadId, selectBy) {
  const messageIds = /* @__PURE__ */ new Set();
@@ -516,29 +592,29 @@ var UpstashStore = class extends storage.MastraStorage {
  messageIds.add(item.id);
  const itemThreadId = item.threadId || threadId;
  messageIdToThreadIds[item.id] = itemThreadId;
- const itemThreadMessagesKey = this.getThreadMessagesKey(itemThreadId);
- const rank = await this.redis.zrank(itemThreadMessagesKey, item.id);
+ const itemThreadMessagesKey = getThreadMessagesKey(itemThreadId);
+ const rank = await this.client.zrank(itemThreadMessagesKey, item.id);
  if (rank === null) continue;
  if (item.withPreviousMessages) {
  const start = Math.max(0, rank - item.withPreviousMessages);
- const prevIds = rank === 0 ? [] : await this.redis.zrange(itemThreadMessagesKey, start, rank - 1);
+ const prevIds = rank === 0 ? [] : await this.client.zrange(itemThreadMessagesKey, start, rank - 1);
  prevIds.forEach((id) => {
  messageIds.add(id);
  messageIdToThreadIds[id] = itemThreadId;
  });
  }
  if (item.withNextMessages) {
- const nextIds = await this.redis.zrange(itemThreadMessagesKey, rank + 1, rank + item.withNextMessages);
+ const nextIds = await this.client.zrange(itemThreadMessagesKey, rank + 1, rank + item.withNextMessages);
  nextIds.forEach((id) => {
  messageIds.add(id);
  messageIdToThreadIds[id] = itemThreadId;
  });
  }
  }
- const pipeline = this.redis.pipeline();
+ const pipeline = this.client.pipeline();
  Array.from(messageIds).forEach((id) => {
  const tId = messageIdToThreadIds[id] || threadId;
- pipeline.get(this.getMessageKey(tId, id));
+ pipeline.get(getMessageKey(tId, id));
  });
  const results = await pipeline.exec();
  return results.filter((result) => result !== null);
@@ -550,69 +626,91 @@ var UpstashStore = class extends storage.MastraStorage {
  selectBy,
  format
  }) {
- const threadMessagesKey = this.getThreadMessagesKey(threadId);
- const allMessageIds = await this.redis.zrange(threadMessagesKey, 0, -1);
- const limit = this.resolveMessageLimit({ last: selectBy?.last, defaultLimit: Number.MAX_SAFE_INTEGER });
- const messageIds = /* @__PURE__ */ new Set();
- const messageIdToThreadIds = {};
- if (limit === 0 && !selectBy?.include) {
- return [];
- }
- if (limit === Number.MAX_SAFE_INTEGER) {
- const allIds = await this.redis.zrange(threadMessagesKey, 0, -1);
- allIds.forEach((id) => {
- messageIds.add(id);
- messageIdToThreadIds[id] = threadId;
+ const threadMessagesKey = getThreadMessagesKey(threadId);
+ try {
+ const allMessageIds = await this.client.zrange(threadMessagesKey, 0, -1);
+ const limit = storage.resolveMessageLimit({ last: selectBy?.last, defaultLimit: Number.MAX_SAFE_INTEGER });
+ const messageIds = /* @__PURE__ */ new Set();
+ const messageIdToThreadIds = {};
+ if (limit === 0 && !selectBy?.include) {
+ return [];
+ }
+ if (limit === Number.MAX_SAFE_INTEGER) {
+ const allIds = await this.client.zrange(threadMessagesKey, 0, -1);
+ allIds.forEach((id) => {
+ messageIds.add(id);
+ messageIdToThreadIds[id] = threadId;
+ });
+ } else if (limit > 0) {
+ const latestIds = await this.client.zrange(threadMessagesKey, -limit, -1);
+ latestIds.forEach((id) => {
+ messageIds.add(id);
+ messageIdToThreadIds[id] = threadId;
+ });
+ }
+ const includedMessages = await this._getIncludedMessages(threadId, selectBy);
+ const messages = [
+ ...includedMessages,
+ ...(await Promise.all(
+ Array.from(messageIds).map(async (id) => {
+ const tId = messageIdToThreadIds[id] || threadId;
+ const byThreadId = await this.client.get(getMessageKey(tId, id));
+ if (byThreadId) return byThreadId;
+ return null;
+ })
+ )).filter((msg) => msg !== null)
+ ];
+ messages.sort((a, b) => allMessageIds.indexOf(a.id) - allMessageIds.indexOf(b.id));
+ const seen = /* @__PURE__ */ new Set();
+ const dedupedMessages = messages.filter((row) => {
+ if (seen.has(row.id)) return false;
+ seen.add(row.id);
+ return true;
  });
- } else if (limit > 0) {
- const latestIds = await this.redis.zrange(threadMessagesKey, -limit, -1);
- latestIds.forEach((id) => {
- messageIds.add(id);
- messageIdToThreadIds[id] = threadId;
+ const prepared = dedupedMessages.filter((message) => message !== null && message !== void 0).map((message) => {
+ const { _index, ...messageWithoutIndex } = message;
+ return messageWithoutIndex;
  });
- }
- const includedMessages = await this._getIncludedMessages(threadId, selectBy);
- const messages = [
- ...includedMessages,
- ...(await Promise.all(
- Array.from(messageIds).map(async (id) => {
- const tId = messageIdToThreadIds[id] || threadId;
- const byThreadId = await this.redis.get(this.getMessageKey(tId, id));
- if (byThreadId) return byThreadId;
- return null;
- })
- )).filter((msg) => msg !== null)
- ];
- messages.sort((a, b) => allMessageIds.indexOf(a.id) - allMessageIds.indexOf(b.id));
- const seen = /* @__PURE__ */ new Set();
- const dedupedMessages = messages.filter((row) => {
- if (seen.has(row.id)) return false;
- seen.add(row.id);
- return true;
- });
- const prepared = dedupedMessages.filter((message) => message !== null && message !== void 0).map((message) => {
- const { _index, ...messageWithoutIndex } = message;
- return messageWithoutIndex;
- });
- if (format === "v2") {
+ if (format === "v2") {
+ return prepared.map((msg) => ({
+ ...msg,
+ createdAt: new Date(msg.createdAt),
+ content: msg.content || { format: 2, parts: [{ type: "text", text: "" }] }
+ }));
+ }
  return prepared.map((msg) => ({
  ...msg,
- content: msg.content || { format: 2, parts: [{ type: "text", text: "" }] }
+ createdAt: new Date(msg.createdAt)
  }));
+ } catch (error$1) {
+ throw new error.MastraError(
+ {
+ id: "STORAGE_UPSTASH_STORAGE_GET_MESSAGES_FAILED",
+ domain: error.ErrorDomain.STORAGE,
+ category: error.ErrorCategory.THIRD_PARTY,
+ details: {
+ threadId
+ }
+ },
+ error$1
+ );
  }
- return prepared;
  }
  async getMessagesPaginated(args) {
  const { threadId, selectBy, format } = args;
  const { page = 0, perPage = 40, dateRange } = selectBy?.pagination || {};
  const fromDate = dateRange?.start;
  const toDate = dateRange?.end;
- const threadMessagesKey = this.getThreadMessagesKey(threadId);
+ const threadMessagesKey = getThreadMessagesKey(threadId);
  const messages = [];
- const includedMessages = await this._getIncludedMessages(threadId, selectBy);
- messages.push(...includedMessages);
  try {
- const allMessageIds = await this.redis.zrange(threadMessagesKey, 0, -1);
+ const includedMessages = await this._getIncludedMessages(threadId, selectBy);
+ messages.push(...includedMessages);
+ const allMessageIds = await this.client.zrange(
+ threadMessagesKey,
+ args?.selectBy?.last ? -args.selectBy.last : 0,
+ -1
+ );
  if (allMessageIds.length === 0) {
  return {
  messages: [],
@@ -622,8 +720,8 @@ var UpstashStore = class extends storage.MastraStorage {
  hasMore: false
  };
  }
- const pipeline = this.redis.pipeline();
- allMessageIds.forEach((id) => pipeline.get(this.getMessageKey(threadId, id)));
+ const pipeline = this.client.pipeline();
+ allMessageIds.forEach((id) => pipeline.get(getMessageKey(threadId, id)));
  const results = await pipeline.exec();
  let messagesData = results.filter((msg) => msg !== null);
  if (fromDate) {
@@ -648,8 +746,20 @@ var UpstashStore = class extends storage.MastraStorage {
  perPage,
  hasMore
  };
- } catch (error) {
- console.error("Failed to get paginated messages:", error);
+ } catch (error$1) {
+ const mastraError = new error.MastraError(
+ {
+ id: "STORAGE_UPSTASH_STORAGE_GET_MESSAGES_PAGINATED_FAILED",
+ domain: error.ErrorDomain.STORAGE,
+ category: error.ErrorCategory.THIRD_PARTY,
+ details: {
+ threadId
+ }
+ },
+ error$1
+ );
+ this.logger.error(mastraError.toString());
+ this.logger?.trackException(mastraError);
  return {
  messages: [],
  total: 0,
@@ -659,161 +769,849 @@ var UpstashStore = class extends storage.MastraStorage {
659
769
  };
660
770
  }
661
771
  }
662
- async persistWorkflowSnapshot(params) {
663
- const { namespace = "workflows", workflowName, runId, snapshot } = params;
664
- await this.insert({
665
- tableName: storage.TABLE_WORKFLOW_SNAPSHOT,
666
- record: {
667
- namespace,
668
- workflow_name: workflowName,
669
- run_id: runId,
670
- snapshot,
671
- createdAt: /* @__PURE__ */ new Date(),
772
+ async getResourceById({ resourceId }) {
773
+ try {
774
+ const key = `${storage.TABLE_RESOURCES}:${resourceId}`;
775
+ const data = await this.client.get(key);
776
+ if (!data) {
777
+ return null;
778
+ }
779
+ return {
780
+ ...data,
781
+ createdAt: new Date(data.createdAt),
782
+ updatedAt: new Date(data.updatedAt),
783
+ // Ensure workingMemory is always returned as a string, regardless of automatic parsing
784
+ workingMemory: typeof data.workingMemory === "object" ? JSON.stringify(data.workingMemory) : data.workingMemory,
785
+ metadata: typeof data.metadata === "string" ? JSON.parse(data.metadata) : data.metadata
786
+ };
787
+ } catch (error) {
788
+ this.logger.error("Error getting resource by ID:", error);
789
+ throw error;
790
+ }
791
+ }
792
+ async saveResource({ resource }) {
793
+ try {
794
+ const key = `${storage.TABLE_RESOURCES}:${resource.id}`;
795
+ const serializedResource = {
796
+ ...resource,
797
+ metadata: JSON.stringify(resource.metadata),
798
+ createdAt: resource.createdAt.toISOString(),
799
+ updatedAt: resource.updatedAt.toISOString()
800
+ };
801
+ await this.client.set(key, serializedResource);
802
+ return resource;
803
+ } catch (error) {
804
+ this.logger.error("Error saving resource:", error);
805
+ throw error;
806
+ }
807
+ }
808
+ async updateResource({
809
+ resourceId,
810
+ workingMemory,
811
+ metadata
812
+ }) {
813
+ try {
814
+ const existingResource = await this.getResourceById({ resourceId });
815
+ if (!existingResource) {
816
+ const newResource = {
817
+ id: resourceId,
818
+ workingMemory,
819
+ metadata: metadata || {},
820
+ createdAt: /* @__PURE__ */ new Date(),
821
+ updatedAt: /* @__PURE__ */ new Date()
822
+ };
823
+ return this.saveResource({ resource: newResource });
824
+ }
825
+ const updatedResource = {
826
+ ...existingResource,
827
+ workingMemory: workingMemory !== void 0 ? workingMemory : existingResource.workingMemory,
828
+ metadata: {
829
+ ...existingResource.metadata,
830
+ ...metadata
831
+ },
672
832
  updatedAt: /* @__PURE__ */ new Date()
833
+ };
834
+ await this.saveResource({ resource: updatedResource });
835
+ return updatedResource;
836
+ } catch (error) {
837
+ this.logger.error("Error updating resource:", error);
838
+ throw error;
839
+ }
840
+ }
841
+ async updateMessages(args) {
842
+ const { messages } = args;
843
+ if (messages.length === 0) {
844
+ return [];
845
+ }
846
+ try {
847
+ const messageIds = messages.map((m) => m.id);
848
+ const existingMessages = [];
849
+ const messageIdToKey = {};
850
+ for (const messageId of messageIds) {
851
+ const pattern = getMessageKey("*", messageId);
852
+ const keys = await this.operations.scanKeys(pattern);
853
+ for (const key of keys) {
854
+ const message = await this.client.get(key);
855
+ if (message && message.id === messageId) {
856
+ existingMessages.push(message);
857
+ messageIdToKey[messageId] = key;
858
+ break;
859
+ }
860
+ }
673
861
  }
674
- });
862
+ if (existingMessages.length === 0) {
863
+ return [];
864
+ }
865
+ const threadIdsToUpdate = /* @__PURE__ */ new Set();
866
+ const pipeline = this.client.pipeline();
867
+ for (const existingMessage of existingMessages) {
868
+ const updatePayload = messages.find((m) => m.id === existingMessage.id);
869
+ if (!updatePayload) continue;
870
+ const { id, ...fieldsToUpdate } = updatePayload;
871
+ if (Object.keys(fieldsToUpdate).length === 0) continue;
872
+ threadIdsToUpdate.add(existingMessage.threadId);
873
+ if (updatePayload.threadId && updatePayload.threadId !== existingMessage.threadId) {
874
+ threadIdsToUpdate.add(updatePayload.threadId);
875
+ }
876
+ const updatedMessage = { ...existingMessage };
877
+ if (fieldsToUpdate.content) {
878
+ const existingContent = existingMessage.content;
879
+ const newContent = {
880
+ ...existingContent,
881
+ ...fieldsToUpdate.content,
882
+ // Deep merge metadata if it exists on both
883
+ ...existingContent?.metadata && fieldsToUpdate.content.metadata ? {
884
+ metadata: {
885
+ ...existingContent.metadata,
886
+ ...fieldsToUpdate.content.metadata
887
+ }
888
+ } : {}
889
+ };
890
+ updatedMessage.content = newContent;
891
+ }
892
+ for (const key2 in fieldsToUpdate) {
893
+ if (Object.prototype.hasOwnProperty.call(fieldsToUpdate, key2) && key2 !== "content") {
894
+ updatedMessage[key2] = fieldsToUpdate[key2];
895
+ }
896
+ }
897
+ const key = messageIdToKey[id];
898
+ if (key) {
899
+ if (updatePayload.threadId && updatePayload.threadId !== existingMessage.threadId) {
900
+ const oldThreadMessagesKey = getThreadMessagesKey(existingMessage.threadId);
901
+ pipeline.zrem(oldThreadMessagesKey, id);
902
+ pipeline.del(key);
903
+ const newKey = getMessageKey(updatePayload.threadId, id);
904
+ pipeline.set(newKey, updatedMessage);
905
+ const newThreadMessagesKey = getThreadMessagesKey(updatePayload.threadId);
906
+ const score = updatedMessage._index !== void 0 ? updatedMessage._index : new Date(updatedMessage.createdAt).getTime();
907
+ pipeline.zadd(newThreadMessagesKey, { score, member: id });
908
+ } else {
909
+ pipeline.set(key, updatedMessage);
910
+ }
911
+ }
912
+ }
913
+ const now = /* @__PURE__ */ new Date();
914
+ for (const threadId of threadIdsToUpdate) {
915
+ if (threadId) {
916
+ const threadKey = getKey(storage.TABLE_THREADS, { id: threadId });
917
+ const existingThread = await this.client.get(threadKey);
918
+ if (existingThread) {
919
+ const updatedThread = {
920
+ ...existingThread,
921
+ updatedAt: now
922
+ };
923
+ pipeline.set(threadKey, processRecord(storage.TABLE_THREADS, updatedThread).processedRecord);
924
+ }
925
+ }
926
+ }
927
+ await pipeline.exec();
928
+ const updatedMessages = [];
929
+ for (const messageId of messageIds) {
930
+ const key = messageIdToKey[messageId];
931
+ if (key) {
932
+ const updatedMessage = await this.client.get(key);
933
+ if (updatedMessage) {
934
+ const v2e = updatedMessage;
935
+ updatedMessages.push(v2e);
936
+ }
937
+ }
938
+ }
939
+ return updatedMessages;
940
+ } catch (error$1) {
941
+ throw new error.MastraError(
942
+ {
943
+ id: "STORAGE_UPSTASH_STORAGE_UPDATE_MESSAGES_FAILED",
944
+ domain: error.ErrorDomain.STORAGE,
945
+ category: error.ErrorCategory.THIRD_PARTY,
946
+ details: {
947
+ messageIds: messages.map((m) => m.id).join(",")
948
+ }
949
+ },
950
+ error$1
951
+ );
952
+ }
675
953
  }
676
- async loadWorkflowSnapshot(params) {
677
- const { namespace = "workflows", workflowName, runId } = params;
678
- const key = this.getKey(storage.TABLE_WORKFLOW_SNAPSHOT, {
679
- namespace,
680
- workflow_name: workflowName,
681
- run_id: runId
954
+ async deleteMessages(messageIds) {
955
+ if (!messageIds || messageIds.length === 0) {
956
+ return;
957
+ }
958
+ try {
959
+ const threadIds = /* @__PURE__ */ new Set();
960
+ const messageKeys = [];
961
+ for (const messageId of messageIds) {
962
+ const pattern = getMessageKey("*", messageId);
963
+ const keys = await this.operations.scanKeys(pattern);
964
+ for (const key of keys) {
965
+ const message = await this.client.get(key);
966
+ if (message && message.id === messageId) {
967
+ messageKeys.push(key);
968
+ if (message.threadId) {
969
+ threadIds.add(message.threadId);
970
+ }
971
+ break;
972
+ }
973
+ }
974
+ }
975
+ if (messageKeys.length === 0) {
976
+ return;
977
+ }
978
+ const pipeline = this.client.pipeline();
979
+ for (const key of messageKeys) {
980
+ pipeline.del(key);
981
+ }
982
+ if (threadIds.size > 0) {
983
+ for (const threadId of threadIds) {
984
+ const threadKey = getKey(storage.TABLE_THREADS, { id: threadId });
985
+ const thread = await this.client.get(threadKey);
986
+ if (thread) {
987
+ const updatedThread = {
988
+ ...thread,
989
+ updatedAt: /* @__PURE__ */ new Date()
990
+ };
991
+ pipeline.set(threadKey, processRecord(storage.TABLE_THREADS, updatedThread).processedRecord);
992
+ }
993
+ }
994
+ }
995
+ await pipeline.exec();
996
+ } catch (error$1) {
997
+ throw new error.MastraError(
998
+ {
999
+ id: "STORAGE_UPSTASH_DELETE_MESSAGES_FAILED",
1000
+ domain: error.ErrorDomain.STORAGE,
1001
+ category: error.ErrorCategory.THIRD_PARTY,
1002
+ details: { messageIds: messageIds.join(", ") }
1003
+ },
1004
+ error$1
1005
+ );
1006
+ }
1007
+ }
1008
+ };
1009
+ var StoreOperationsUpstash = class extends storage.StoreOperations {
1010
+ client;
1011
+ constructor({ client }) {
1012
+ super();
1013
+ this.client = client;
1014
+ }
1015
+ async createTable({
1016
+ tableName: _tableName,
1017
+ schema: _schema
1018
+ }) {
1019
+ }
1020
+ async alterTable({
1021
+ tableName: _tableName,
1022
+ schema: _schema,
1023
+ ifNotExists: _ifNotExists
1024
+ }) {
1025
+ }
1026
+ async clearTable({ tableName }) {
1027
+ const pattern = `${tableName}:*`;
1028
+ try {
1029
+ await this.scanAndDelete(pattern);
1030
+ } catch (error$1) {
1031
+ throw new error.MastraError(
1032
+ {
1033
+ id: "STORAGE_UPSTASH_STORAGE_CLEAR_TABLE_FAILED",
1034
+ domain: error.ErrorDomain.STORAGE,
1035
+ category: error.ErrorCategory.THIRD_PARTY,
1036
+ details: {
1037
+ tableName
1038
+ }
1039
+ },
1040
+ error$1
1041
+ );
1042
+ }
1043
+ }
1044
+ async dropTable({ tableName }) {
1045
+ return this.clearTable({ tableName });
1046
+ }
1047
+ async insert({ tableName, record }) {
1048
+ const { key, processedRecord } = processRecord(tableName, record);
1049
+ try {
1050
+ await this.client.set(key, processedRecord);
1051
+ } catch (error$1) {
1052
+ throw new error.MastraError(
1053
+ {
1054
+ id: "STORAGE_UPSTASH_STORAGE_INSERT_FAILED",
1055
+ domain: error.ErrorDomain.STORAGE,
1056
+ category: error.ErrorCategory.THIRD_PARTY,
1057
+ details: {
1058
+ tableName
1059
+ }
1060
+ },
1061
+ error$1
1062
+ );
1063
+ }
1064
+ }
1065
+ async batchInsert(input) {
1066
+ const { tableName, records } = input;
1067
+ if (!records.length) return;
1068
+ const batchSize = 1e3;
1069
+ try {
1070
+ for (let i = 0; i < records.length; i += batchSize) {
1071
+ const batch = records.slice(i, i + batchSize);
1072
+ const pipeline = this.client.pipeline();
1073
+ for (const record of batch) {
1074
+ const { key, processedRecord } = processRecord(tableName, record);
1075
+ pipeline.set(key, processedRecord);
1076
+ }
1077
+ await pipeline.exec();
1078
+ }
1079
+ } catch (error$1) {
1080
+ throw new error.MastraError(
1081
+ {
1082
+ id: "STORAGE_UPSTASH_STORAGE_BATCH_INSERT_FAILED",
1083
+ domain: error.ErrorDomain.STORAGE,
1084
+ category: error.ErrorCategory.THIRD_PARTY,
1085
+ details: {
1086
+ tableName
1087
+ }
1088
+ },
1089
+ error$1
1090
+ );
1091
+ }
1092
+ }
1093
+ async load({ tableName, keys }) {
1094
+ const key = getKey(tableName, keys);
1095
+ try {
1096
+ const data = await this.client.get(key);
1097
+ return data || null;
1098
+ } catch (error$1) {
1099
+ throw new error.MastraError(
1100
+ {
1101
+ id: "STORAGE_UPSTASH_STORAGE_LOAD_FAILED",
1102
+ domain: error.ErrorDomain.STORAGE,
1103
+ category: error.ErrorCategory.THIRD_PARTY,
1104
+ details: {
1105
+ tableName
1106
+ }
1107
+ },
1108
+ error$1
1109
+ );
1110
+ }
1111
+ }
1112
+ async hasColumn(_tableName, _column) {
1113
+ return true;
1114
+ }
1115
+ async scanKeys(pattern, batchSize = 1e4) {
1116
+ let cursor = "0";
1117
+ let keys = [];
1118
+ do {
1119
+ const [nextCursor, batch] = await this.client.scan(cursor, {
1120
+ match: pattern,
1121
+ count: batchSize
1122
+ });
1123
+ keys.push(...batch);
1124
+ cursor = nextCursor;
1125
+ } while (cursor !== "0");
1126
+ return keys;
1127
+ }
1128
+ async scanAndDelete(pattern, batchSize = 1e4) {
1129
+ let cursor = "0";
1130
+ let totalDeleted = 0;
1131
+ do {
1132
+ const [nextCursor, keys] = await this.client.scan(cursor, {
1133
+ match: pattern,
1134
+ count: batchSize
1135
+ });
1136
+ if (keys.length > 0) {
1137
+ await this.client.del(...keys);
1138
+ totalDeleted += keys.length;
1139
+ }
1140
+ cursor = nextCursor;
1141
+ } while (cursor !== "0");
1142
+ return totalDeleted;
1143
+ }
1144
+ };
+ function transformScoreRow(row) {
+ const parseField = (v) => {
+ if (typeof v === "string") {
+ try {
+ return JSON.parse(v);
+ } catch {
+ return v;
+ }
+ }
+ return v;
+ };
+ return {
+ ...row,
+ scorer: parseField(row.scorer),
+ extractStepResult: parseField(row.extractStepResult),
+ analyzeStepResult: parseField(row.analyzeStepResult),
+ metadata: parseField(row.metadata),
+ input: parseField(row.input),
+ output: parseField(row.output),
+ additionalContext: parseField(row.additionalContext),
+ runtimeContext: parseField(row.runtimeContext),
+ entity: parseField(row.entity),
+ createdAt: row.createdAt,
+ updatedAt: row.updatedAt
+ };
+ }
+ var ScoresUpstash = class extends storage.ScoresStorage {
+ client;
+ operations;
+ constructor({ client, operations }) {
+ super();
+ this.client = client;
+ this.operations = operations;
+ }
+ async getScoreById({ id }) {
+ try {
+ const data = await this.operations.load({
+ tableName: storage.TABLE_SCORERS,
+ keys: { id }
+ });
+ if (!data) return null;
+ return transformScoreRow(data);
+ } catch (error$1) {
+ throw new error.MastraError(
+ {
+ id: "STORAGE_UPSTASH_STORAGE_GET_SCORE_BY_ID_FAILED",
+ domain: error.ErrorDomain.STORAGE,
+ category: error.ErrorCategory.THIRD_PARTY,
+ details: { id }
+ },
+ error$1
+ );
+ }
+ }
+ async getScoresByScorerId({
+ scorerId,
+ pagination = { page: 0, perPage: 20 }
+ }) {
+ const pattern = `${storage.TABLE_SCORERS}:*`;
+ const keys = await this.operations.scanKeys(pattern);
+ if (keys.length === 0) {
+ return {
+ scores: [],
+ pagination: { total: 0, page: pagination.page, perPage: pagination.perPage, hasMore: false }
+ };
+ }
+ const pipeline = this.client.pipeline();
+ keys.forEach((key) => pipeline.get(key));
+ const results = await pipeline.exec();
+ const filtered = results.map((row) => row).filter((row) => !!row && typeof row === "object" && row.scorerId === scorerId);
+ const total = filtered.length;
+ const { page, perPage } = pagination;
+ const start = page * perPage;
+ const end = start + perPage;
+ const paged = filtered.slice(start, end);
+ const scores = paged.map((row) => transformScoreRow(row));
+ return {
+ scores,
+ pagination: {
+ total,
+ page,
+ perPage,
+ hasMore: end < total
+ }
+ };
+ }
+ async saveScore(score) {
+ const { key, processedRecord } = processRecord(storage.TABLE_SCORERS, score);
+ try {
+ await this.client.set(key, processedRecord);
+ return { score };
+ } catch (error$1) {
+ throw new error.MastraError(
+ {
+ id: "STORAGE_UPSTASH_STORAGE_SAVE_SCORE_FAILED",
+ domain: error.ErrorDomain.STORAGE,
+ category: error.ErrorCategory.THIRD_PARTY,
+ details: { id: score.id }
+ },
+ error$1
+ );
+ }
+ }
+ async getScoresByRunId({
+ runId,
+ pagination = { page: 0, perPage: 20 }
+ }) {
+ const pattern = `${storage.TABLE_SCORERS}:*`;
+ const keys = await this.operations.scanKeys(pattern);
+ if (keys.length === 0) {
+ return {
+ scores: [],
+ pagination: { total: 0, page: pagination.page, perPage: pagination.perPage, hasMore: false }
+ };
+ }
+ const pipeline = this.client.pipeline();
+ keys.forEach((key) => pipeline.get(key));
+ const results = await pipeline.exec();
+ const filtered = results.map((row) => row).filter((row) => !!row && typeof row === "object" && row.runId === runId);
+ const total = filtered.length;
+ const { page, perPage } = pagination;
+ const start = page * perPage;
+ const end = start + perPage;
+ const paged = filtered.slice(start, end);
+ const scores = paged.map((row) => transformScoreRow(row));
+ return {
+ scores,
+ pagination: {
+ total,
+ page,
+ perPage,
+ hasMore: end < total
+ }
+ };
+ }
+ async getScoresByEntityId({
+ entityId,
+ entityType,
+ pagination = { page: 0, perPage: 20 }
+ }) {
+ const pattern = `${storage.TABLE_SCORERS}:*`;
+ const keys = await this.operations.scanKeys(pattern);
+ if (keys.length === 0) {
+ return {
+ scores: [],
+ pagination: { total: 0, page: pagination.page, perPage: pagination.perPage, hasMore: false }
+ };
+ }
+ const pipeline = this.client.pipeline();
+ keys.forEach((key) => pipeline.get(key));
+ const results = await pipeline.exec();
+ const filtered = results.map((row) => row).filter((row) => {
+ if (!row || typeof row !== "object") return false;
+ if (row.entityId !== entityId) return false;
+ if (entityType && row.entityType !== entityType) return false;
+ return true;
  });
- const data = await this.redis.get(key);
- if (!data) return null;
- return data.snapshot;
+ const total = filtered.length;
+ const { page, perPage } = pagination;
+ const start = page * perPage;
+ const end = start + perPage;
+ const paged = filtered.slice(start, end);
+ const scores = paged.map((row) => transformScoreRow(row));
+ return {
+ scores,
+ pagination: {
+ total,
+ page,
+ perPage,
+ hasMore: end < total
+ }
+ };
+ }
+ };
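
Every getScoresBy* reader above follows the same shape: enumerate candidate keys with SCAN, fetch them in a single pipeline round trip, filter in memory, then slice out the requested page. A condensed sketch of that pattern, with an illustrative fetchPage helper that is not part of the package:

    import { Redis } from '@upstash/redis';

    // Condensed sketch of the scan -> pipeline GET -> filter -> slice pattern.
    async function fetchPage<T>(
      redis: Redis,
      pattern: string,
      matches: (row: T) => boolean,
      page = 0,
      perPage = 20,
    ) {
      let cursor = '0';
      const keys: string[] = [];
      do {
        const [next, batch] = await redis.scan(cursor, { match: pattern, count: 10_000 });
        keys.push(...batch);
        cursor = next;
      } while (cursor !== '0');
      if (keys.length === 0) return { rows: [] as T[], total: 0, hasMore: false };

      // One pipeline round trip instead of one GET per key.
      const pipeline = redis.pipeline();
      keys.forEach(key => pipeline.get(key));
      const results = (await pipeline.exec()) as (T | null)[];
      const rows = results.filter((row): row is T => row !== null && matches(row));

      const start = page * perPage;
      return {
        rows: rows.slice(start, start + perPage),
        total: rows.length,
        hasMore: start + perPage < rows.length,
      };
    }

Note that filtering and pagination happen client-side, so each call reads every score record for the table, not just the requested page.
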
+ var TracesUpstash = class extends storage.TracesStorage {
+ client;
+ operations;
+ constructor({ client, operations }) {
+ super();
+ this.client = client;
+ this.operations = operations;
  }
  /**
- * Get all evaluations with pagination and total count
- * @param options Pagination and filtering options
- * @returns Object with evals array and total count
+ * @deprecated use getTracesPaginated instead
  */
- async getEvals(options) {
+ async getTraces(args) {
+ if (args.fromDate || args.toDate) {
+ args.dateRange = {
+ start: args.fromDate,
+ end: args.toDate
+ };
+ }
  try {
- const { agentName, type, page = 0, perPage = 100, dateRange } = options || {};
- const fromDate = dateRange?.start;
- const toDate = dateRange?.end;
- const pattern = `${storage.TABLE_EVALS}:*`;
- const keys = await this.scanKeys(pattern);
+ const { traces } = await this.getTracesPaginated(args);
+ return traces;
+ } catch (error$1) {
+ throw new error.MastraError(
+ {
+ id: "STORAGE_UPSTASH_STORAGE_GET_TRACES_FAILED",
+ domain: error.ErrorDomain.STORAGE,
+ category: error.ErrorCategory.THIRD_PARTY
+ },
+ error$1
+ );
+ }
+ }
+ async getTracesPaginated(args) {
+ const { name, scope, page = 0, perPage = 100, attributes, filters, dateRange } = args;
+ const fromDate = dateRange?.start;
+ const toDate = dateRange?.end;
+ try {
+ const pattern = `${storage.TABLE_TRACES}:*`;
+ const keys = await this.operations.scanKeys(pattern);
  if (keys.length === 0) {
  return {
- evals: [],
+ traces: [],
  total: 0,
  page,
- perPage,
+ perPage: perPage || 100,
  hasMore: false
  };
  }
- const pipeline = this.redis.pipeline();
+ const pipeline = this.client.pipeline();
  keys.forEach((key) => pipeline.get(key));
  const results = await pipeline.exec();
- let filteredEvals = results.map((result) => result).filter((record) => record !== null && typeof record === "object");
- if (agentName) {
- filteredEvals = filteredEvals.filter((record) => record.agent_name === agentName);
+ let filteredTraces = results.filter(
+ (record) => record !== null && typeof record === "object"
+ );
+ if (name) {
+ filteredTraces = filteredTraces.filter((record) => record.name?.toLowerCase().startsWith(name.toLowerCase()));
  }
- if (type === "test") {
- filteredEvals = filteredEvals.filter((record) => {
- if (!record.test_info) return false;
- try {
- if (typeof record.test_info === "string") {
- const parsedTestInfo = JSON.parse(record.test_info);
- return parsedTestInfo && typeof parsedTestInfo === "object" && "testPath" in parsedTestInfo;
- }
- return typeof record.test_info === "object" && "testPath" in record.test_info;
- } catch {
- return false;
- }
- });
- } else if (type === "live") {
- filteredEvals = filteredEvals.filter((record) => {
- if (!record.test_info) return true;
- try {
- if (typeof record.test_info === "string") {
- const parsedTestInfo = JSON.parse(record.test_info);
- return !(parsedTestInfo && typeof parsedTestInfo === "object" && "testPath" in parsedTestInfo);
- }
- return !(typeof record.test_info === "object" && "testPath" in record.test_info);
- } catch {
- return true;
- }
+ if (scope) {
+ filteredTraces = filteredTraces.filter((record) => record.scope === scope);
+ }
+ if (attributes) {
+ filteredTraces = filteredTraces.filter((record) => {
+ const recordAttributes = record.attributes;
+ if (!recordAttributes) return false;
+ const parsedAttributes = typeof recordAttributes === "string" ? JSON.parse(recordAttributes) : recordAttributes;
+ return Object.entries(attributes).every(([key, value]) => parsedAttributes[key] === value);
  });
  }
+ if (filters) {
+ filteredTraces = filteredTraces.filter(
+ (record) => Object.entries(filters).every(([key, value]) => record[key] === value)
+ );
+ }
  if (fromDate) {
- filteredEvals = filteredEvals.filter((record) => {
- const createdAt = new Date(record.created_at || record.createdAt || 0);
- return createdAt.getTime() >= fromDate.getTime();
- });
+ filteredTraces = filteredTraces.filter(
+ (record) => new Date(record.createdAt).getTime() >= new Date(fromDate).getTime()
+ );
  }
  if (toDate) {
- filteredEvals = filteredEvals.filter((record) => {
- const createdAt = new Date(record.created_at || record.createdAt || 0);
- return createdAt.getTime() <= toDate.getTime();
- });
+ filteredTraces = filteredTraces.filter(
+ (record) => new Date(record.createdAt).getTime() <= new Date(toDate).getTime()
+ );
  }
- filteredEvals.sort((a, b) => {
- const dateA = new Date(a.created_at || a.createdAt || 0).getTime();
- const dateB = new Date(b.created_at || b.createdAt || 0).getTime();
- return dateB - dateA;
- });
- const total = filteredEvals.length;
- const start = page * perPage;
- const end = start + perPage;
- const paginatedEvals = filteredEvals.slice(start, end);
+ filteredTraces.sort((a, b) => new Date(b.createdAt).getTime() - new Date(a.createdAt).getTime());
+ const transformedTraces = filteredTraces.map((record) => ({
+ id: record.id,
+ parentSpanId: record.parentSpanId,
+ traceId: record.traceId,
+ name: record.name,
+ scope: record.scope,
+ kind: record.kind,
+ status: parseJSON(record.status),
+ events: parseJSON(record.events),
+ links: parseJSON(record.links),
+ attributes: parseJSON(record.attributes),
+ startTime: record.startTime,
+ endTime: record.endTime,
+ other: parseJSON(record.other),
+ createdAt: ensureDate(record.createdAt)
+ }));
+ const total = transformedTraces.length;
+ const resolvedPerPage = perPage || 100;
+ const start = page * resolvedPerPage;
+ const end = start + resolvedPerPage;
+ const paginatedTraces = transformedTraces.slice(start, end);
  const hasMore = end < total;
- const evals = paginatedEvals.map((record) => this.transformEvalRecord(record));
  return {
- evals,
+ traces: paginatedTraces,
  total,
  page,
- perPage,
+ perPage: resolvedPerPage,
  hasMore
  };
- } catch (error) {
- const { page = 0, perPage = 100 } = options || {};
- console.error("Failed to get evals:", error);
+ } catch (error$1) {
+ const mastraError = new error.MastraError(
+ {
+ id: "STORAGE_UPSTASH_STORAGE_GET_TRACES_PAGINATED_FAILED",
+ domain: error.ErrorDomain.STORAGE,
+ category: error.ErrorCategory.THIRD_PARTY,
+ details: {
+ name: args.name || "",
+ scope: args.scope || ""
+ }
+ },
+ error$1
+ );
+ this.logger?.trackException(mastraError);
+ this.logger.error(mastraError.toString());
  return {
- evals: [],
+ traces: [],
  total: 0,
  page,
- perPage,
+ perPage: perPage || 100,
  hasMore: false
  };
  }
  }
+ async batchTraceInsert(args) {
+ return this.operations.batchInsert({
+ tableName: storage.TABLE_TRACES,
+ records: args.records
+ });
+ }
+ };
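
getTraces is now a thin deprecated wrapper: it folds the legacy fromDate/toDate arguments into dateRange and forwards to getTracesPaginated, which filters by name prefix, exact scope, attributes, and date range before paginating. A hedged usage sketch; the attribute values and time window below are placeholders:

    import { UpstashStore } from '@mastra/upstash';

    const store = new UpstashStore({
      url: process.env.UPSTASH_REDIS_REST_URL!,
      token: process.env.UPSTASH_REDIS_REST_TOKEN!,
    });

    // name is a case-insensitive prefix filter; attributes must match exactly.
    const { traces, total, hasMore } = await store.getTracesPaginated({
      name: 'agent',
      attributes: { componentName: 'myAgent' },  // placeholder attribute
      dateRange: { start: new Date(Date.now() - 86_400_000), end: new Date() },
      page: 0,
      perPage: 50,
    });
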
+ function parseWorkflowRun(row) {
+ let parsedSnapshot = row.snapshot;
+ if (typeof parsedSnapshot === "string") {
+ try {
+ parsedSnapshot = JSON.parse(row.snapshot);
+ } catch (e) {
+ console.warn(`Failed to parse snapshot for workflow ${row.workflow_name}: ${e}`);
+ }
+ }
+ return {
+ workflowName: row.workflow_name,
+ runId: row.run_id,
+ snapshot: parsedSnapshot,
+ createdAt: ensureDate(row.createdAt),
+ updatedAt: ensureDate(row.updatedAt),
+ resourceId: row.resourceId
+ };
+ }
+ var WorkflowsUpstash = class extends storage.WorkflowsStorage {
+ client;
+ operations;
+ constructor({ client, operations }) {
+ super();
+ this.client = client;
+ this.operations = operations;
+ }
+ async persistWorkflowSnapshot(params) {
+ const { namespace = "workflows", workflowName, runId, snapshot } = params;
+ try {
+ await this.operations.insert({
+ tableName: storage.TABLE_WORKFLOW_SNAPSHOT,
+ record: {
+ namespace,
+ workflow_name: workflowName,
+ run_id: runId,
+ snapshot,
+ createdAt: /* @__PURE__ */ new Date(),
+ updatedAt: /* @__PURE__ */ new Date()
+ }
+ });
+ } catch (error$1) {
+ throw new error.MastraError(
+ {
+ id: "STORAGE_UPSTASH_STORAGE_PERSIST_WORKFLOW_SNAPSHOT_FAILED",
+ domain: error.ErrorDomain.STORAGE,
+ category: error.ErrorCategory.THIRD_PARTY,
+ details: {
+ namespace,
+ workflowName,
+ runId
+ }
+ },
+ error$1
+ );
+ }
+ }
+ async loadWorkflowSnapshot(params) {
+ const { namespace = "workflows", workflowName, runId } = params;
+ const key = getKey(storage.TABLE_WORKFLOW_SNAPSHOT, {
+ namespace,
+ workflow_name: workflowName,
+ run_id: runId
+ });
+ try {
+ const data = await this.client.get(key);
+ if (!data) return null;
+ return data.snapshot;
+ } catch (error$1) {
+ throw new error.MastraError(
+ {
+ id: "STORAGE_UPSTASH_STORAGE_LOAD_WORKFLOW_SNAPSHOT_FAILED",
+ domain: error.ErrorDomain.STORAGE,
+ category: error.ErrorCategory.THIRD_PARTY,
+ details: {
+ namespace,
+ workflowName,
+ runId
+ }
+ },
+ error$1
+ );
+ }
+ }
+ async getWorkflowRunById({
+ runId,
+ workflowName
+ }) {
+ try {
+ const key = getKey(storage.TABLE_WORKFLOW_SNAPSHOT, { namespace: "workflows", workflow_name: workflowName, run_id: runId }) + "*";
+ const keys = await this.operations.scanKeys(key);
+ const workflows = await Promise.all(
+ keys.map(async (key2) => {
+ const data2 = await this.client.get(key2);
+ return data2;
+ })
+ );
+ const data = workflows.find((w) => w?.run_id === runId && w?.workflow_name === workflowName);
+ if (!data) return null;
+ return parseWorkflowRun(data);
+ } catch (error$1) {
+ throw new error.MastraError(
+ {
+ id: "STORAGE_UPSTASH_STORAGE_GET_WORKFLOW_RUN_BY_ID_FAILED",
+ domain: error.ErrorDomain.STORAGE,
+ category: error.ErrorCategory.THIRD_PARTY,
+ details: {
+ namespace: "workflows",
+ runId,
+ workflowName: workflowName || ""
+ }
+ },
+ error$1
+ );
+ }
+ }
  async getWorkflowRuns({
- namespace,
  workflowName,
  fromDate,
  toDate,
  limit,
  offset,
  resourceId
- } = { namespace: "workflows" }) {
+ }) {
  try {
- let pattern = this.getKey(storage.TABLE_WORKFLOW_SNAPSHOT, { namespace }) + ":*";
+ let pattern = getKey(storage.TABLE_WORKFLOW_SNAPSHOT, { namespace: "workflows" }) + ":*";
  if (workflowName && resourceId) {
- pattern = this.getKey(storage.TABLE_WORKFLOW_SNAPSHOT, {
- namespace,
+ pattern = getKey(storage.TABLE_WORKFLOW_SNAPSHOT, {
+ namespace: "workflows",
  workflow_name: workflowName,
  run_id: "*",
  resourceId
  });
  } else if (workflowName) {
- pattern = this.getKey(storage.TABLE_WORKFLOW_SNAPSHOT, { namespace, workflow_name: workflowName }) + ":*";
+ pattern = getKey(storage.TABLE_WORKFLOW_SNAPSHOT, { namespace: "workflows", workflow_name: workflowName }) + ":*";
  } else if (resourceId) {
- pattern = this.getKey(storage.TABLE_WORKFLOW_SNAPSHOT, { namespace, workflow_name: "*", run_id: "*", resourceId });
+ pattern = getKey(storage.TABLE_WORKFLOW_SNAPSHOT, {
+ namespace: "workflows",
+ workflow_name: "*",
+ run_id: "*",
+ resourceId
+ });
  }
- const keys = await this.scanKeys(pattern);
+ const keys = await this.operations.scanKeys(pattern);
  if (keys.length === 0) {
  return { runs: [], total: 0 };
  }
- const pipeline = this.redis.pipeline();
+ const pipeline = this.client.pipeline();
  keys.forEach((key) => pipeline.get(key));
  const results = await pipeline.exec();
  let runs = results.map((result) => result).filter(
  (record) => record !== null && record !== void 0 && typeof record === "object" && "workflow_name" in record
- ).filter((record) => !workflowName || record.workflow_name === workflowName).map((w) => this.parseWorkflowRun(w)).filter((w) => {
+ ).filter((record) => !workflowName || record.workflow_name === workflowName).map((w) => parseWorkflowRun(w)).filter((w) => {
  if (fromDate && w.createdAt < fromDate) return false;
  if (toDate && w.createdAt > toDate) return false;
  return true;
@@ -823,107 +1621,223 @@ var UpstashStore = class extends storage.MastraStorage {
  runs = runs.slice(offset, offset + limit);
  }
  return { runs, total };
- } catch (error) {
- console.error("Error getting workflow runs:", error);
- throw error;
+ } catch (error$1) {
+ throw new error.MastraError(
+ {
+ id: "STORAGE_UPSTASH_STORAGE_GET_WORKFLOW_RUNS_FAILED",
+ domain: error.ErrorDomain.STORAGE,
+ category: error.ErrorCategory.THIRD_PARTY,
+ details: {
+ namespace: "workflows",
+ workflowName: workflowName || "",
+ resourceId: resourceId || ""
+ }
+ },
+ error$1
+ );
  }
  }
+ };
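
The workflow lookups above hinge on glob patterns over composite keys. getKey is defined earlier in this bundle and is not shown in this hunk; assuming it joins the table name and the key values with ':', the patterns compose roughly like this:

    import { TABLE_WORKFLOW_SNAPSHOT } from '@mastra/core/storage';

    // Illustrative stand-in for the bundle's getKey helper, assuming a
    // ':'-joined layout of table name plus key values (an assumption, since
    // the real helper is outside this hunk).
    const getKeySketch = (table: string, keys: Record<string, string>) =>
      [table, ...Object.values(keys)].join(':');

    // All runs of one workflow:
    const allRuns =
      getKeySketch(TABLE_WORKFLOW_SNAPSHOT, { namespace: 'workflows', workflow_name: 'myWorkflow' }) + ':*';

    // One run, as in getWorkflowRunById above: a bare '*' suffix (not ':*')
    // so the pattern also matches keys carrying a trailing resourceId segment.
    const oneRun =
      getKeySketch(TABLE_WORKFLOW_SNAPSHOT, {
        namespace: 'workflows',
        workflow_name: 'myWorkflow',
        run_id: 'run-123',
      }) + '*';
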
+
+ // src/storage/index.ts
+ var UpstashStore = class extends storage.MastraStorage {
+ redis;
+ stores;
+ constructor(config) {
+ super({ name: "Upstash" });
+ this.redis = new redis.Redis({
+ url: config.url,
+ token: config.token
+ });
+ const operations = new StoreOperationsUpstash({ client: this.redis });
+ const traces = new TracesUpstash({ client: this.redis, operations });
+ const scores = new ScoresUpstash({ client: this.redis, operations });
+ const workflows = new WorkflowsUpstash({ client: this.redis, operations });
+ const memory = new StoreMemoryUpstash({ client: this.redis, operations });
+ const legacyEvals = new StoreLegacyEvalsUpstash({ client: this.redis, operations });
+ this.stores = {
+ operations,
+ traces,
+ scores,
+ workflows,
+ memory,
+ legacyEvals
+ };
+ }
+ get supports() {
+ return {
+ selectByIncludeResourceScope: true,
+ resourceWorkingMemory: true,
+ hasColumn: false,
+ createTable: false,
+ deleteMessages: true
+ };
+ }
+ /**
+ * @deprecated Use getEvals instead
+ */
+ async getEvalsByAgentName(agentName, type) {
+ return this.stores.legacyEvals.getEvalsByAgentName(agentName, type);
+ }
+ /**
+ * Get all evaluations with pagination and total count
+ * @param options Pagination and filtering options
+ * @returns Object with evals array and total count
+ */
+ async getEvals(options) {
+ return this.stores.legacyEvals.getEvals(options);
+ }
+ /**
+ * @deprecated use getTracesPaginated instead
+ */
+ async getTraces(args) {
+ return this.stores.traces.getTraces(args);
+ }
+ async getTracesPaginated(args) {
+ return this.stores.traces.getTracesPaginated(args);
+ }
+ async batchTraceInsert(args) {
+ return this.stores.traces.batchTraceInsert(args);
+ }
+ async createTable({
+ tableName,
+ schema
+ }) {
+ return this.stores.operations.createTable({ tableName, schema });
+ }
+ /**
+ * No-op: This backend is schemaless and does not require schema changes.
+ * @param tableName Name of the table
+ * @param schema Schema of the table
+ * @param ifNotExists Array of column names to add if they don't exist
+ */
+ async alterTable(args) {
+ return this.stores.operations.alterTable(args);
+ }
+ async clearTable({ tableName }) {
+ return this.stores.operations.clearTable({ tableName });
+ }
+ async dropTable({ tableName }) {
+ return this.stores.operations.dropTable({ tableName });
+ }
+ async insert({ tableName, record }) {
+ return this.stores.operations.insert({ tableName, record });
+ }
+ async batchInsert(input) {
+ return this.stores.operations.batchInsert(input);
+ }
+ async load({ tableName, keys }) {
+ return this.stores.operations.load({ tableName, keys });
+ }
+ async getThreadById({ threadId }) {
+ return this.stores.memory.getThreadById({ threadId });
+ }
+ /**
+ * @deprecated use getThreadsByResourceIdPaginated instead
+ */
+ async getThreadsByResourceId({ resourceId }) {
+ return this.stores.memory.getThreadsByResourceId({ resourceId });
+ }
+ async getThreadsByResourceIdPaginated(args) {
+ return this.stores.memory.getThreadsByResourceIdPaginated(args);
+ }
+ async saveThread({ thread }) {
+ return this.stores.memory.saveThread({ thread });
+ }
+ async updateThread({
+ id,
+ title,
+ metadata
+ }) {
+ return this.stores.memory.updateThread({ id, title, metadata });
+ }
+ async deleteThread({ threadId }) {
+ return this.stores.memory.deleteThread({ threadId });
+ }
+ async saveMessages(args) {
+ return this.stores.memory.saveMessages(args);
+ }
+ async getMessages({
+ threadId,
+ selectBy,
+ format
+ }) {
+ return this.stores.memory.getMessages({ threadId, selectBy, format });
+ }
+ async getMessagesPaginated(args) {
+ return this.stores.memory.getMessagesPaginated(args);
+ }
+ async persistWorkflowSnapshot(params) {
+ return this.stores.workflows.persistWorkflowSnapshot(params);
+ }
+ async loadWorkflowSnapshot(params) {
+ return this.stores.workflows.loadWorkflowSnapshot(params);
+ }
+ async getWorkflowRuns({
+ workflowName,
+ fromDate,
+ toDate,
+ limit,
+ offset,
+ resourceId
+ } = {}) {
+ return this.stores.workflows.getWorkflowRuns({ workflowName, fromDate, toDate, limit, offset, resourceId });
+ }
  async getWorkflowRunById({
- namespace = "workflows",
  runId,
  workflowName
  }) {
- try {
- const key = this.getKey(storage.TABLE_WORKFLOW_SNAPSHOT, { namespace, workflow_name: workflowName, run_id: runId }) + "*";
- const keys = await this.scanKeys(key);
- const workflows = await Promise.all(
- keys.map(async (key2) => {
- const data2 = await this.redis.get(key2);
- return data2;
- })
- );
- const data = workflows.find((w) => w?.run_id === runId && w?.workflow_name === workflowName);
- if (!data) return null;
- return this.parseWorkflowRun(data);
- } catch (error) {
- console.error("Error getting workflow run by ID:", error);
- throw error;
- }
+ return this.stores.workflows.getWorkflowRunById({ runId, workflowName });
  }
  async close() {
  }
- async updateMessages(_args) {
- this.logger.error("updateMessages is not yet implemented in UpstashStore");
- throw new Error("Method not implemented");
+ async updateMessages(args) {
+ return this.stores.memory.updateMessages(args);
+ }
+ async deleteMessages(messageIds) {
+ return this.stores.memory.deleteMessages(messageIds);
  }
  async getResourceById({ resourceId }) {
- try {
- const key = `${storage.TABLE_RESOURCES}:${resourceId}`;
- const data = await this.redis.get(key);
- if (!data) {
- return null;
- }
- return {
- ...data,
- createdAt: new Date(data.createdAt),
- updatedAt: new Date(data.updatedAt),
- // Ensure workingMemory is always returned as a string, regardless of automatic parsing
- workingMemory: typeof data.workingMemory === "object" ? JSON.stringify(data.workingMemory) : data.workingMemory,
- metadata: typeof data.metadata === "string" ? JSON.parse(data.metadata) : data.metadata
- };
- } catch (error) {
- this.logger.error("Error getting resource by ID:", error);
- throw error;
- }
+ return this.stores.memory.getResourceById({ resourceId });
  }
  async saveResource({ resource }) {
- try {
- const key = `${storage.TABLE_RESOURCES}:${resource.id}`;
- const serializedResource = {
- ...resource,
- metadata: JSON.stringify(resource.metadata),
- createdAt: resource.createdAt.toISOString(),
- updatedAt: resource.updatedAt.toISOString()
- };
- await this.redis.set(key, serializedResource);
- return resource;
- } catch (error) {
- this.logger.error("Error saving resource:", error);
- throw error;
- }
+ return this.stores.memory.saveResource({ resource });
  }
  async updateResource({
  resourceId,
  workingMemory,
  metadata
  }) {
- try {
- const existingResource = await this.getResourceById({ resourceId });
- if (!existingResource) {
- const newResource = {
- id: resourceId,
- workingMemory,
- metadata: metadata || {},
- createdAt: /* @__PURE__ */ new Date(),
- updatedAt: /* @__PURE__ */ new Date()
- };
- return this.saveResource({ resource: newResource });
- }
- const updatedResource = {
- ...existingResource,
- workingMemory: workingMemory !== void 0 ? workingMemory : existingResource.workingMemory,
- metadata: {
- ...existingResource.metadata,
- ...metadata
- },
- updatedAt: /* @__PURE__ */ new Date()
- };
- await this.saveResource({ resource: updatedResource });
- return updatedResource;
- } catch (error) {
- this.logger.error("Error updating resource:", error);
- throw error;
- }
+ return this.stores.memory.updateResource({ resourceId, workingMemory, metadata });
+ }
+ async getScoreById({ id: _id }) {
+ return this.stores.scores.getScoreById({ id: _id });
+ }
+ async saveScore(score) {
+ return this.stores.scores.saveScore(score);
+ }
+ async getScoresByRunId({
+ runId,
+ pagination
+ }) {
+ return this.stores.scores.getScoresByRunId({ runId, pagination });
+ }
+ async getScoresByEntityId({
+ entityId,
+ entityType,
+ pagination
+ }) {
+ return this.stores.scores.getScoresByEntityId({
+ entityId,
+ entityType,
+ pagination
+ });
+ }
+ async getScoresByScorerId({
+ scorerId,
+ pagination
+ }) {
+ return this.stores.scores.getScoresByScorerId({ scorerId, pagination });
  }
  };
  var UpstashFilterTranslator = class extends filter.BaseFilterTranslator {
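
After this refactor UpstashStore is a thin facade: the constructor builds one Redis client, wires it into the per-domain stores (operations, traces, scores, workflows, memory, legacyEvals), and every public method delegates. A hedged usage sketch; the IDs below are placeholders:

    import { UpstashStore } from '@mastra/upstash';

    const store = new UpstashStore({
      url: process.env.UPSTASH_REDIS_REST_URL!,
      token: process.env.UPSTASH_REDIS_REST_TOKEN!,
    });

    // Each facade call routes to the matching domain store built in the constructor:
    const thread = await store.getThreadById({ threadId: 'thread-1' });     // stores.memory
    const { runs } = await store.getWorkflowRuns({ limit: 10, offset: 0 }); // stores.workflows
    await store.deleteMessages(['msg-1', 'msg-2']);                         // stores.memory
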
@@ -1139,21 +2053,40 @@ var UpstashVector = class extends vector.MastraVector {
  * @param {UpsertVectorParams} params - The parameters for the upsert operation.
  * @returns {Promise<string[]>} A promise that resolves to the IDs of the upserted vectors.
  */
- async upsert({ indexName: namespace, vectors, metadata, ids }) {
+ async upsert({
+ indexName: namespace,
+ vectors,
+ metadata,
+ ids,
+ sparseVectors
+ }) {
  const generatedIds = ids || vectors.map(() => crypto.randomUUID());
  const points = vectors.map((vector, index) => ({
  id: generatedIds[index],
  vector,
+ ...sparseVectors?.[index] && { sparseVector: sparseVectors[index] },
  metadata: metadata?.[index]
  }));
- await this.client.upsert(points, {
- namespace
- });
- return generatedIds;
+ try {
+ await this.client.upsert(points, {
+ namespace
+ });
+ return generatedIds;
+ } catch (error$1) {
+ throw new error.MastraError(
+ {
+ id: "STORAGE_UPSTASH_VECTOR_UPSERT_FAILED",
+ domain: error.ErrorDomain.STORAGE,
+ category: error.ErrorCategory.THIRD_PARTY,
+ details: { namespace, vectorCount: vectors.length }
+ },
+ error$1
+ );
+ }
  }
  /**
  * Transforms a Mastra vector filter into an Upstash-compatible filter string.
- * @param {VectorFilter} [filter] - The filter to transform.
+ * @param {UpstashVectorFilter} [filter] - The filter to transform.
  * @returns {string | undefined} The transformed filter string, or undefined if no filter is provided.
  */
  transformFilter(filter) {
@@ -1178,31 +2111,60 @@ var UpstashVector = class extends vector.MastraVector {
  queryVector,
  topK = 10,
  filter,
- includeVector = false
+ includeVector = false,
+ sparseVector,
+ fusionAlgorithm,
+ queryMode
  }) {
- const ns = this.client.namespace(namespace);
- const filterString = this.transformFilter(filter);
- const results = await ns.query({
- topK,
- vector: queryVector,
- includeVectors: includeVector,
- includeMetadata: true,
- ...filterString ? { filter: filterString } : {}
- });
- return (results || []).map((result) => ({
- id: `${result.id}`,
- score: result.score,
- metadata: result.metadata,
- ...includeVector && { vector: result.vector || [] }
- }));
+ try {
+ const ns = this.client.namespace(namespace);
+ const filterString = this.transformFilter(filter);
+ const results = await ns.query({
+ topK,
+ vector: queryVector,
+ ...sparseVector && { sparseVector },
+ includeVectors: includeVector,
+ includeMetadata: true,
+ ...filterString ? { filter: filterString } : {},
+ ...fusionAlgorithm && { fusionAlgorithm },
+ ...queryMode && { queryMode }
+ });
+ return (results || []).map((result) => ({
+ id: `${result.id}`,
+ score: result.score,
+ metadata: result.metadata,
+ ...includeVector && { vector: result.vector || [] }
+ }));
+ } catch (error$1) {
+ throw new error.MastraError(
+ {
+ id: "STORAGE_UPSTASH_VECTOR_QUERY_FAILED",
+ domain: error.ErrorDomain.STORAGE,
+ category: error.ErrorCategory.THIRD_PARTY,
+ details: { namespace, topK }
+ },
+ error$1
+ );
+ }
  }
  /**
  * Lists all namespaces in the Upstash vector index, which correspond to indexes.
  * @returns {Promise<string[]>} A promise that resolves to a list of index names.
  */
  async listIndexes() {
- const indexes = await this.client.listNamespaces();
- return indexes.filter(Boolean);
+ try {
+ const indexes = await this.client.listNamespaces();
+ return indexes.filter(Boolean);
+ } catch (error$1) {
+ throw new error.MastraError(
+ {
+ id: "STORAGE_UPSTASH_VECTOR_LIST_INDEXES_FAILED",
+ domain: error.ErrorDomain.STORAGE,
+ category: error.ErrorCategory.THIRD_PARTY
+ },
+ error$1
+ );
+ }
  }
  /**
  * Retrieves statistics about a vector index.
@@ -1211,12 +2173,24 @@ var UpstashVector = class extends vector.MastraVector {
  * @returns A promise that resolves to the index statistics including dimension, count and metric
  */
  async describeIndex({ indexName: namespace }) {
- const info = await this.client.info();
- return {
- dimension: info.dimension,
- count: info.namespaces?.[namespace]?.vectorCount || 0,
- metric: info?.similarityFunction?.toLowerCase()
- };
+ try {
+ const info = await this.client.info();
+ return {
+ dimension: info.dimension,
+ count: info.namespaces?.[namespace]?.vectorCount || 0,
+ metric: info?.similarityFunction?.toLowerCase()
+ };
+ } catch (error$1) {
+ throw new error.MastraError(
+ {
+ id: "STORAGE_UPSTASH_VECTOR_DESCRIBE_INDEX_FAILED",
+ domain: error.ErrorDomain.STORAGE,
+ category: error.ErrorCategory.THIRD_PARTY,
+ details: { namespace }
+ },
+ error$1
+ );
+ }
  }
  /**
  * Deletes an index (namespace).
@@ -1226,8 +2200,16 @@ var UpstashVector = class extends vector.MastraVector {
  async deleteIndex({ indexName: namespace }) {
  try {
  await this.client.deleteNamespace(namespace);
- } catch (error) {
- this.logger.error("Failed to delete namespace:", error);
+ } catch (error$1) {
+ throw new error.MastraError(
+ {
+ id: "STORAGE_UPSTASH_VECTOR_DELETE_INDEX_FAILED",
+ domain: error.ErrorDomain.STORAGE,
+ category: error.ErrorCategory.THIRD_PARTY,
+ details: { namespace }
+ },
+ error$1
+ );
  }
  }
  /**
@@ -1241,30 +2223,40 @@ var UpstashVector = class extends vector.MastraVector {
  * @throws Will throw an error if no updates are provided or if the update operation fails.
  */
  async updateVector({ indexName: namespace, id, update }) {
- try {
- if (!update.vector && !update.metadata) {
- throw new Error("No update data provided");
- }
- if (!update.vector && update.metadata) {
- throw new Error("Both vector and metadata must be provided for an update");
- }
- const updatePayload = { id };
- if (update.vector) {
- updatePayload.vector = update.vector;
- }
- if (update.metadata) {
- updatePayload.metadata = update.metadata;
- }
- const points = {
- id: updatePayload.id,
- vector: updatePayload.vector,
- metadata: updatePayload.metadata
- };
- await this.client.upsert(points, {
- namespace
+ if (!update.vector && !update.metadata && !update.sparseVector) {
+ throw new error.MastraError({
+ id: "STORAGE_UPSTASH_VECTOR_UPDATE_VECTOR_FAILED",
+ domain: error.ErrorDomain.STORAGE,
+ category: error.ErrorCategory.THIRD_PARTY,
+ details: { namespace, id },
+ text: "No update data provided"
  });
- } catch (error) {
- throw new Error(`Failed to update vector by id: ${id} for index name: ${namespace}: ${error.message}`);
+ }
+ if (!update.vector && !update.sparseVector && update.metadata) {
+ throw new error.MastraError({
+ id: "STORAGE_UPSTASH_VECTOR_UPDATE_VECTOR_FAILED",
+ domain: error.ErrorDomain.STORAGE,
+ category: error.ErrorCategory.THIRD_PARTY,
+ details: { namespace, id },
+ text: "Both vector and metadata must be provided for an update"
+ });
+ }
+ try {
+ const points = { id };
+ if (update.vector) points.vector = update.vector;
+ if (update.metadata) points.metadata = update.metadata;
+ if (update.sparseVector) points.sparseVector = update.sparseVector;
+ await this.client.upsert(points, { namespace });
+ } catch (error$1) {
+ throw new error.MastraError(
+ {
+ id: "STORAGE_UPSTASH_VECTOR_UPDATE_VECTOR_FAILED",
+ domain: error.ErrorDomain.STORAGE,
+ category: error.ErrorCategory.THIRD_PARTY,
+ details: { namespace, id }
+ },
+ error$1
+ );
  }
  }
  /**
@@ -1279,8 +2271,17 @@ var UpstashVector = class extends vector.MastraVector {
  await this.client.delete(id, {
  namespace
  });
- } catch (error) {
- this.logger.error(`Failed to delete vector by id: ${id} for namespace: ${namespace}:`, error);
+ } catch (error$1) {
+ const mastraError = new error.MastraError(
+ {
+ id: "STORAGE_UPSTASH_VECTOR_DELETE_VECTOR_FAILED",
+ domain: error.ErrorDomain.STORAGE,
+ category: error.ErrorCategory.THIRD_PARTY,
+ details: { namespace, id }
+ },
+ error$1
+ );
+ this.logger?.error(mastraError.toString());
  }
  }
  };
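
The headline change in UpstashVector is hybrid search support: upsert accepts per-vector sparseVectors, query accepts a sparseVector plus optional fusionAlgorithm and queryMode pass-throughs, and updateVector now treats a sparse vector as valid update data. A hedged sketch of a hybrid query, assuming an already-constructed UpstashVector whose backing index supports sparse vectors; the namespace, embedding, and sparse indices/values are placeholders:

    import { FusionAlgorithm } from '@upstash/vector';
    import type { UpstashVector } from '@mastra/upstash';

    // Placeholders standing in for real instances and embeddings.
    declare const vectorStore: UpstashVector;
    declare const denseEmbedding: number[];

    const results = await vectorStore.query({
      indexName: 'my-namespace',
      queryVector: denseEmbedding,
      sparseVector: { indices: [10, 42], values: [0.5, 1.0] }, // Upstash sparse format
      fusionAlgorithm: FusionAlgorithm.RRF,                    // rank fusion for hybrid results
      topK: 10,
    });

As the spread expressions in query show, fusionAlgorithm and queryMode are simply omitted from the request when undefined, so dense-only callers are unaffected.
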
@@ -1363,3 +2364,5 @@ Example Complex Query:
  exports.UPSTASH_PROMPT = UPSTASH_PROMPT;
  exports.UpstashStore = UpstashStore;
  exports.UpstashVector = UpstashVector;
+ //# sourceMappingURL=index.cjs.map
+ //# sourceMappingURL=index.cjs.map