@mastra/upstash 1.0.0-beta.1 → 1.0.0-beta.10

This diff compares the publicly released contents of the two package versions as published to their registry. It is provided for informational purposes only.
package/dist/index.cjs CHANGED
@@ -5,19 +5,12 @@ var redis = require('@upstash/redis');
5
5
  var agent = require('@mastra/core/agent');
6
6
  var error = require('@mastra/core/error');
7
7
  var evals = require('@mastra/core/evals');
8
- var crypto = require('crypto');
8
+ var crypto$1 = require('crypto');
9
9
  var vector = require('@mastra/core/vector');
10
10
  var vector$1 = require('@upstash/vector');
11
11
  var filter = require('@mastra/core/vector/filter');
12
12
 
13
13
  // src/storage/index.ts
14
- function ensureDate(value) {
15
- if (!value) return null;
16
- if (value instanceof Date) return value;
17
- if (typeof value === "string") return new Date(value);
18
- if (typeof value === "number") return new Date(value);
19
- return null;
20
- }
21
14
  function getKey(tableName, keys) {
22
15
  const keyParts = Object.entries(keys).filter(([_, value]) => value !== void 0).map(([key, value]) => `${key}:${value}`);
23
16
  return `${tableName}:${keyParts.join(":")}`;
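Note on this first hunk: the local ensureDate helper is deleted because date normalization now comes from @mastra/core/storage (later hunks call storage.ensureDate instead). A minimal sketch of the behavior being replaced, taken directly from the removed lines and assuming the core helper is a drop-in equivalent:

function ensureDate(value: unknown): Date | null {
  if (!value) return null;
  if (value instanceof Date) return value;
  // strings and numbers are both handed to the Date constructor; anything else is dropped
  if (typeof value === "string" || typeof value === "number") return new Date(value);
  return null;
}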
@@ -34,7 +27,7 @@ function processRecord(tableName, record) {
34
27
  ...record.resourceId ? { resourceId: record.resourceId } : {}
35
28
  });
36
29
  } else if (tableName === storage.TABLE_SCORERS) {
37
- key = getKey(tableName, { runId: record.runId });
30
+ key = getKey(tableName, { id: record.id });
38
31
  } else {
39
32
  key = getKey(tableName, { id: record.id });
40
33
  }
@@ -46,6 +39,107 @@ function processRecord(tableName, record) {
46
39
  return { key, processedRecord };
47
40
  }
48
41
 
42
+ // src/storage/db/index.ts
43
+ function resolveUpstashConfig(config) {
44
+ if ("client" in config) {
45
+ return config.client;
46
+ }
47
+ return new redis.Redis({
48
+ url: config.url,
49
+ token: config.token
50
+ });
51
+ }
52
+ var UpstashDB = class {
53
+ client;
54
+ constructor({ client }) {
55
+ this.client = client;
56
+ }
57
+ async insert({ tableName, record }) {
58
+ const { key, processedRecord } = processRecord(tableName, record);
59
+ try {
60
+ await this.client.set(key, processedRecord);
61
+ } catch (error$1) {
62
+ throw new error.MastraError(
63
+ {
64
+ id: storage.createStorageErrorId("UPSTASH", "INSERT", "FAILED"),
65
+ domain: error.ErrorDomain.STORAGE,
66
+ category: error.ErrorCategory.THIRD_PARTY,
67
+ details: {
68
+ tableName
69
+ }
70
+ },
71
+ error$1
72
+ );
73
+ }
74
+ }
75
+ async get({ tableName, keys }) {
76
+ const key = getKey(tableName, keys);
77
+ try {
78
+ const data = await this.client.get(key);
79
+ return data || null;
80
+ } catch (error$1) {
81
+ throw new error.MastraError(
82
+ {
83
+ id: storage.createStorageErrorId("UPSTASH", "LOAD", "FAILED"),
84
+ domain: error.ErrorDomain.STORAGE,
85
+ category: error.ErrorCategory.THIRD_PARTY,
86
+ details: {
87
+ tableName
88
+ }
89
+ },
90
+ error$1
91
+ );
92
+ }
93
+ }
94
+ async scanAndDelete(pattern, batchSize = 1e4) {
95
+ let cursor = "0";
96
+ let totalDeleted = 0;
97
+ do {
98
+ const [nextCursor, keys] = await this.client.scan(cursor, {
99
+ match: pattern,
100
+ count: batchSize
101
+ });
102
+ if (keys.length > 0) {
103
+ await this.client.del(...keys);
104
+ totalDeleted += keys.length;
105
+ }
106
+ cursor = nextCursor;
107
+ } while (cursor !== "0");
108
+ return totalDeleted;
109
+ }
110
+ async scanKeys(pattern, batchSize = 1e4) {
111
+ let cursor = "0";
112
+ let keys = [];
113
+ do {
114
+ const [nextCursor, batch] = await this.client.scan(cursor, {
115
+ match: pattern,
116
+ count: batchSize
117
+ });
118
+ keys.push(...batch);
119
+ cursor = nextCursor;
120
+ } while (cursor !== "0");
121
+ return keys;
122
+ }
123
+ async deleteData({ tableName }) {
124
+ const pattern = `${tableName}:*`;
125
+ try {
126
+ await this.scanAndDelete(pattern);
127
+ } catch (error$1) {
128
+ throw new error.MastraError(
129
+ {
130
+ id: storage.createStorageErrorId("UPSTASH", "CLEAR_TABLE", "FAILED"),
131
+ domain: error.ErrorDomain.STORAGE,
132
+ category: error.ErrorCategory.THIRD_PARTY,
133
+ details: {
134
+ tableName
135
+ }
136
+ },
137
+ error$1
138
+ );
139
+ }
140
+ }
141
+ };
142
+
49
143
  // src/storage/domains/memory/index.ts
50
144
  function getThreadMessagesKey(threadId) {
51
145
  return `thread:${threadId}:messages`;
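The new resolveUpstashConfig helper and the internal UpstashDB wrapper mean each domain store can now be built either from REST credentials or from an existing @upstash/redis client. A rough usage sketch; whether StoreMemoryUpstash is re-exported from @mastra/upstash, rather than only constructed internally by UpstashStore, is an assumption here:

import { Redis } from "@upstash/redis";
import { StoreMemoryUpstash } from "@mastra/upstash"; // assumed export path

// Option A: credentials; the store builds its own client via resolveUpstashConfig.
const memory = new StoreMemoryUpstash({
  url: process.env.UPSTASH_REDIS_REST_URL!,
  token: process.env.UPSTASH_REDIS_REST_TOKEN!,
});

// Option B: reuse a client you already have (the "client" branch of resolveUpstashConfig).
const redis = new Redis({ url: "https://example.upstash.io", token: "token" });
const memoryShared = new StoreMemoryUpstash({ client: redis });

// dangerouslyClearAll(), added in the next hunk, wipes the domain's tables.
await memoryShared.dangerouslyClearAll();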
@@ -54,31 +148,40 @@ function getMessageKey(threadId, messageId) {
54
148
  const key = getKey(storage.TABLE_MESSAGES, { threadId, id: messageId });
55
149
  return key;
56
150
  }
151
+ function getMessageIndexKey(messageId) {
152
+ return `msg-idx:${messageId}`;
153
+ }
57
154
  var StoreMemoryUpstash = class extends storage.MemoryStorage {
58
155
  client;
59
- operations;
60
- constructor({ client, operations }) {
156
+ #db;
157
+ constructor(config) {
61
158
  super();
159
+ const client = resolveUpstashConfig(config);
62
160
  this.client = client;
63
- this.operations = operations;
161
+ this.#db = new UpstashDB({ client });
162
+ }
163
+ async dangerouslyClearAll() {
164
+ await this.#db.deleteData({ tableName: storage.TABLE_THREADS });
165
+ await this.#db.deleteData({ tableName: storage.TABLE_MESSAGES });
166
+ await this.#db.deleteData({ tableName: storage.TABLE_RESOURCES });
64
167
  }
65
168
  async getThreadById({ threadId }) {
66
169
  try {
67
- const thread = await this.operations.load({
170
+ const thread = await this.#db.get({
68
171
  tableName: storage.TABLE_THREADS,
69
172
  keys: { id: threadId }
70
173
  });
71
174
  if (!thread) return null;
72
175
  return {
73
176
  ...thread,
74
- createdAt: ensureDate(thread.createdAt),
75
- updatedAt: ensureDate(thread.updatedAt),
177
+ createdAt: storage.ensureDate(thread.createdAt),
178
+ updatedAt: storage.ensureDate(thread.updatedAt),
76
179
  metadata: typeof thread.metadata === "string" ? JSON.parse(thread.metadata) : thread.metadata
77
180
  };
78
181
  } catch (error$1) {
79
182
  throw new error.MastraError(
80
183
  {
81
- id: "STORAGE_UPSTASH_STORAGE_GET_THREAD_BY_ID_FAILED",
184
+ id: storage.createStorageErrorId("UPSTASH", "GET_THREAD_BY_ID", "FAILED"),
82
185
  domain: error.ErrorDomain.STORAGE,
83
186
  category: error.ErrorCategory.THIRD_PARTY,
84
187
  details: {
@@ -96,7 +199,7 @@ var StoreMemoryUpstash = class extends storage.MemoryStorage {
96
199
  if (page < 0) {
97
200
  throw new error.MastraError(
98
201
  {
99
- id: "STORAGE_UPSTASH_LIST_THREADS_BY_RESOURCE_ID_INVALID_PAGE",
202
+ id: storage.createStorageErrorId("UPSTASH", "LIST_THREADS_BY_RESOURCE_ID", "INVALID_PAGE"),
100
203
  domain: error.ErrorDomain.STORAGE,
101
204
  category: error.ErrorCategory.USER,
102
205
  details: { page }
@@ -108,7 +211,7 @@ var StoreMemoryUpstash = class extends storage.MemoryStorage {
108
211
  try {
109
212
  let allThreads = [];
110
213
  const pattern = `${storage.TABLE_THREADS}:*`;
111
- const keys = await this.operations.scanKeys(pattern);
214
+ const keys = await this.#db.scanKeys(pattern);
112
215
  const pipeline = this.client.pipeline();
113
216
  keys.forEach((key) => pipeline.get(key));
114
217
  const results = await pipeline.exec();
@@ -117,8 +220,8 @@ var StoreMemoryUpstash = class extends storage.MemoryStorage {
117
220
  if (thread && thread.resourceId === resourceId) {
118
221
  allThreads.push({
119
222
  ...thread,
120
- createdAt: ensureDate(thread.createdAt),
121
- updatedAt: ensureDate(thread.updatedAt),
223
+ createdAt: storage.ensureDate(thread.createdAt),
224
+ updatedAt: storage.ensureDate(thread.updatedAt),
122
225
  metadata: typeof thread.metadata === "string" ? JSON.parse(thread.metadata) : thread.metadata
123
226
  });
124
227
  }
@@ -138,7 +241,7 @@ var StoreMemoryUpstash = class extends storage.MemoryStorage {
138
241
  } catch (error$1) {
139
242
  const mastraError = new error.MastraError(
140
243
  {
141
- id: "STORAGE_UPSTASH_STORAGE_LIST_THREADS_BY_RESOURCE_ID_FAILED",
244
+ id: storage.createStorageErrorId("UPSTASH", "LIST_THREADS_BY_RESOURCE_ID", "FAILED"),
142
245
  domain: error.ErrorDomain.STORAGE,
143
246
  category: error.ErrorCategory.THIRD_PARTY,
144
247
  details: {
@@ -162,7 +265,7 @@ var StoreMemoryUpstash = class extends storage.MemoryStorage {
162
265
  }
163
266
  async saveThread({ thread }) {
164
267
  try {
165
- await this.operations.insert({
268
+ await this.#db.insert({
166
269
  tableName: storage.TABLE_THREADS,
167
270
  record: thread
168
271
  });
@@ -170,7 +273,7 @@ var StoreMemoryUpstash = class extends storage.MemoryStorage {
170
273
  } catch (error$1) {
171
274
  const mastraError = new error.MastraError(
172
275
  {
173
- id: "STORAGE_UPSTASH_STORAGE_SAVE_THREAD_FAILED",
276
+ id: storage.createStorageErrorId("UPSTASH", "SAVE_THREAD", "FAILED"),
174
277
  domain: error.ErrorDomain.STORAGE,
175
278
  category: error.ErrorCategory.THIRD_PARTY,
176
279
  details: {
@@ -192,7 +295,7 @@ var StoreMemoryUpstash = class extends storage.MemoryStorage {
192
295
  const thread = await this.getThreadById({ threadId: id });
193
296
  if (!thread) {
194
297
  throw new error.MastraError({
195
- id: "STORAGE_UPSTASH_STORAGE_UPDATE_THREAD_FAILED",
298
+ id: storage.createStorageErrorId("UPSTASH", "UPDATE_THREAD", "FAILED"),
196
299
  domain: error.ErrorDomain.STORAGE,
197
300
  category: error.ErrorCategory.USER,
198
301
  text: `Thread ${id} not found`,
@@ -215,7 +318,7 @@ var StoreMemoryUpstash = class extends storage.MemoryStorage {
215
318
  } catch (error$1) {
216
319
  throw new error.MastraError(
217
320
  {
218
- id: "STORAGE_UPSTASH_STORAGE_UPDATE_THREAD_FAILED",
321
+ id: storage.createStorageErrorId("UPSTASH", "UPDATE_THREAD", "FAILED"),
219
322
  domain: error.ErrorDomain.STORAGE,
220
323
  category: error.ErrorCategory.THIRD_PARTY,
221
324
  details: {
@@ -240,11 +343,11 @@ var StoreMemoryUpstash = class extends storage.MemoryStorage {
240
343
  pipeline.del(messageKey);
241
344
  }
242
345
  await pipeline.exec();
243
- await this.operations.scanAndDelete(getMessageKey(threadId, "*"));
346
+ await this.#db.scanAndDelete(getMessageKey(threadId, "*"));
244
347
  } catch (error$1) {
245
348
  throw new error.MastraError(
246
349
  {
247
- id: "STORAGE_UPSTASH_STORAGE_DELETE_THREAD_FAILED",
350
+ id: storage.createStorageErrorId("UPSTASH", "DELETE_THREAD", "FAILED"),
248
351
  domain: error.ErrorDomain.STORAGE,
249
352
  category: error.ErrorCategory.THIRD_PARTY,
250
353
  details: {
@@ -270,7 +373,7 @@ var StoreMemoryUpstash = class extends storage.MemoryStorage {
270
373
  } catch (error$1) {
271
374
  throw new error.MastraError(
272
375
  {
273
- id: "STORAGE_UPSTASH_STORAGE_SAVE_MESSAGES_INVALID_ARGS",
376
+ id: storage.createStorageErrorId("UPSTASH", "SAVE_MESSAGES", "INVALID_ARGS"),
274
377
  domain: error.ErrorDomain.STORAGE,
275
378
  category: error.ErrorCategory.USER
276
379
  },
@@ -305,7 +408,7 @@ var StoreMemoryUpstash = class extends storage.MemoryStorage {
305
408
  const createdAtScore = new Date(message.createdAt).getTime();
306
409
  const score = message._index !== void 0 ? message._index : createdAtScore;
307
410
  const existingKeyPattern = getMessageKey("*", message.id);
308
- const keys = await this.operations.scanKeys(existingKeyPattern);
411
+ const keys = await this.#db.scanKeys(existingKeyPattern);
309
412
  if (keys.length > 0) {
310
413
  const pipeline2 = this.client.pipeline();
311
414
  keys.forEach((key2) => pipeline2.get(key2));
@@ -320,6 +423,7 @@ var StoreMemoryUpstash = class extends storage.MemoryStorage {
320
423
  }
321
424
  }
322
425
  pipeline.set(key, message);
426
+ pipeline.set(getMessageIndexKey(message.id), message.threadId);
323
427
  pipeline.zadd(getThreadMessagesKey(message.threadId), {
324
428
  score,
325
429
  member: message.id
@@ -339,7 +443,7 @@ var StoreMemoryUpstash = class extends storage.MemoryStorage {
339
443
  } catch (error$1) {
340
444
  throw new error.MastraError(
341
445
  {
342
- id: "STORAGE_UPSTASH_STORAGE_SAVE_MESSAGES_FAILED",
446
+ id: storage.createStorageErrorId("UPSTASH", "SAVE_MESSAGES", "FAILED"),
343
447
  domain: error.ErrorDomain.STORAGE,
344
448
  category: error.ErrorCategory.THIRD_PARTY,
345
449
  details: {
@@ -350,43 +454,60 @@ var StoreMemoryUpstash = class extends storage.MemoryStorage {
350
454
  );
351
455
  }
352
456
  }
353
- async _getIncludedMessages(threadId, include) {
354
- if (!threadId.trim()) throw new Error("threadId must be a non-empty string");
457
+ /**
458
+ * Lookup threadId for a message - tries index first (O(1)), falls back to scan (backwards compatible)
459
+ */
460
+ async _getThreadIdForMessage(messageId) {
461
+ const indexedThreadId = await this.client.get(getMessageIndexKey(messageId));
462
+ if (indexedThreadId) {
463
+ return indexedThreadId;
464
+ }
465
+ const existingKeyPattern = getMessageKey("*", messageId);
466
+ const keys = await this.#db.scanKeys(existingKeyPattern);
467
+ if (keys.length === 0) return null;
468
+ const messageData = await this.client.get(keys[0]);
469
+ if (!messageData) return null;
470
+ if (messageData.threadId) {
471
+ await this.client.set(getMessageIndexKey(messageId), messageData.threadId);
472
+ }
473
+ return messageData.threadId || null;
474
+ }
475
+ async _getIncludedMessages(include) {
476
+ if (!include?.length) return [];
355
477
  const messageIds = /* @__PURE__ */ new Set();
356
478
  const messageIdToThreadIds = {};
357
- if (include?.length) {
358
- for (const item of include) {
359
- messageIds.add(item.id);
360
- const itemThreadId = item.threadId || threadId;
361
- messageIdToThreadIds[item.id] = itemThreadId;
362
- const itemThreadMessagesKey = getThreadMessagesKey(itemThreadId);
363
- const rank = await this.client.zrank(itemThreadMessagesKey, item.id);
364
- if (rank === null) continue;
365
- if (item.withPreviousMessages) {
366
- const start = Math.max(0, rank - item.withPreviousMessages);
367
- const prevIds = rank === 0 ? [] : await this.client.zrange(itemThreadMessagesKey, start, rank - 1);
368
- prevIds.forEach((id) => {
369
- messageIds.add(id);
370
- messageIdToThreadIds[id] = itemThreadId;
371
- });
372
- }
373
- if (item.withNextMessages) {
374
- const nextIds = await this.client.zrange(itemThreadMessagesKey, rank + 1, rank + item.withNextMessages);
375
- nextIds.forEach((id) => {
376
- messageIds.add(id);
377
- messageIdToThreadIds[id] = itemThreadId;
378
- });
379
- }
479
+ for (const item of include) {
480
+ const itemThreadId = await this._getThreadIdForMessage(item.id);
481
+ if (!itemThreadId) continue;
482
+ messageIds.add(item.id);
483
+ messageIdToThreadIds[item.id] = itemThreadId;
484
+ const itemThreadMessagesKey = getThreadMessagesKey(itemThreadId);
485
+ const rank = await this.client.zrank(itemThreadMessagesKey, item.id);
486
+ if (rank === null) continue;
487
+ if (item.withPreviousMessages) {
488
+ const start = Math.max(0, rank - item.withPreviousMessages);
489
+ const prevIds = rank === 0 ? [] : await this.client.zrange(itemThreadMessagesKey, start, rank - 1);
490
+ prevIds.forEach((id) => {
491
+ messageIds.add(id);
492
+ messageIdToThreadIds[id] = itemThreadId;
493
+ });
494
+ }
495
+ if (item.withNextMessages) {
496
+ const nextIds = await this.client.zrange(itemThreadMessagesKey, rank + 1, rank + item.withNextMessages);
497
+ nextIds.forEach((id) => {
498
+ messageIds.add(id);
499
+ messageIdToThreadIds[id] = itemThreadId;
500
+ });
380
501
  }
381
- const pipeline = this.client.pipeline();
382
- Array.from(messageIds).forEach((id) => {
383
- const tId = messageIdToThreadIds[id] || threadId;
384
- pipeline.get(getMessageKey(tId, id));
385
- });
386
- const results = await pipeline.exec();
387
- return results.filter((result) => result !== null);
388
502
  }
389
- return [];
503
+ if (messageIds.size === 0) return [];
504
+ const pipeline = this.client.pipeline();
505
+ Array.from(messageIds).forEach((id) => {
506
+ const tId = messageIdToThreadIds[id];
507
+ pipeline.get(getMessageKey(tId, id));
508
+ });
509
+ const results = await pipeline.exec();
510
+ return results.filter((result) => result !== null);
390
511
  }
391
512
  parseStoredMessage(storedMessage) {
392
513
  const defaultMessageContent = { format: 2, parts: [{ type: "text", text: "" }] };
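The msg-idx keys introduced above give the memory store an O(1) reverse index from message id to thread id, with a SCAN fallback (plus backfill) for messages written by older versions. A standalone sketch of that lookup pattern against a raw @upstash/redis client; the real table prefix comes from @mastra/core/storage, so the wildcard match below is only an approximation of getMessageKey("*", messageId):

import { Redis } from "@upstash/redis";

async function threadIdForMessage(redis: Redis, messageId: string): Promise<string | null> {
  // Fast path: reverse index written by the new saveMessages/cloneThread code.
  const indexed = await redis.get<string>(`msg-idx:${messageId}`);
  if (indexed) return indexed;
  // Fallback for pre-index data: scan message keys, then backfill the index on a hit.
  let cursor = "0";
  do {
    const [next, keys] = await redis.scan(cursor, { match: `*:threadId:*:id:${messageId}`, count: 1000 });
    for (const key of keys) {
      const message = await redis.get<{ id?: string; threadId?: string }>(key);
      if (message?.id === messageId && message.threadId) {
        await redis.set(`msg-idx:${messageId}`, message.threadId);
        return message.threadId;
      }
    }
    cursor = next;
  } while (cursor !== "0");
  return null;
}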
@@ -400,23 +521,55 @@ var StoreMemoryUpstash = class extends storage.MemoryStorage {
400
521
  async listMessagesById({ messageIds }) {
401
522
  if (messageIds.length === 0) return { messages: [] };
402
523
  try {
403
- const threadKeys = await this.client.keys("thread:*");
404
- const result = await Promise.all(
405
- threadKeys.map((threadKey) => {
406
- const threadId = threadKey.split(":")[1];
407
- if (!threadId) throw new Error(`Failed to parse thread ID from thread key "${threadKey}"`);
408
- return this.client.mget(
409
- messageIds.map((id) => getMessageKey(threadId, id))
410
- );
411
- })
412
- );
413
- const rawMessages = result.flat(1).filter((msg) => !!msg);
524
+ const rawMessages = [];
525
+ const indexPipeline = this.client.pipeline();
526
+ messageIds.forEach((id) => indexPipeline.get(getMessageIndexKey(id)));
527
+ const indexResults = await indexPipeline.exec();
528
+ const indexedIds = [];
529
+ const unindexedIds = [];
530
+ messageIds.forEach((id, i) => {
531
+ const threadId = indexResults[i];
532
+ if (threadId) {
533
+ indexedIds.push({ messageId: id, threadId });
534
+ } else {
535
+ unindexedIds.push(id);
536
+ }
537
+ });
538
+ if (indexedIds.length > 0) {
539
+ const messagePipeline = this.client.pipeline();
540
+ indexedIds.forEach(({ messageId, threadId }) => messagePipeline.get(getMessageKey(threadId, messageId)));
541
+ const messageResults = await messagePipeline.exec();
542
+ rawMessages.push(...messageResults.filter((msg) => msg !== null));
543
+ }
544
+ if (unindexedIds.length > 0) {
545
+ const threadKeys = await this.client.keys("thread:*");
546
+ const result = await Promise.all(
547
+ threadKeys.map((threadKey) => {
548
+ const threadId = threadKey.split(":")[1];
549
+ if (!threadId) throw new Error(`Failed to parse thread ID from thread key "${threadKey}"`);
550
+ return this.client.mget(
551
+ unindexedIds.map((id) => getMessageKey(threadId, id))
552
+ );
553
+ })
554
+ );
555
+ const foundMessages = result.flat(1).filter((msg) => !!msg);
556
+ rawMessages.push(...foundMessages);
557
+ if (foundMessages.length > 0) {
558
+ const backfillPipeline = this.client.pipeline();
559
+ foundMessages.forEach((msg) => {
560
+ if (msg.threadId) {
561
+ backfillPipeline.set(getMessageIndexKey(msg.id), msg.threadId);
562
+ }
563
+ });
564
+ await backfillPipeline.exec();
565
+ }
566
+ }
414
567
  const list = new agent.MessageList().add(rawMessages.map(this.parseStoredMessage), "memory");
415
568
  return { messages: list.get.all.db() };
416
569
  } catch (error$1) {
417
570
  throw new error.MastraError(
418
571
  {
419
- id: "STORAGE_UPSTASH_STORAGE_LIST_MESSAGES_BY_ID_FAILED",
572
+ id: storage.createStorageErrorId("UPSTASH", "LIST_MESSAGES_BY_ID", "FAILED"),
420
573
  domain: error.ErrorDomain.STORAGE,
421
574
  category: error.ErrorCategory.THIRD_PARTY,
422
575
  details: {
@@ -429,25 +582,25 @@ var StoreMemoryUpstash = class extends storage.MemoryStorage {
429
582
  }
430
583
  async listMessages(args) {
431
584
  const { threadId, resourceId, include, filter, perPage: perPageInput, page = 0, orderBy } = args;
432
- if (!threadId.trim()) {
585
+ const threadIds = Array.isArray(threadId) ? threadId : [threadId];
586
+ if (threadIds.length === 0 || threadIds.some((id) => !id.trim())) {
433
587
  throw new error.MastraError(
434
588
  {
435
- id: "STORAGE_UPSTASH_LIST_MESSAGES_INVALID_THREAD_ID",
589
+ id: storage.createStorageErrorId("UPSTASH", "LIST_MESSAGES", "INVALID_THREAD_ID"),
436
590
  domain: error.ErrorDomain.STORAGE,
437
591
  category: error.ErrorCategory.THIRD_PARTY,
438
- details: { threadId }
592
+ details: { threadId: Array.isArray(threadId) ? threadId.join(",") : threadId }
439
593
  },
440
- new Error("threadId must be a non-empty string")
594
+ new Error("threadId must be a non-empty string or array of non-empty strings")
441
595
  );
442
596
  }
443
- const threadMessagesKey = getThreadMessagesKey(threadId);
444
597
  const perPage = storage.normalizePerPage(perPageInput, 40);
445
598
  const { offset, perPage: perPageForResponse } = storage.calculatePagination(page, perPageInput, perPage);
446
599
  try {
447
600
  if (page < 0) {
448
601
  throw new error.MastraError(
449
602
  {
450
- id: "STORAGE_UPSTASH_LIST_MESSAGES_INVALID_PAGE",
603
+ id: storage.createStorageErrorId("UPSTASH", "LIST_MESSAGES", "INVALID_PAGE"),
451
604
  domain: error.ErrorDomain.STORAGE,
452
605
  category: error.ErrorCategory.USER,
453
606
  details: { page }
@@ -457,11 +610,18 @@ var StoreMemoryUpstash = class extends storage.MemoryStorage {
457
610
  }
458
611
  let includedMessages = [];
459
612
  if (include && include.length > 0) {
460
- const included = await this._getIncludedMessages(threadId, include);
613
+ const included = await this._getIncludedMessages(include);
461
614
  includedMessages = included.map(this.parseStoredMessage);
462
615
  }
463
- const allMessageIds = await this.client.zrange(threadMessagesKey, 0, -1);
464
- if (allMessageIds.length === 0) {
616
+ const allMessageIdsWithThreads = [];
617
+ for (const tid of threadIds) {
618
+ const threadMessagesKey = getThreadMessagesKey(tid);
619
+ const messageIds2 = await this.client.zrange(threadMessagesKey, 0, -1);
620
+ for (const mid of messageIds2) {
621
+ allMessageIdsWithThreads.push({ threadId: tid, messageId: mid });
622
+ }
623
+ }
624
+ if (allMessageIdsWithThreads.length === 0) {
465
625
  return {
466
626
  messages: [],
467
627
  total: 0,
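The loop over threadIds above is the visible half of a behavior change: listMessages now accepts either a single thread id or an array and merges the sorted sets of every thread before filtering, sorting, and paginating. A hedged call sketch reusing the memory store from the earlier example (option names mirror the destructuring at the top of listMessages; orderBy is omitted because its shape is parsed by a helper not shown in this diff):

const { messages, total, hasMore } = await memory.listMessages({
  threadId: ["thread-a", "thread-b"], // a single string still works
  resourceId: "user-42",              // optional post-filter on resourceId
  filter: { dateRange: { start: new Date("2024-01-01") } },
  page: 0,
  perPage: 40,
});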
@@ -471,21 +631,17 @@ var StoreMemoryUpstash = class extends storage.MemoryStorage {
471
631
  };
472
632
  }
473
633
  const pipeline = this.client.pipeline();
474
- allMessageIds.forEach((id) => pipeline.get(getMessageKey(threadId, id)));
634
+ allMessageIdsWithThreads.forEach(({ threadId: tid, messageId }) => pipeline.get(getMessageKey(tid, messageId)));
475
635
  const results = await pipeline.exec();
476
636
  let messagesData = results.filter((msg) => msg !== null).map(this.parseStoredMessage);
477
637
  if (resourceId) {
478
638
  messagesData = messagesData.filter((msg) => msg.resourceId === resourceId);
479
639
  }
480
- const dateRange = filter?.dateRange;
481
- if (dateRange?.start) {
482
- const fromDate = dateRange.start;
483
- messagesData = messagesData.filter((msg) => new Date(msg.createdAt).getTime() >= fromDate.getTime());
484
- }
485
- if (dateRange?.end) {
486
- const toDate = dateRange.end;
487
- messagesData = messagesData.filter((msg) => new Date(msg.createdAt).getTime() <= toDate.getTime());
488
- }
640
+ messagesData = storage.filterByDateRange(
641
+ messagesData,
642
+ (msg) => new Date(msg.createdAt),
643
+ filter?.dateRange
644
+ );
489
645
  const { field, direction } = this.parseOrderBy(orderBy, "ASC");
490
646
  const getFieldValue = (msg) => {
491
647
  if (field === "createdAt") {
@@ -500,13 +656,11 @@ var StoreMemoryUpstash = class extends storage.MemoryStorage {
500
656
  }
501
657
  return 0;
502
658
  };
503
- if (orderBy) {
504
- messagesData.sort((a, b) => {
505
- const aValue = getFieldValue(a);
506
- const bValue = getFieldValue(b);
507
- return direction === "ASC" ? aValue - bValue : bValue - aValue;
508
- });
509
- }
659
+ messagesData.sort((a, b) => {
660
+ const aValue = getFieldValue(a);
661
+ const bValue = getFieldValue(b);
662
+ return direction === "ASC" ? aValue - bValue : bValue - aValue;
663
+ });
510
664
  const total = messagesData.length;
511
665
  const start = offset;
512
666
  const end = perPageInput === false ? total : start + perPage;
@@ -527,23 +681,11 @@ var StoreMemoryUpstash = class extends storage.MemoryStorage {
527
681
  }
528
682
  const list = new agent.MessageList().add(allMessages, "memory");
529
683
  let finalMessages = list.get.all.db();
530
- if (orderBy) {
531
- finalMessages = finalMessages.sort((a, b) => {
532
- const aValue = getFieldValue(a);
533
- const bValue = getFieldValue(b);
534
- return direction === "ASC" ? aValue - bValue : bValue - aValue;
535
- });
536
- } else {
537
- const messageIdToPosition = /* @__PURE__ */ new Map();
538
- allMessageIds.forEach((id, index) => {
539
- messageIdToPosition.set(id, index);
540
- });
541
- finalMessages = finalMessages.sort((a, b) => {
542
- const aPos = messageIdToPosition.get(a.id) ?? Number.MAX_SAFE_INTEGER;
543
- const bPos = messageIdToPosition.get(b.id) ?? Number.MAX_SAFE_INTEGER;
544
- return aPos - bPos;
545
- });
546
- }
684
+ finalMessages = finalMessages.sort((a, b) => {
685
+ const aValue = getFieldValue(a);
686
+ const bValue = getFieldValue(b);
687
+ return direction === "ASC" ? aValue - bValue : bValue - aValue;
688
+ });
547
689
  const returnedThreadMessageIds = new Set(finalMessages.filter((m) => m.threadId === threadId).map((m) => m.id));
548
690
  const allThreadMessagesReturned = returnedThreadMessageIds.size >= total;
549
691
  const hasMore = perPageInput !== false && !allThreadMessagesReturned && end < total;
@@ -557,11 +699,11 @@ var StoreMemoryUpstash = class extends storage.MemoryStorage {
557
699
  } catch (error$1) {
558
700
  const mastraError = new error.MastraError(
559
701
  {
560
- id: "STORAGE_UPSTASH_STORAGE_LIST_MESSAGES_FAILED",
702
+ id: storage.createStorageErrorId("UPSTASH", "LIST_MESSAGES", "FAILED"),
561
703
  domain: error.ErrorDomain.STORAGE,
562
704
  category: error.ErrorCategory.THIRD_PARTY,
563
705
  details: {
564
- threadId,
706
+ threadId: Array.isArray(threadId) ? threadId.join(",") : threadId,
565
707
  resourceId: resourceId ?? ""
566
708
  }
567
709
  },
@@ -658,7 +800,7 @@ var StoreMemoryUpstash = class extends storage.MemoryStorage {
658
800
  const messageIdToKey = {};
659
801
  for (const messageId of messageIds) {
660
802
  const pattern = getMessageKey("*", messageId);
661
- const keys = await this.operations.scanKeys(pattern);
803
+ const keys = await this.#db.scanKeys(pattern);
662
804
  for (const key of keys) {
663
805
  const message = await this.client.get(key);
664
806
  if (message && message.id === messageId) {
@@ -748,7 +890,7 @@ var StoreMemoryUpstash = class extends storage.MemoryStorage {
748
890
  } catch (error$1) {
749
891
  throw new error.MastraError(
750
892
  {
751
- id: "STORAGE_UPSTASH_STORAGE_UPDATE_MESSAGES_FAILED",
893
+ id: storage.createStorageErrorId("UPSTASH", "UPDATE_MESSAGES", "FAILED"),
752
894
  domain: error.ErrorDomain.STORAGE,
753
895
  category: error.ErrorCategory.THIRD_PARTY,
754
896
  details: {
@@ -766,13 +908,33 @@ var StoreMemoryUpstash = class extends storage.MemoryStorage {
766
908
  try {
767
909
  const threadIds = /* @__PURE__ */ new Set();
768
910
  const messageKeys = [];
769
- for (const messageId of messageIds) {
911
+ const foundMessageIds = [];
912
+ const indexPipeline = this.client.pipeline();
913
+ messageIds.forEach((id) => indexPipeline.get(getMessageIndexKey(id)));
914
+ const indexResults = await indexPipeline.exec();
915
+ const indexedMessages = [];
916
+ const unindexedMessageIds = [];
917
+ messageIds.forEach((id, i) => {
918
+ const threadId = indexResults[i];
919
+ if (threadId) {
920
+ indexedMessages.push({ messageId: id, threadId });
921
+ } else {
922
+ unindexedMessageIds.push(id);
923
+ }
924
+ });
925
+ for (const { messageId, threadId } of indexedMessages) {
926
+ messageKeys.push(getMessageKey(threadId, messageId));
927
+ foundMessageIds.push(messageId);
928
+ threadIds.add(threadId);
929
+ }
930
+ for (const messageId of unindexedMessageIds) {
770
931
  const pattern = getMessageKey("*", messageId);
771
- const keys = await this.operations.scanKeys(pattern);
932
+ const keys = await this.#db.scanKeys(pattern);
772
933
  for (const key of keys) {
773
934
  const message = await this.client.get(key);
774
935
  if (message && message.id === messageId) {
775
936
  messageKeys.push(key);
937
+ foundMessageIds.push(messageId);
776
938
  if (message.threadId) {
777
939
  threadIds.add(message.threadId);
778
940
  }
@@ -787,6 +949,9 @@ var StoreMemoryUpstash = class extends storage.MemoryStorage {
787
949
  for (const key of messageKeys) {
788
950
  pipeline.del(key);
789
951
  }
952
+ for (const messageId of foundMessageIds) {
953
+ pipeline.del(getMessageIndexKey(messageId));
954
+ }
790
955
  if (threadIds.size > 0) {
791
956
  for (const threadId of threadIds) {
792
957
  const threadKey = getKey(storage.TABLE_THREADS, { id: threadId });
@@ -804,7 +969,7 @@ var StoreMemoryUpstash = class extends storage.MemoryStorage {
804
969
  } catch (error$1) {
805
970
  throw new error.MastraError(
806
971
  {
807
- id: "STORAGE_UPSTASH_DELETE_MESSAGES_FAILED",
972
+ id: storage.createStorageErrorId("UPSTASH", "DELETE_MESSAGES", "FAILED"),
808
973
  domain: error.ErrorDomain.STORAGE,
809
974
  category: error.ErrorCategory.THIRD_PARTY,
810
975
  details: { messageIds: messageIds.join(", ") }
@@ -824,182 +989,137 @@ var StoreMemoryUpstash = class extends storage.MemoryStorage {
824
989
  }
825
990
  });
826
991
  }
827
- };
828
- var StoreOperationsUpstash = class extends storage.StoreOperations {
829
- client;
830
- constructor({ client }) {
831
- super();
832
- this.client = client;
833
- }
834
- async createTable({
835
- tableName: _tableName,
836
- schema: _schema
837
- }) {
838
- }
839
- async alterTable({
840
- tableName: _tableName,
841
- schema: _schema,
842
- ifNotExists: _ifNotExists
843
- }) {
844
- }
845
- async clearTable({ tableName }) {
846
- const pattern = `${tableName}:*`;
847
- try {
848
- await this.scanAndDelete(pattern);
849
- } catch (error$1) {
850
- throw new error.MastraError(
851
- {
852
- id: "STORAGE_UPSTASH_STORAGE_CLEAR_TABLE_FAILED",
853
- domain: error.ErrorDomain.STORAGE,
854
- category: error.ErrorCategory.THIRD_PARTY,
855
- details: {
856
- tableName
857
- }
858
- },
859
- error$1
860
- );
992
+ async cloneThread(args) {
993
+ const { sourceThreadId, newThreadId: providedThreadId, resourceId, title, metadata, options } = args;
994
+ const sourceThread = await this.getThreadById({ threadId: sourceThreadId });
995
+ if (!sourceThread) {
996
+ throw new error.MastraError({
997
+ id: storage.createStorageErrorId("UPSTASH", "CLONE_THREAD", "SOURCE_NOT_FOUND"),
998
+ domain: error.ErrorDomain.STORAGE,
999
+ category: error.ErrorCategory.USER,
1000
+ text: `Source thread with id ${sourceThreadId} not found`,
1001
+ details: { sourceThreadId }
1002
+ });
861
1003
  }
862
- }
863
- async dropTable({ tableName }) {
864
- return this.clearTable({ tableName });
865
- }
866
- async insert({ tableName, record }) {
867
- const { key, processedRecord } = processRecord(tableName, record);
868
- try {
869
- await this.client.set(key, processedRecord);
870
- } catch (error$1) {
871
- throw new error.MastraError(
872
- {
873
- id: "STORAGE_UPSTASH_STORAGE_INSERT_FAILED",
874
- domain: error.ErrorDomain.STORAGE,
875
- category: error.ErrorCategory.THIRD_PARTY,
876
- details: {
877
- tableName
878
- }
879
- },
880
- error$1
881
- );
1004
+ const newThreadId = providedThreadId || crypto.randomUUID();
1005
+ const existingThread = await this.getThreadById({ threadId: newThreadId });
1006
+ if (existingThread) {
1007
+ throw new error.MastraError({
1008
+ id: storage.createStorageErrorId("UPSTASH", "CLONE_THREAD", "THREAD_EXISTS"),
1009
+ domain: error.ErrorDomain.STORAGE,
1010
+ category: error.ErrorCategory.USER,
1011
+ text: `Thread with id ${newThreadId} already exists`,
1012
+ details: { newThreadId }
1013
+ });
882
1014
  }
883
- }
884
- async batchInsert(input) {
885
- const { tableName, records } = input;
886
- if (!records.length) return;
887
- const batchSize = 1e3;
888
1015
  try {
889
- for (let i = 0; i < records.length; i += batchSize) {
890
- const batch = records.slice(i, i + batchSize);
891
- const pipeline = this.client.pipeline();
892
- for (const record of batch) {
893
- const { key, processedRecord } = processRecord(tableName, record);
894
- pipeline.set(key, processedRecord);
895
- }
896
- await pipeline.exec();
1016
+ const threadMessagesKey = getThreadMessagesKey(sourceThreadId);
1017
+ const messageIds = await this.client.zrange(threadMessagesKey, 0, -1);
1018
+ const pipeline = this.client.pipeline();
1019
+ for (const mid of messageIds) {
1020
+ pipeline.get(getMessageKey(sourceThreadId, mid));
897
1021
  }
898
- } catch (error$1) {
899
- throw new error.MastraError(
900
- {
901
- id: "STORAGE_UPSTASH_STORAGE_BATCH_INSERT_FAILED",
902
- domain: error.ErrorDomain.STORAGE,
903
- category: error.ErrorCategory.THIRD_PARTY,
904
- details: {
905
- tableName
906
- }
1022
+ const results = await pipeline.exec();
1023
+ let sourceMessages = results.filter((msg) => msg !== null).map((msg) => ({
1024
+ ...msg,
1025
+ createdAt: new Date(msg.createdAt)
1026
+ }));
1027
+ if (options?.messageFilter?.startDate || options?.messageFilter?.endDate) {
1028
+ sourceMessages = storage.filterByDateRange(sourceMessages, (msg) => new Date(msg.createdAt), {
1029
+ start: options.messageFilter?.startDate,
1030
+ end: options.messageFilter?.endDate
1031
+ });
1032
+ }
1033
+ if (options?.messageFilter?.messageIds && options.messageFilter.messageIds.length > 0) {
1034
+ const messageIdSet = new Set(options.messageFilter.messageIds);
1035
+ sourceMessages = sourceMessages.filter((msg) => messageIdSet.has(msg.id));
1036
+ }
1037
+ sourceMessages.sort((a, b) => new Date(a.createdAt).getTime() - new Date(b.createdAt).getTime());
1038
+ if (options?.messageLimit && options.messageLimit > 0 && sourceMessages.length > options.messageLimit) {
1039
+ sourceMessages = sourceMessages.slice(-options.messageLimit);
1040
+ }
1041
+ const now = /* @__PURE__ */ new Date();
1042
+ const lastMessageId = sourceMessages.length > 0 ? sourceMessages[sourceMessages.length - 1].id : void 0;
1043
+ const cloneMetadata = {
1044
+ sourceThreadId,
1045
+ clonedAt: now,
1046
+ ...lastMessageId && { lastMessageId }
1047
+ };
1048
+ const newThread = {
1049
+ id: newThreadId,
1050
+ resourceId: resourceId || sourceThread.resourceId,
1051
+ title: title || (sourceThread.title ? `Clone of ${sourceThread.title}` : void 0),
1052
+ metadata: {
1053
+ ...metadata,
1054
+ clone: cloneMetadata
907
1055
  },
908
- error$1
909
- );
910
- }
911
- }
912
- async load({ tableName, keys }) {
913
- const key = getKey(tableName, keys);
914
- try {
915
- const data = await this.client.get(key);
916
- return data || null;
1056
+ createdAt: now,
1057
+ updatedAt: now
1058
+ };
1059
+ const writePipeline = this.client.pipeline();
1060
+ const threadKey = getKey(storage.TABLE_THREADS, { id: newThreadId });
1061
+ writePipeline.set(threadKey, processRecord(storage.TABLE_THREADS, newThread).processedRecord);
1062
+ const clonedMessages = [];
1063
+ const targetResourceId = resourceId || sourceThread.resourceId;
1064
+ const newThreadMessagesKey = getThreadMessagesKey(newThreadId);
1065
+ for (let i = 0; i < sourceMessages.length; i++) {
1066
+ const sourceMsg = sourceMessages[i];
1067
+ const newMessageId = crypto.randomUUID();
1068
+ const { _index, ...restMsg } = sourceMsg;
1069
+ const newMessage = {
1070
+ ...restMsg,
1071
+ id: newMessageId,
1072
+ threadId: newThreadId,
1073
+ resourceId: targetResourceId
1074
+ };
1075
+ const messageKey = getMessageKey(newThreadId, newMessageId);
1076
+ writePipeline.set(messageKey, newMessage);
1077
+ writePipeline.set(getMessageIndexKey(newMessageId), newThreadId);
1078
+ writePipeline.zadd(newThreadMessagesKey, {
1079
+ score: i,
1080
+ member: newMessageId
1081
+ });
1082
+ clonedMessages.push(newMessage);
1083
+ }
1084
+ await writePipeline.exec();
1085
+ return {
1086
+ thread: newThread,
1087
+ clonedMessages
1088
+ };
917
1089
  } catch (error$1) {
1090
+ if (error$1 instanceof error.MastraError) {
1091
+ throw error$1;
1092
+ }
918
1093
  throw new error.MastraError(
919
1094
  {
920
- id: "STORAGE_UPSTASH_STORAGE_LOAD_FAILED",
1095
+ id: storage.createStorageErrorId("UPSTASH", "CLONE_THREAD", "FAILED"),
921
1096
  domain: error.ErrorDomain.STORAGE,
922
1097
  category: error.ErrorCategory.THIRD_PARTY,
923
- details: {
924
- tableName
925
- }
1098
+ details: { sourceThreadId, newThreadId }
926
1099
  },
927
1100
  error$1
928
1101
  );
929
1102
  }
930
1103
  }
931
- async hasColumn(_tableName, _column) {
932
- return true;
933
- }
934
- async scanKeys(pattern, batchSize = 1e4) {
935
- let cursor = "0";
936
- let keys = [];
937
- do {
938
- const [nextCursor, batch] = await this.client.scan(cursor, {
939
- match: pattern,
940
- count: batchSize
941
- });
942
- keys.push(...batch);
943
- cursor = nextCursor;
944
- } while (cursor !== "0");
945
- return keys;
946
- }
947
- async scanAndDelete(pattern, batchSize = 1e4) {
948
- let cursor = "0";
949
- let totalDeleted = 0;
950
- do {
951
- const [nextCursor, keys] = await this.client.scan(cursor, {
952
- match: pattern,
953
- count: batchSize
954
- });
955
- if (keys.length > 0) {
956
- await this.client.del(...keys);
957
- totalDeleted += keys.length;
958
- }
959
- cursor = nextCursor;
960
- } while (cursor !== "0");
961
- return totalDeleted;
962
- }
963
1104
  };
964
1105
  function transformScoreRow(row) {
965
- const parseField = (v) => {
966
- if (typeof v === "string") {
967
- try {
968
- return JSON.parse(v);
969
- } catch {
970
- return v;
971
- }
972
- }
973
- return v;
974
- };
975
- return {
976
- ...row,
977
- scorer: parseField(row.scorer),
978
- preprocessStepResult: parseField(row.preprocessStepResult),
979
- generateScorePrompt: row.generateScorePrompt,
980
- generateReasonPrompt: row.generateReasonPrompt,
981
- analyzeStepResult: parseField(row.analyzeStepResult),
982
- metadata: parseField(row.metadata),
983
- input: parseField(row.input),
984
- output: parseField(row.output),
985
- additionalContext: parseField(row.additionalContext),
986
- requestContext: parseField(row.requestContext),
987
- entity: parseField(row.entity),
988
- createdAt: row.createdAt,
989
- updatedAt: row.updatedAt
990
- };
1106
+ return storage.transformScoreRow(row);
991
1107
  }
992
1108
  var ScoresUpstash = class extends storage.ScoresStorage {
993
1109
  client;
994
- operations;
995
- constructor({ client, operations }) {
1110
+ #db;
1111
+ constructor(config) {
996
1112
  super();
1113
+ const client = resolveUpstashConfig(config);
997
1114
  this.client = client;
998
- this.operations = operations;
1115
+ this.#db = new UpstashDB({ client });
1116
+ }
1117
+ async dangerouslyClearAll() {
1118
+ await this.#db.deleteData({ tableName: storage.TABLE_SCORERS });
999
1119
  }
1000
1120
  async getScoreById({ id }) {
1001
1121
  try {
1002
- const data = await this.operations.load({
1122
+ const data = await this.#db.get({
1003
1123
  tableName: storage.TABLE_SCORERS,
1004
1124
  keys: { id }
1005
1125
  });
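cloneThread, added in the hunk above, copies a thread's messages under fresh ids into a new thread, records provenance under metadata.clone, and honours optional date/id filters plus a trailing messageLimit. A rough call sketch (argument names are taken from the destructuring in the method; the memory store is assumed to be constructed as in the earlier examples):

const { thread, clonedMessages } = await memory.cloneThread({
  sourceThreadId: "thread-a",
  // newThreadId is optional; crypto.randomUUID() is used when omitted
  resourceId: "user-42",
  title: "Replay of thread-a",
  metadata: { reason: "debugging" },
  options: {
    messageFilter: { startDate: new Date("2024-01-01") },
    messageLimit: 50, // keep only the most recent 50 matching messages
  },
});
console.log(thread.metadata.clone); // { sourceThreadId, clonedAt, lastMessageId? }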
@@ -1008,10 +1128,12 @@ var ScoresUpstash = class extends storage.ScoresStorage {
1008
1128
  } catch (error$1) {
1009
1129
  throw new error.MastraError(
1010
1130
  {
1011
- id: "STORAGE_UPSTASH_STORAGE_GET_SCORE_BY_ID_FAILED",
1131
+ id: storage.createStorageErrorId("UPSTASH", "GET_SCORE_BY_ID", "FAILED"),
1012
1132
  domain: error.ErrorDomain.STORAGE,
1013
1133
  category: error.ErrorCategory.THIRD_PARTY,
1014
- details: { id }
1134
+ details: {
1135
+ ...id && { id }
1136
+ }
1015
1137
  },
1016
1138
  error$1
1017
1139
  );
@@ -1025,7 +1147,7 @@ var ScoresUpstash = class extends storage.ScoresStorage {
1025
1147
  pagination = { page: 0, perPage: 20 }
1026
1148
  }) {
1027
1149
  const pattern = `${storage.TABLE_SCORERS}:*`;
1028
- const keys = await this.operations.scanKeys(pattern);
1150
+ const keys = await this.#db.scanKeys(pattern);
1029
1151
  const { page, perPage: perPageInput } = pagination;
1030
1152
  if (keys.length === 0) {
1031
1153
  return {
@@ -1077,24 +1199,41 @@ var ScoresUpstash = class extends storage.ScoresStorage {
1077
1199
  } catch (error$1) {
1078
1200
  throw new error.MastraError(
1079
1201
  {
1080
- id: "STORAGE_UPSTASH_STORAGE_SAVE_SCORE_VALIDATION_FAILED",
1202
+ id: storage.createStorageErrorId("UPSTASH", "SAVE_SCORE", "VALIDATION_FAILED"),
1081
1203
  domain: error.ErrorDomain.STORAGE,
1082
- category: error.ErrorCategory.THIRD_PARTY
1204
+ category: error.ErrorCategory.USER,
1205
+ details: {
1206
+ scorer: typeof score.scorer?.id === "string" ? score.scorer.id : String(score.scorer?.id ?? "unknown"),
1207
+ entityId: score.entityId ?? "unknown",
1208
+ entityType: score.entityType ?? "unknown",
1209
+ traceId: score.traceId ?? "",
1210
+ spanId: score.spanId ?? ""
1211
+ }
1083
1212
  },
1084
1213
  error$1
1085
1214
  );
1086
1215
  }
1087
- const { key, processedRecord } = processRecord(storage.TABLE_SCORERS, validatedScore);
1216
+ const now = /* @__PURE__ */ new Date();
1217
+ const id = crypto.randomUUID();
1218
+ const createdAt = now;
1219
+ const updatedAt = now;
1220
+ const scoreWithId = {
1221
+ ...validatedScore,
1222
+ id,
1223
+ createdAt,
1224
+ updatedAt
1225
+ };
1226
+ const { key, processedRecord } = processRecord(storage.TABLE_SCORERS, scoreWithId);
1088
1227
  try {
1089
1228
  await this.client.set(key, processedRecord);
1090
- return { score };
1229
+ return { score: { ...validatedScore, id, createdAt, updatedAt } };
1091
1230
  } catch (error$1) {
1092
1231
  throw new error.MastraError(
1093
1232
  {
1094
- id: "STORAGE_UPSTASH_STORAGE_SAVE_SCORE_FAILED",
1233
+ id: storage.createStorageErrorId("UPSTASH", "SAVE_SCORE", "FAILED"),
1095
1234
  domain: error.ErrorDomain.STORAGE,
1096
1235
  category: error.ErrorCategory.THIRD_PARTY,
1097
- details: { id: score.id }
1236
+ details: { id }
1098
1237
  },
1099
1238
  error$1
1100
1239
  );
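Two score-related changes meet here: the processRecord hunk near the top keys score records by id instead of runId, and saveScore now generates id, createdAt, and updatedAt itself rather than trusting the caller, returning them on the saved score. A hedged sketch (the ScoresUpstash export path and the payload shape validated by @mastra/core are assumptions):

import { ScoresUpstash } from "@mastra/upstash"; // assumed export path

const scoresStore = new ScoresUpstash({
  url: process.env.UPSTASH_REDIS_REST_URL!,
  token: process.env.UPSTASH_REDIS_REST_TOKEN!,
});

// `payload` stands for whatever score shape the core validator accepts (omitted here).
declare const payload: Parameters<typeof scoresStore.saveScore>[0];

const { score } = await scoresStore.saveScore(payload);
// Generated by the store in this version instead of being taken from the input:
score.id;        // crypto.randomUUID()
score.createdAt; // save time
score.updatedAt; // save time
// The record now lives under `${TABLE_SCORERS}:id:${score.id}` (it was keyed by runId before).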
@@ -1105,7 +1244,7 @@ var ScoresUpstash = class extends storage.ScoresStorage {
1105
1244
  pagination = { page: 0, perPage: 20 }
1106
1245
  }) {
1107
1246
  const pattern = `${storage.TABLE_SCORERS}:*`;
1108
- const keys = await this.operations.scanKeys(pattern);
1247
+ const keys = await this.#db.scanKeys(pattern);
1109
1248
  const { page, perPage: perPageInput } = pagination;
1110
1249
  if (keys.length === 0) {
1111
1250
  return {
@@ -1149,7 +1288,7 @@ var ScoresUpstash = class extends storage.ScoresStorage {
1149
1288
  pagination = { page: 0, perPage: 20 }
1150
1289
  }) {
1151
1290
  const pattern = `${storage.TABLE_SCORERS}:*`;
1152
- const keys = await this.operations.scanKeys(pattern);
1291
+ const keys = await this.#db.scanKeys(pattern);
1153
1292
  const { page, perPage: perPageInput } = pagination;
1154
1293
  if (keys.length === 0) {
1155
1294
  return {
@@ -1198,7 +1337,7 @@ var ScoresUpstash = class extends storage.ScoresStorage {
1198
1337
  pagination = { page: 0, perPage: 20 }
1199
1338
  }) {
1200
1339
  const pattern = `${storage.TABLE_SCORERS}:*`;
1201
- const keys = await this.operations.scanKeys(pattern);
1340
+ const keys = await this.#db.scanKeys(pattern);
1202
1341
  const { page, perPage: perPageInput } = pagination;
1203
1342
  if (keys.length === 0) {
1204
1343
  return {
@@ -1255,39 +1394,116 @@ function parseWorkflowRun(row) {
1255
1394
  workflowName: row.workflow_name,
1256
1395
  runId: row.run_id,
1257
1396
  snapshot: parsedSnapshot,
1258
- createdAt: ensureDate(row.createdAt),
1259
- updatedAt: ensureDate(row.updatedAt),
1397
+ createdAt: storage.ensureDate(row.createdAt),
1398
+ updatedAt: storage.ensureDate(row.updatedAt),
1260
1399
  resourceId: row.resourceId
1261
1400
  };
1262
1401
  }
1263
1402
  var WorkflowsUpstash = class extends storage.WorkflowsStorage {
1264
1403
  client;
1265
- operations;
1266
- constructor({ client, operations }) {
1404
+ #db;
1405
+ constructor(config) {
1267
1406
  super();
1407
+ const client = resolveUpstashConfig(config);
1268
1408
  this.client = client;
1269
- this.operations = operations;
1270
- }
1271
- updateWorkflowResults({
1272
- // workflowName,
1273
- // runId,
1274
- // stepId,
1275
- // result,
1276
- // requestContext,
1409
+ this.#db = new UpstashDB({ client });
1410
+ }
1411
+ async dangerouslyClearAll() {
1412
+ await this.#db.deleteData({ tableName: storage.TABLE_WORKFLOW_SNAPSHOT });
1413
+ }
1414
+ async updateWorkflowResults({
1415
+ workflowName,
1416
+ runId,
1417
+ stepId,
1418
+ result,
1419
+ requestContext
1277
1420
  }) {
1278
- throw new Error("Method not implemented.");
1421
+ try {
1422
+ const existingSnapshot = await this.loadWorkflowSnapshot({
1423
+ namespace: "workflows",
1424
+ workflowName,
1425
+ runId
1426
+ });
1427
+ let snapshot;
1428
+ if (!existingSnapshot) {
1429
+ snapshot = {
1430
+ context: {},
1431
+ activePaths: [],
1432
+ timestamp: Date.now(),
1433
+ suspendedPaths: {},
1434
+ activeStepsPath: {},
1435
+ resumeLabels: {},
1436
+ serializedStepGraph: [],
1437
+ status: "pending",
1438
+ value: {},
1439
+ waitingPaths: {},
1440
+ runId,
1441
+ requestContext: {}
1442
+ };
1443
+ } else {
1444
+ snapshot = existingSnapshot;
1445
+ }
1446
+ snapshot.context[stepId] = result;
1447
+ snapshot.requestContext = { ...snapshot.requestContext, ...requestContext };
1448
+ await this.persistWorkflowSnapshot({
1449
+ namespace: "workflows",
1450
+ workflowName,
1451
+ runId,
1452
+ snapshot
1453
+ });
1454
+ return snapshot.context;
1455
+ } catch (error$1) {
1456
+ if (error$1 instanceof error.MastraError) throw error$1;
1457
+ throw new error.MastraError(
1458
+ {
1459
+ id: storage.createStorageErrorId("UPSTASH", "UPDATE_WORKFLOW_RESULTS", "FAILED"),
1460
+ domain: error.ErrorDomain.STORAGE,
1461
+ category: error.ErrorCategory.THIRD_PARTY,
1462
+ details: { workflowName, runId, stepId }
1463
+ },
1464
+ error$1
1465
+ );
1466
+ }
1279
1467
  }
1280
- updateWorkflowState({
1281
- // workflowName,
1282
- // runId,
1283
- // opts,
1468
+ async updateWorkflowState({
1469
+ workflowName,
1470
+ runId,
1471
+ opts
1284
1472
  }) {
1285
- throw new Error("Method not implemented.");
1473
+ try {
1474
+ const existingSnapshot = await this.loadWorkflowSnapshot({
1475
+ namespace: "workflows",
1476
+ workflowName,
1477
+ runId
1478
+ });
1479
+ if (!existingSnapshot || !existingSnapshot.context) {
1480
+ return void 0;
1481
+ }
1482
+ const updatedSnapshot = { ...existingSnapshot, ...opts };
1483
+ await this.persistWorkflowSnapshot({
1484
+ namespace: "workflows",
1485
+ workflowName,
1486
+ runId,
1487
+ snapshot: updatedSnapshot
1488
+ });
1489
+ return updatedSnapshot;
1490
+ } catch (error$1) {
1491
+ if (error$1 instanceof error.MastraError) throw error$1;
1492
+ throw new error.MastraError(
1493
+ {
1494
+ id: storage.createStorageErrorId("UPSTASH", "UPDATE_WORKFLOW_STATE", "FAILED"),
1495
+ domain: error.ErrorDomain.STORAGE,
1496
+ category: error.ErrorCategory.THIRD_PARTY,
1497
+ details: { workflowName, runId }
1498
+ },
1499
+ error$1
1500
+ );
1501
+ }
1286
1502
  }
1287
1503
  async persistWorkflowSnapshot(params) {
1288
- const { namespace = "workflows", workflowName, runId, resourceId, snapshot } = params;
1504
+ const { namespace = "workflows", workflowName, runId, resourceId, snapshot, createdAt, updatedAt } = params;
1289
1505
  try {
1290
- await this.operations.insert({
1506
+ await this.#db.insert({
1291
1507
  tableName: storage.TABLE_WORKFLOW_SNAPSHOT,
1292
1508
  record: {
1293
1509
  namespace,
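updateWorkflowResults and updateWorkflowState, which previously threw "Method not implemented.", are now real: the first loads (or bootstraps) the run's snapshot, writes the step result into snapshot.context, merges requestContext, and persists; the second shallow-merges opts into an existing snapshot and returns it, or undefined when no snapshot exists. A hedged sketch (export path assumed as with the other domain stores; the result and opts payloads come from @mastra/core and are illustrative):

import { WorkflowsUpstash } from "@mastra/upstash"; // assumed export path

const workflows = new WorkflowsUpstash({
  url: process.env.UPSTASH_REDIS_REST_URL!,
  token: process.env.UPSTASH_REDIS_REST_TOKEN!,
});

const context = await workflows.updateWorkflowResults({
  workflowName: "order-flow",
  runId: "run-123",
  stepId: "charge-card",
  result: { status: "success", output: { chargeId: "ch_1" } }, // illustrative step result
  requestContext: { tenant: "acme" },
});
// `context` is the snapshot's per-step result map, including the step just written.

const updated = await workflows.updateWorkflowState({
  workflowName: "order-flow",
  runId: "run-123",
  opts: { status: "running" }, // merged over the stored snapshot
});
// `updated` is the merged snapshot, or undefined when no snapshot was stored yet.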
@@ -1295,14 +1511,14 @@ var WorkflowsUpstash = class extends storage.WorkflowsStorage {
1295
1511
  run_id: runId,
1296
1512
  resourceId,
1297
1513
  snapshot,
1298
- createdAt: /* @__PURE__ */ new Date(),
1299
- updatedAt: /* @__PURE__ */ new Date()
1514
+ createdAt: createdAt ?? /* @__PURE__ */ new Date(),
1515
+ updatedAt: updatedAt ?? /* @__PURE__ */ new Date()
1300
1516
  }
1301
1517
  });
1302
1518
  } catch (error$1) {
1303
1519
  throw new error.MastraError(
1304
1520
  {
1305
- id: "STORAGE_UPSTASH_STORAGE_PERSIST_WORKFLOW_SNAPSHOT_FAILED",
1521
+ id: storage.createStorageErrorId("UPSTASH", "PERSIST_WORKFLOW_SNAPSHOT", "FAILED"),
1306
1522
  domain: error.ErrorDomain.STORAGE,
1307
1523
  category: error.ErrorCategory.THIRD_PARTY,
1308
1524
  details: {
@@ -1329,7 +1545,7 @@ var WorkflowsUpstash = class extends storage.WorkflowsStorage {
1329
1545
  } catch (error$1) {
1330
1546
  throw new error.MastraError(
1331
1547
  {
1332
- id: "STORAGE_UPSTASH_STORAGE_LOAD_WORKFLOW_SNAPSHOT_FAILED",
1548
+ id: storage.createStorageErrorId("UPSTASH", "LOAD_WORKFLOW_SNAPSHOT", "FAILED"),
1333
1549
  domain: error.ErrorDomain.STORAGE,
1334
1550
  category: error.ErrorCategory.THIRD_PARTY,
1335
1551
  details: {
@@ -1348,7 +1564,7 @@ var WorkflowsUpstash = class extends storage.WorkflowsStorage {
1348
1564
  }) {
1349
1565
  try {
1350
1566
  const key = getKey(storage.TABLE_WORKFLOW_SNAPSHOT, { namespace: "workflows", workflow_name: workflowName, run_id: runId }) + "*";
1351
- const keys = await this.operations.scanKeys(key);
1567
+ const keys = await this.#db.scanKeys(key);
1352
1568
  const workflows = await Promise.all(
1353
1569
  keys.map(async (key2) => {
1354
1570
  const data2 = await this.client.get(key2);
@@ -1361,7 +1577,7 @@ var WorkflowsUpstash = class extends storage.WorkflowsStorage {
1361
1577
  } catch (error$1) {
1362
1578
  throw new error.MastraError(
1363
1579
  {
1364
- id: "STORAGE_UPSTASH_STORAGE_GET_WORKFLOW_RUN_BY_ID_FAILED",
1580
+ id: storage.createStorageErrorId("UPSTASH", "GET_WORKFLOW_RUN_BY_ID", "FAILED"),
1365
1581
  domain: error.ErrorDomain.STORAGE,
1366
1582
  category: error.ErrorCategory.THIRD_PARTY,
1367
1583
  details: {
@@ -1374,6 +1590,26 @@ var WorkflowsUpstash = class extends storage.WorkflowsStorage {
1374
1590
  );
1375
1591
  }
1376
1592
  }
1593
+ async deleteWorkflowRunById({ runId, workflowName }) {
1594
+ const key = getKey(storage.TABLE_WORKFLOW_SNAPSHOT, { namespace: "workflows", workflow_name: workflowName, run_id: runId });
1595
+ try {
1596
+ await this.client.del(key);
1597
+ } catch (error$1) {
1598
+ throw new error.MastraError(
1599
+ {
1600
+ id: storage.createStorageErrorId("UPSTASH", "DELETE_WORKFLOW_RUN_BY_ID", "FAILED"),
1601
+ domain: error.ErrorDomain.STORAGE,
1602
+ category: error.ErrorCategory.THIRD_PARTY,
1603
+ details: {
1604
+ namespace: "workflows",
1605
+ runId,
1606
+ workflowName
1607
+ }
1608
+ },
1609
+ error$1
1610
+ );
1611
+ }
1612
+ }
1377
1613
  async listWorkflowRuns({
1378
1614
  workflowName,
1379
1615
  fromDate,
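Two smaller workflow additions sit around here: deleteWorkflowRunById removes a run's snapshot key outright, and listWorkflowRuns can now be called with no arguments at all (its parameter defaults to {} in the next hunk). A brief sketch, continuing with the workflows instance from the previous example:

const { runs, total } = await workflows.listWorkflowRuns();        // now valid with no arguments
const { runs: byName } = await workflows.listWorkflowRuns({ workflowName: "order-flow" });

await workflows.deleteWorkflowRunById({ workflowName: "order-flow", runId: "run-123" });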
@@ -1382,12 +1618,12 @@ var WorkflowsUpstash = class extends storage.WorkflowsStorage {
1382
1618
  page,
1383
1619
  resourceId,
1384
1620
  status
1385
- }) {
1621
+ } = {}) {
1386
1622
  try {
1387
1623
  if (page !== void 0 && page < 0) {
1388
1624
  throw new error.MastraError(
1389
1625
  {
1390
- id: "UPSTASH_STORE_INVALID_PAGE",
1626
+ id: storage.createStorageErrorId("UPSTASH", "LIST_WORKFLOW_RUNS", "INVALID_PAGE"),
1391
1627
  domain: error.ErrorDomain.STORAGE,
1392
1628
  category: error.ErrorCategory.USER,
1393
1629
  details: { page }
@@ -1413,7 +1649,7 @@ var WorkflowsUpstash = class extends storage.WorkflowsStorage {
1413
1649
  resourceId
1414
1650
  });
1415
1651
  }
1416
- const keys = await this.operations.scanKeys(pattern);
1652
+ const keys = await this.#db.scanKeys(pattern);
1417
1653
  if (keys.length === 0) {
1418
1654
  return { runs: [], total: 0 };
1419
1655
  }
@@ -1447,9 +1683,10 @@ var WorkflowsUpstash = class extends storage.WorkflowsStorage {
1447
1683
  }
1448
1684
  return { runs, total };
1449
1685
  } catch (error$1) {
1686
+ if (error$1 instanceof error.MastraError) throw error$1;
1450
1687
  throw new error.MastraError(
1451
1688
  {
1452
- id: "STORAGE_UPSTASH_STORAGE_LIST_WORKFLOW_RUNS_FAILED",
1689
+ id: storage.createStorageErrorId("UPSTASH", "LIST_WORKFLOW_RUNS", "FAILED"),
1453
1690
  domain: error.ErrorDomain.STORAGE,
1454
1691
  category: error.ErrorCategory.THIRD_PARTY,
1455
1692
  details: {
@@ -1465,179 +1702,39 @@ var WorkflowsUpstash = class extends storage.WorkflowsStorage {
1465
1702
  };
1466
1703
 
1467
1704
  // src/storage/index.ts
1705
+ var isClientConfig = (config) => {
1706
+ return "client" in config;
1707
+ };
1468
1708
  var UpstashStore = class extends storage.MastraStorage {
1469
1709
  redis;
1470
1710
  stores;
1471
1711
  constructor(config) {
1472
- super({ id: config.id, name: "Upstash" });
1473
- this.redis = new redis.Redis({
1474
- url: config.url,
1475
- token: config.token
1476
- });
1477
- const operations = new StoreOperationsUpstash({ client: this.redis });
1478
- const scores = new ScoresUpstash({ client: this.redis, operations });
1479
- const workflows = new WorkflowsUpstash({ client: this.redis, operations });
1480
- const memory = new StoreMemoryUpstash({ client: this.redis, operations });
1712
+ super({ id: config.id, name: "Upstash", disableInit: config.disableInit });
1713
+ if (isClientConfig(config)) {
1714
+ this.redis = config.client;
1715
+ } else {
1716
+ if (!config.url || typeof config.url !== "string" || config.url.trim() === "") {
1717
+ throw new Error("UpstashStore: url is required and cannot be empty.");
1718
+ }
1719
+ if (!config.token || typeof config.token !== "string" || config.token.trim() === "") {
1720
+ throw new Error("UpstashStore: token is required and cannot be empty.");
1721
+ }
1722
+ this.redis = new redis.Redis({
1723
+ url: config.url,
1724
+ token: config.token
1725
+ });
1726
+ }
1727
+ const scores = new ScoresUpstash({ client: this.redis });
+ const workflows = new WorkflowsUpstash({ client: this.redis });
+ const memory = new StoreMemoryUpstash({ client: this.redis });
  this.stores = {
- operations,
  scores,
  workflows,
  memory
  };
  }
- get supports() {
- return {
- selectByIncludeResourceScope: true,
- resourceWorkingMemory: true,
- hasColumn: false,
- createTable: false,
- deleteMessages: true,
- listScoresBySpan: true
- };
- }
- async createTable({
- tableName,
- schema
- }) {
- return this.stores.operations.createTable({ tableName, schema });
- }
- /**
- * No-op: This backend is schemaless and does not require schema changes.
- * @param tableName Name of the table
- * @param schema Schema of the table
- * @param ifNotExists Array of column names to add if they don't exist
- */
- async alterTable(args) {
- return this.stores.operations.alterTable(args);
- }
- async clearTable({ tableName }) {
- return this.stores.operations.clearTable({ tableName });
- }
- async dropTable({ tableName }) {
- return this.stores.operations.dropTable({ tableName });
- }
- async insert({ tableName, record }) {
- return this.stores.operations.insert({ tableName, record });
- }
- async batchInsert(input) {
- return this.stores.operations.batchInsert(input);
- }
- async load({ tableName, keys }) {
- return this.stores.operations.load({ tableName, keys });
- }
- async getThreadById({ threadId }) {
- return this.stores.memory.getThreadById({ threadId });
- }
- async saveThread({ thread }) {
- return this.stores.memory.saveThread({ thread });
- }
- async updateThread({
- id,
- title,
- metadata
- }) {
- return this.stores.memory.updateThread({ id, title, metadata });
- }
- async deleteThread({ threadId }) {
- return this.stores.memory.deleteThread({ threadId });
- }
- async saveMessages(args) {
- return this.stores.memory.saveMessages(args);
- }
- async listMessagesById({ messageIds }) {
- return this.stores.memory.listMessagesById({ messageIds });
- }
- async updateWorkflowResults({
- workflowName,
- runId,
- stepId,
- result,
- requestContext
- }) {
- return this.stores.workflows.updateWorkflowResults({ workflowName, runId, stepId, result, requestContext });
- }
- async updateWorkflowState({
- workflowName,
- runId,
- opts
- }) {
- return this.stores.workflows.updateWorkflowState({ workflowName, runId, opts });
- }
- async persistWorkflowSnapshot(params) {
- return this.stores.workflows.persistWorkflowSnapshot(params);
- }
- async loadWorkflowSnapshot(params) {
- return this.stores.workflows.loadWorkflowSnapshot(params);
- }
- async listWorkflowRuns(args = {}) {
- return this.stores.workflows.listWorkflowRuns(args);
- }
- async getWorkflowRunById({
- runId,
- workflowName
- }) {
- return this.stores.workflows.getWorkflowRunById({ runId, workflowName });
- }
  async close() {
  }
- async updateMessages(args) {
- return this.stores.memory.updateMessages(args);
- }
- async deleteMessages(messageIds) {
- return this.stores.memory.deleteMessages(messageIds);
- }
- async getResourceById({ resourceId }) {
- return this.stores.memory.getResourceById({ resourceId });
- }
- async saveResource({ resource }) {
- return this.stores.memory.saveResource({ resource });
- }
- async updateResource({
- resourceId,
- workingMemory,
- metadata
- }) {
- return this.stores.memory.updateResource({ resourceId, workingMemory, metadata });
- }
- async getScoreById({ id: _id }) {
- return this.stores.scores.getScoreById({ id: _id });
- }
- async saveScore(score) {
- return this.stores.scores.saveScore(score);
- }
- async listScoresByRunId({
- runId,
- pagination
- }) {
- return this.stores.scores.listScoresByRunId({ runId, pagination });
- }
- async listScoresByEntityId({
- entityId,
- entityType,
- pagination
- }) {
- return this.stores.scores.listScoresByEntityId({
- entityId,
- entityType,
- pagination
- });
- }
- async listScoresByScorerId({
- scorerId,
- pagination,
- entityId,
- entityType,
- source
- }) {
- return this.stores.scores.listScoresByScorerId({ scorerId, pagination, entityId, entityType, source });
- }
- async listScoresBySpan({
- traceId,
- spanId,
- pagination
- }) {
- return this.stores.scores.listScoresBySpan({ traceId, spanId, pagination });
- }
  };
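The constructor above now wires three per-domain stores (ScoresUpstash, WorkflowsUpstash, StoreMemoryUpstash) into this.stores, and the delegating methods removed above map one-to-one onto those classes: memory handles threads, messages and resources; workflows handles snapshots and runs; scores handles score queries. A minimal sketch of using one of the new domain stores directly, assuming the { client } constructor shape shown above, an @upstash/redis client, and method names taken from the removed delegates; none of this is verified against the package's type declarations:

  import { Redis } from '@upstash/redis';
  import { StoreMemoryUpstash } from '@mastra/upstash';

  // Illustrative credentials; any configured @upstash/redis client should work here.
  const client = new Redis({
    url: process.env.UPSTASH_REDIS_REST_URL!,
    token: process.env.UPSTASH_REDIS_REST_TOKEN!,
  });

  // Mirrors `new StoreMemoryUpstash({ client: this.redis })` from the constructor diff.
  const memory = new StoreMemoryUpstash({ client });

  // Method name comes from the removed getThreadById delegate; the thread id is illustrative.
  const thread = await memory.getThreadById({ threadId: 'thread-123' });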
  var UpstashFilterTranslator = class extends filter.BaseFilterTranslator {
  getSupportedOperators() {
@@ -1860,7 +1957,7 @@ var UpstashVector = class extends vector.MastraVector {
  ids,
  sparseVectors
  }) {
- const generatedIds = ids || vectors.map(() => crypto.randomUUID());
+ const generatedIds = ids || vectors.map(() => crypto$1.randomUUID());
  const points = vectors.map((vector, index) => ({
  id: generatedIds[index],
  vector,
@@ -1875,7 +1972,7 @@ var UpstashVector = class extends vector.MastraVector {
  } catch (error$1) {
  throw new error.MastraError(
  {
- id: "STORAGE_UPSTASH_VECTOR_UPSERT_FAILED",
+ id: storage.createVectorErrorId("UPSTASH", "UPSERT", "FAILED"),
  domain: error.ErrorDomain.STORAGE,
  category: error.ErrorCategory.THIRD_PARTY,
  details: { namespace, vectorCount: vectors.length }
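Two changes are visible in this and the previous hunk: upsert still generates a UUID per vector when ids are omitted (now through the aliased crypto$1 import), and the hard-coded error id "STORAGE_UPSTASH_VECTOR_UPSERT_FAILED" becomes storage.createVectorErrorId("UPSTASH", "UPSERT", "FAILED"), which presumably rebuilds the same identifier from its parts. A minimal upsert sketch, assuming UpstashVector is constructed from Upstash Vector REST credentials (the constructor is not part of this diff) and that the public signature matches the destructured parameters shown above; the namespace, embedding values and metadata are illustrative:

  import { UpstashVector } from '@mastra/upstash';

  // Assumed constructor shape; only the class body appears in this diff.
  const store = new UpstashVector({
    url: process.env.UPSTASH_VECTOR_REST_URL!,
    token: process.env.UPSTASH_VECTOR_REST_TOKEN!,
  });

  // With `ids` omitted, the compiled code above falls back to crypto.randomUUID() per vector.
  await store.upsert({
    indexName: 'my-namespace',
    vectors: [[0.1, 0.2, 0.3]], // illustrative 3-dimensional embedding
    metadata: [{ source: 'docs' }],
  });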
@@ -1938,7 +2035,7 @@ var UpstashVector = class extends vector.MastraVector {
  } catch (error$1) {
  throw new error.MastraError(
  {
- id: "STORAGE_UPSTASH_VECTOR_QUERY_FAILED",
+ id: storage.createVectorErrorId("UPSTASH", "QUERY", "FAILED"),
  domain: error.ErrorDomain.STORAGE,
  category: error.ErrorCategory.THIRD_PARTY,
  details: { namespace, topK }
@@ -1958,7 +2055,7 @@ var UpstashVector = class extends vector.MastraVector {
  } catch (error$1) {
  throw new error.MastraError(
  {
- id: "STORAGE_UPSTASH_VECTOR_LIST_INDEXES_FAILED",
+ id: storage.createVectorErrorId("UPSTASH", "LIST_INDEXES", "FAILED"),
  domain: error.ErrorDomain.STORAGE,
  category: error.ErrorCategory.THIRD_PARTY
  },
@@ -1983,7 +2080,7 @@ var UpstashVector = class extends vector.MastraVector {
  } catch (error$1) {
  throw new error.MastraError(
  {
- id: "STORAGE_UPSTASH_VECTOR_DESCRIBE_INDEX_FAILED",
+ id: storage.createVectorErrorId("UPSTASH", "DESCRIBE_INDEX", "FAILED"),
  domain: error.ErrorDomain.STORAGE,
  category: error.ErrorCategory.THIRD_PARTY,
  details: { namespace }
@@ -2001,9 +2098,14 @@ var UpstashVector = class extends vector.MastraVector {
  try {
  await this.client.deleteNamespace(namespace);
  } catch (error$1) {
+ const errorMessage = error$1?.message || "";
+ if (errorMessage.includes("does not exist") || errorMessage.includes("not found")) {
+ this.logger.info(`Namespace ${namespace} does not exist, treating as already deleted`);
+ return;
+ }
  throw new error.MastraError(
  {
- id: "STORAGE_UPSTASH_VECTOR_DELETE_INDEX_FAILED",
+ id: storage.createVectorErrorId("UPSTASH", "DELETE_INDEX", "FAILED"),
  domain: error.ErrorDomain.STORAGE,
  category: error.ErrorCategory.THIRD_PARTY,
  details: { namespace }
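deleteIndex (the DELETE_INDEX error id above points at that method) is now tolerant of missing namespaces: when the Upstash error message contains "does not exist" or "not found", it logs at info level and returns instead of throwing. A short sketch of the resulting idempotent behavior, reusing the store instance from the upsert sketch and assuming a deleteIndex({ indexName }) signature, which is not shown in this hunk:

  // First call removes the namespace.
  await store.deleteIndex({ indexName: 'my-namespace' });

  // A second call no longer throws: the "does not exist" error from Upstash is
  // logged and treated as "already deleted".
  await store.deleteIndex({ indexName: 'my-namespace' });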
@@ -2013,47 +2115,124 @@ var UpstashVector = class extends vector.MastraVector {
  }
  }
  /**
- * Updates a vector by its ID with the provided vector and/or metadata.
- * @param indexName - The name of the namespace containing the vector.
- * @param id - The ID of the vector to update.
- * @param update - An object containing the vector and/or metadata to update.
- * @param update.vector - An optional array of numbers representing the new vector.
- * @param update.metadata - An optional record containing the new metadata.
+ * Updates a vector by its ID or multiple vectors matching a filter.
+ * @param params - Parameters containing the id or filter for targeting the vector(s) to update
+ * @param params.indexName - The name of the namespace containing the vector.
+ * @param params.id - The ID of the vector to update (mutually exclusive with filter).
+ * @param params.filter - Filter to match multiple vectors to update (mutually exclusive with id).
+ * @param params.update - An object containing the vector and/or metadata to update.
  * @returns A promise that resolves when the update is complete.
  * @throws Will throw an error if no updates are provided or if the update operation fails.
  */
- async updateVector({ indexName: namespace, id, update }) {
- if (!update.vector && !update.metadata && !update.sparseVector) {
+ async updateVector(params) {
+ const { indexName: namespace, update } = params;
+ const upstashUpdate = update;
+ const sparseVector = upstashUpdate.sparseVector;
+ if ("id" in params && params.id && "filter" in params && params.filter) {
+ throw new error.MastraError({
+ id: storage.createVectorErrorId("UPSTASH", "UPDATE_VECTOR", "MUTUALLY_EXCLUSIVE"),
+ text: "Cannot specify both id and filter - they are mutually exclusive",
+ domain: error.ErrorDomain.STORAGE,
+ category: error.ErrorCategory.USER,
+ details: { namespace }
+ });
+ }
+ if (!("id" in params && params.id) && !("filter" in params && params.filter)) {
+ throw new error.MastraError({
+ id: storage.createVectorErrorId("UPSTASH", "UPDATE_VECTOR", "NO_TARGET"),
+ text: "Either id or filter must be provided",
+ domain: error.ErrorDomain.STORAGE,
+ category: error.ErrorCategory.USER,
+ details: { namespace }
+ });
+ }
+ if (!update.vector && !update.metadata && !sparseVector) {
  throw new error.MastraError({
- id: "STORAGE_UPSTASH_VECTOR_UPDATE_VECTOR_FAILED",
+ id: storage.createVectorErrorId("UPSTASH", "UPDATE_VECTOR", "NO_PAYLOAD"),
+ text: "No update data provided",
  domain: error.ErrorDomain.STORAGE,
- category: error.ErrorCategory.THIRD_PARTY,
- details: { namespace, id },
- text: "No update data provided"
+ category: error.ErrorCategory.USER,
+ details: { namespace }
  });
  }
- if (!update.vector && !update.sparseVector && update.metadata) {
+ if ("filter" in params && params.filter && Object.keys(params.filter).length === 0) {
  throw new error.MastraError({
- id: "STORAGE_UPSTASH_VECTOR_UPDATE_VECTOR_FAILED",
+ id: storage.createVectorErrorId("UPSTASH", "UPDATE_VECTOR", "EMPTY_FILTER"),
+ text: "Filter cannot be an empty filter object",
  domain: error.ErrorDomain.STORAGE,
- category: error.ErrorCategory.THIRD_PARTY,
- details: { namespace, id },
- text: "Both vector and metadata must be provided for an update"
+ category: error.ErrorCategory.USER,
+ details: { namespace }
  });
  }
  try {
- const points = { id };
- if (update.vector) points.vector = update.vector;
- if (update.metadata) points.metadata = update.metadata;
- if (update.sparseVector) points.sparseVector = update.sparseVector;
- await this.client.upsert(points, { namespace });
+ const ns = this.client.namespace(namespace);
+ if ("id" in params && params.id) {
+ const points = { id: params.id };
+ if (!update.vector || !update.metadata) {
+ try {
+ const existing = await ns.fetch([params.id], {
+ includeVectors: true,
+ includeMetadata: true
+ });
+ if (existing && existing.length > 0 && existing[0]) {
+ if (!update.vector && existing[0]?.vector) {
+ points.vector = existing[0].vector;
+ }
+ if (!update.metadata && existing[0]?.metadata) {
+ points.metadata = existing[0].metadata;
+ }
+ }
+ } catch (fetchError) {
+ this.logger.warn(`Failed to fetch existing vector ${params.id} for partial update: ${fetchError}`);
+ }
+ }
+ if (update.vector) points.vector = update.vector;
+ if (update.metadata) points.metadata = update.metadata;
+ if (sparseVector) points.sparseVector = sparseVector;
+ await ns.upsert(points);
+ } else if ("filter" in params && params.filter) {
+ const filterString = this.transformFilter(params.filter);
+ if (filterString) {
+ const stats = await this.describeIndex({ indexName: namespace });
+ const dummyVector = new Array(stats.dimension).fill(1 / Math.sqrt(stats.dimension));
+ const needsVectors = !update.vector;
+ const results = await ns.query({
+ vector: dummyVector,
+ topK: 1e3,
+ // Upstash's max query limit
+ filter: filterString,
+ includeVectors: needsVectors,
+ includeMetadata: needsVectors
+ });
+ for (const result of results) {
+ const points = { id: `${result.id}` };
+ if (update.vector) {
+ points.vector = update.vector;
+ } else if (result.vector) {
+ points.vector = result.vector;
+ }
+ if (update.metadata) {
+ points.metadata = update.metadata;
+ } else if (result.metadata) {
+ points.metadata = result.metadata;
+ }
+ if (sparseVector) points.sparseVector = sparseVector;
+ await ns.upsert(points);
+ }
+ }
+ }
  } catch (error$1) {
+ if (error$1 instanceof error.MastraError) throw error$1;
  throw new error.MastraError(
  {
- id: "STORAGE_UPSTASH_VECTOR_UPDATE_VECTOR_FAILED",
+ id: storage.createVectorErrorId("UPSTASH", "UPDATE_VECTOR", "FAILED"),
  domain: error.ErrorDomain.STORAGE,
  category: error.ErrorCategory.THIRD_PARTY,
- details: { namespace, id }
+ details: {
+ namespace,
+ ..."id" in params && params.id && { id: params.id },
+ ..."filter" in params && params.filter && { filter: JSON.stringify(params.filter) }
+ }
  },
  error$1
  );
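updateVector now targets either a single id or a filter, and the two are mutually exclusive. Id-based partial updates fetch the existing point first so an omitted vector or metadata is carried over rather than dropped; filter-based updates resolve matches by querying the namespace with a uniform dummy vector (each component 1/sqrt(dimension), with dimension taken from describeIndex) at Upstash's maximum topK of 1000, then re-upsert each match. A usage sketch reusing the store instance from the upsert sketch; the call shape follows the compiled code above, while the namespace, id and filter fields are illustrative:

  // Update only the metadata of one point; the existing vector is fetched and preserved.
  await store.updateVector({
    indexName: 'my-namespace',
    id: 'doc-1',
    update: { metadata: { reviewed: true } },
  });

  // Update every point matching a metadata filter. Internally this queries up to
  // 1000 matches with the dummy vector and re-upserts each one.
  await store.updateVector({
    indexName: 'my-namespace',
    filter: { source: 'docs' },
    update: { metadata: { reviewed: true } },
  });

  // Passing both id and filter, neither of them, an empty filter object, or an empty
  // update now throws a USER-category MastraError instead of a generic failure.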
@@ -2068,22 +2247,109 @@ var UpstashVector = class extends vector.MastraVector {
  */
  async deleteVector({ indexName: namespace, id }) {
  try {
- await this.client.delete(id, {
- namespace
- });
+ const ns = this.client.namespace(namespace);
+ await ns.delete(id);
  } catch (error$1) {
  const mastraError = new error.MastraError(
  {
- id: "STORAGE_UPSTASH_VECTOR_DELETE_VECTOR_FAILED",
+ id: storage.createVectorErrorId("UPSTASH", "DELETE_VECTOR", "FAILED"),
  domain: error.ErrorDomain.STORAGE,
  category: error.ErrorCategory.THIRD_PARTY,
- details: { namespace, id }
+ details: {
+ namespace,
+ ...id && { id }
+ }
  },
  error$1
  );
  this.logger?.error(mastraError.toString());
  }
  }
+ /**
+ * Deletes multiple vectors by IDs or filter.
+ * @param indexName - The name of the namespace containing the vectors.
+ * @param ids - Array of vector IDs to delete (mutually exclusive with filter).
+ * @param filter - Filter to match vectors to delete (mutually exclusive with ids).
+ * @returns A promise that resolves when the deletion is complete.
+ * @throws Will throw an error if both ids and filter are provided, or if neither is provided.
+ */
+ async deleteVectors({ indexName: namespace, filter, ids }) {
+ if (ids && filter) {
+ throw new error.MastraError({
+ id: storage.createVectorErrorId("UPSTASH", "DELETE_VECTORS", "MUTUALLY_EXCLUSIVE"),
+ text: "Cannot specify both ids and filter - they are mutually exclusive",
+ domain: error.ErrorDomain.STORAGE,
+ category: error.ErrorCategory.USER,
+ details: { namespace }
+ });
+ }
+ if (!ids && !filter) {
+ throw new error.MastraError({
+ id: storage.createVectorErrorId("UPSTASH", "DELETE_VECTORS", "NO_TARGET"),
+ text: "Either filter or ids must be provided",
+ domain: error.ErrorDomain.STORAGE,
+ category: error.ErrorCategory.USER,
+ details: { namespace }
+ });
+ }
+ if (ids && ids.length === 0) {
+ throw new error.MastraError({
+ id: storage.createVectorErrorId("UPSTASH", "DELETE_VECTORS", "EMPTY_IDS"),
+ text: "Cannot delete with empty ids array",
+ domain: error.ErrorDomain.STORAGE,
+ category: error.ErrorCategory.USER,
+ details: { namespace }
+ });
+ }
+ if (filter && Object.keys(filter).length === 0) {
+ throw new error.MastraError({
+ id: storage.createVectorErrorId("UPSTASH", "DELETE_VECTORS", "EMPTY_FILTER"),
+ text: "Cannot delete with empty filter object",
+ domain: error.ErrorDomain.STORAGE,
+ category: error.ErrorCategory.USER,
+ details: { namespace }
+ });
+ }
+ try {
+ const ns = this.client.namespace(namespace);
+ if (ids) {
+ await ns.delete(ids);
+ } else if (filter) {
+ const filterString = this.transformFilter(filter);
+ if (filterString) {
+ const stats = await this.describeIndex({ indexName: namespace });
+ const dummyVector = new Array(stats.dimension).fill(1 / Math.sqrt(stats.dimension));
+ const results = await ns.query({
+ vector: dummyVector,
+ topK: 1e3,
+ // Upstash's max query limit
+ filter: filterString,
+ includeVectors: false,
+ includeMetadata: false
+ });
+ const idsToDelete = results.map((r) => `${r.id}`);
+ if (idsToDelete.length > 0) {
+ await ns.delete(idsToDelete);
+ }
+ }
+ }
+ } catch (error$1) {
+ if (error$1 instanceof error.MastraError) throw error$1;
+ throw new error.MastraError(
+ {
+ id: storage.createVectorErrorId("UPSTASH", "DELETE_VECTORS", "FAILED"),
+ domain: error.ErrorDomain.STORAGE,
+ category: error.ErrorCategory.THIRD_PARTY,
+ details: {
+ namespace,
+ ...filter && { filter: JSON.stringify(filter) },
+ ...ids && { idsCount: ids.length }
+ }
+ },
+ error$1
+ );
+ }
+ }
  };
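The new deleteVectors method applies the same targeting rules as updateVector: pass either ids or a filter, never both, never neither, and never an empty array or empty object. Filter-based deletion resolves matching ids with the same dummy-vector query (topK 1000) and then deletes them in a single call. A short sketch under the same assumptions as the earlier examples:

  // Delete specific points by id.
  await store.deleteVectors({ indexName: 'my-namespace', ids: ['doc-1', 'doc-2'] });

  // Delete everything matching a metadata filter (fields are illustrative). Only the
  // first 1000 matches returned by the internal query are deleted per call.
  await store.deleteVectors({ indexName: 'my-namespace', filter: { source: 'docs' } });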
 
  // src/vector/prompt.ts
@@ -2161,8 +2427,11 @@ Example Complex Query:
  ]
  }`;

+ exports.ScoresUpstash = ScoresUpstash;
+ exports.StoreMemoryUpstash = StoreMemoryUpstash;
  exports.UPSTASH_PROMPT = UPSTASH_PROMPT;
  exports.UpstashStore = UpstashStore;
  exports.UpstashVector = UpstashVector;
+ exports.WorkflowsUpstash = WorkflowsUpstash;
  //# sourceMappingURL=index.cjs.map
  //# sourceMappingURL=index.cjs.map
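The CJS entry point now exports the three domain stores alongside UpstashStore, UpstashVector and UPSTASH_PROMPT. A minimal import sketch (ESM/TypeScript shown; the require form exposes the same names), with the { client } constructor shape assumed from the constructor diff earlier in this file and the method name taken from the removed listWorkflowRuns delegate:

  import { Redis } from '@upstash/redis';
  import { WorkflowsUpstash } from '@mastra/upstash';
  // Also exported: UpstashStore, UpstashVector, UPSTASH_PROMPT, ScoresUpstash, StoreMemoryUpstash.

  const client = new Redis({
    url: process.env.UPSTASH_REDIS_REST_URL!,
    token: process.env.UPSTASH_REDIS_REST_TOKEN!,
  });

  const workflows = new WorkflowsUpstash({ client });
  const runs = await workflows.listWorkflowRuns();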