@mastra/upstash 1.0.0-beta.9 → 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/index.js CHANGED
@@ -1,4 +1,4 @@
- import { MemoryStorage, TABLE_THREADS, TABLE_MESSAGES, TABLE_RESOURCES, ensureDate, createStorageErrorId, normalizePerPage, calculatePagination, ScoresStorage, TABLE_SCORERS, WorkflowsStorage, TABLE_WORKFLOW_SNAPSHOT, MastraStorage, createVectorErrorId, serializeDate, transformScoreRow as transformScoreRow$1 } from '@mastra/core/storage';
+ import { MemoryStorage, TABLE_THREADS, TABLE_MESSAGES, TABLE_RESOURCES, ensureDate, createStorageErrorId, normalizePerPage, calculatePagination, filterByDateRange, ScoresStorage, TABLE_SCORERS, WorkflowsStorage, TABLE_WORKFLOW_SNAPSHOT, MastraCompositeStore, createVectorErrorId, serializeDate, transformScoreRow as transformScoreRow$1 } from '@mastra/core/storage';
  import { Redis } from '@upstash/redis';
  import { MessageList } from '@mastra/core/agent';
  import { MastraError, ErrorCategory, ErrorDomain } from '@mastra/core/error';
@@ -190,19 +190,34 @@ var StoreMemoryUpstash = class extends MemoryStorage {
  );
  }
  }
- async listThreadsByResourceId(args) {
- const { resourceId, page = 0, perPage: perPageInput, orderBy } = args;
+ async listThreads(args) {
+ const { page = 0, perPage: perPageInput, orderBy, filter } = args;
  const { field, direction } = this.parseOrderBy(orderBy);
+ try {
+ this.validatePaginationInput(page, perPageInput ?? 100);
+ } catch (error) {
+ throw new MastraError(
+ {
+ id: createStorageErrorId("UPSTASH", "LIST_THREADS", "INVALID_PAGE"),
+ domain: ErrorDomain.STORAGE,
+ category: ErrorCategory.USER,
+ details: { page, ...perPageInput !== void 0 && { perPage: perPageInput } }
+ },
+ error instanceof Error ? error : new Error("Invalid pagination parameters")
+ );
+ }
  const perPage = normalizePerPage(perPageInput, 100);
- if (page < 0) {
+ try {
+ this.validateMetadataKeys(filter?.metadata);
+ } catch (error) {
  throw new MastraError(
  {
- id: createStorageErrorId("UPSTASH", "LIST_THREADS_BY_RESOURCE_ID", "INVALID_PAGE"),
+ id: createStorageErrorId("UPSTASH", "LIST_THREADS", "INVALID_METADATA_KEY"),
  domain: ErrorDomain.STORAGE,
  category: ErrorCategory.USER,
- details: { page }
+ details: { metadataKeys: filter?.metadata ? Object.keys(filter.metadata).join(", ") : "" }
  },
- new Error("page must be >= 0")
+ error instanceof Error ? error : new Error("Invalid metadata key")
  );
  }
  const { offset, perPage: perPageForResponse } = calculatePagination(page, perPageInput, perPage);
@@ -210,19 +225,35 @@ var StoreMemoryUpstash = class extends MemoryStorage {
  let allThreads = [];
  const pattern = `${TABLE_THREADS}:*`;
  const keys = await this.#db.scanKeys(pattern);
+ if (keys.length === 0) {
+ return {
+ threads: [],
+ total: 0,
+ page,
+ perPage: perPageForResponse,
+ hasMore: false
+ };
+ }
  const pipeline = this.client.pipeline();
  keys.forEach((key) => pipeline.get(key));
  const results = await pipeline.exec();
  for (let i = 0; i < results.length; i++) {
  const thread = results[i];
- if (thread && thread.resourceId === resourceId) {
- allThreads.push({
- ...thread,
- createdAt: ensureDate(thread.createdAt),
- updatedAt: ensureDate(thread.updatedAt),
- metadata: typeof thread.metadata === "string" ? JSON.parse(thread.metadata) : thread.metadata
- });
+ if (!thread) continue;
+ if (filter?.resourceId && thread.resourceId !== filter.resourceId) {
+ continue;
+ }
+ if (filter?.metadata && Object.keys(filter.metadata).length > 0) {
+ const threadMetadata = typeof thread.metadata === "string" ? JSON.parse(thread.metadata) : thread.metadata;
+ const matches = Object.entries(filter.metadata).every(([key, value]) => threadMetadata?.[key] === value);
+ if (!matches) continue;
  }
+ allThreads.push({
+ ...thread,
+ createdAt: ensureDate(thread.createdAt),
+ updatedAt: ensureDate(thread.updatedAt),
+ metadata: typeof thread.metadata === "string" ? JSON.parse(thread.metadata) : thread.metadata
+ });
  }
  const sortedThreads = this.sortThreads(allThreads, field, direction);
  const total = sortedThreads.length;
@@ -239,11 +270,12 @@ var StoreMemoryUpstash = class extends MemoryStorage {
  } catch (error) {
  const mastraError = new MastraError(
  {
- id: createStorageErrorId("UPSTASH", "LIST_THREADS_BY_RESOURCE_ID", "FAILED"),
+ id: createStorageErrorId("UPSTASH", "LIST_THREADS", "FAILED"),
  domain: ErrorDomain.STORAGE,
  category: ErrorCategory.THIRD_PARTY,
  details: {
- resourceId,
+ ...filter?.resourceId && { resourceId: filter.resourceId },
+ hasMetadataFilter: !!filter?.metadata,
  page,
  perPage
  }
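
Note: the hunks above replace listThreadsByResourceId with a more general listThreads that takes an optional filter (resourceId and/or exact-match metadata keys) alongside page, perPage, and orderBy, and validates pagination and metadata keys up front. A minimal usage sketch under stated assumptions: memoryStore stands in for however the Upstash memory store instance is obtained in your setup, and the IDs and metadata values are illustrative only.

    // Sketch, not taken from the package: only the argument and result shapes
    // below (threads, total, page, perPage, hasMore) appear in the diff.
    const result = await memoryStore.listThreads({
      page: 0,
      perPage: 50,
      filter: {
        resourceId: 'user-123',            // keep only threads owned by this resource
        metadata: { projectId: 'proj-1' }, // every key/value must match exactly
      },
    });
    console.log(result.total, result.hasMore, result.threads.length);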
@@ -635,15 +667,11 @@ var StoreMemoryUpstash = class extends MemoryStorage {
  if (resourceId) {
  messagesData = messagesData.filter((msg) => msg.resourceId === resourceId);
  }
- const dateRange = filter?.dateRange;
- if (dateRange?.start) {
- const fromDate = dateRange.start;
- messagesData = messagesData.filter((msg) => new Date(msg.createdAt).getTime() >= fromDate.getTime());
- }
- if (dateRange?.end) {
- const toDate = dateRange.end;
- messagesData = messagesData.filter((msg) => new Date(msg.createdAt).getTime() <= toDate.getTime());
- }
+ messagesData = filterByDateRange(
+ messagesData,
+ (msg) => new Date(msg.createdAt),
+ filter?.dateRange
+ );
  const { field, direction } = this.parseOrderBy(orderBy, "ASC");
  const getFieldValue = (msg) => {
  if (field === "createdAt") {
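
Note: the hand-rolled start/end date checks above are replaced by the shared filterByDateRange helper now imported from @mastra/core/storage. Its behavior here is inferred from the code it replaces, not from the helper's actual implementation; a sketch of those inferred semantics, assuming an optional inclusive start/end range:

    // Hypothetical stand-in named filterByDateRangeSketch to avoid implying this is
    // the real @mastra/core/storage code: keep items whose date falls inside the range.
    function filterByDateRangeSketch(items, getDate, dateRange) {
      if (!dateRange) return items;
      return items.filter((item) => {
        const t = getDate(item).getTime();
        if (dateRange.start && t < dateRange.start.getTime()) return false;
        if (dateRange.end && t > dateRange.end.getTime()) return false;
        return true;
      });
    }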
@@ -991,6 +1019,118 @@ var StoreMemoryUpstash = class extends MemoryStorage {
  }
  });
  }
+ async cloneThread(args) {
+ const { sourceThreadId, newThreadId: providedThreadId, resourceId, title, metadata, options } = args;
+ const sourceThread = await this.getThreadById({ threadId: sourceThreadId });
+ if (!sourceThread) {
+ throw new MastraError({
+ id: createStorageErrorId("UPSTASH", "CLONE_THREAD", "SOURCE_NOT_FOUND"),
+ domain: ErrorDomain.STORAGE,
+ category: ErrorCategory.USER,
+ text: `Source thread with id ${sourceThreadId} not found`,
+ details: { sourceThreadId }
+ });
+ }
+ const newThreadId = providedThreadId || crypto.randomUUID();
+ const existingThread = await this.getThreadById({ threadId: newThreadId });
+ if (existingThread) {
+ throw new MastraError({
+ id: createStorageErrorId("UPSTASH", "CLONE_THREAD", "THREAD_EXISTS"),
+ domain: ErrorDomain.STORAGE,
+ category: ErrorCategory.USER,
+ text: `Thread with id ${newThreadId} already exists`,
+ details: { newThreadId }
+ });
+ }
+ try {
+ const threadMessagesKey = getThreadMessagesKey(sourceThreadId);
+ const messageIds = await this.client.zrange(threadMessagesKey, 0, -1);
+ const pipeline = this.client.pipeline();
+ for (const mid of messageIds) {
+ pipeline.get(getMessageKey(sourceThreadId, mid));
+ }
+ const results = await pipeline.exec();
+ let sourceMessages = results.filter((msg) => msg !== null).map((msg) => ({
+ ...msg,
+ createdAt: new Date(msg.createdAt)
+ }));
+ if (options?.messageFilter?.startDate || options?.messageFilter?.endDate) {
+ sourceMessages = filterByDateRange(sourceMessages, (msg) => new Date(msg.createdAt), {
+ start: options.messageFilter?.startDate,
+ end: options.messageFilter?.endDate
+ });
+ }
+ if (options?.messageFilter?.messageIds && options.messageFilter.messageIds.length > 0) {
+ const messageIdSet = new Set(options.messageFilter.messageIds);
+ sourceMessages = sourceMessages.filter((msg) => messageIdSet.has(msg.id));
+ }
+ sourceMessages.sort((a, b) => new Date(a.createdAt).getTime() - new Date(b.createdAt).getTime());
+ if (options?.messageLimit && options.messageLimit > 0 && sourceMessages.length > options.messageLimit) {
+ sourceMessages = sourceMessages.slice(-options.messageLimit);
+ }
+ const now = /* @__PURE__ */ new Date();
+ const lastMessageId = sourceMessages.length > 0 ? sourceMessages[sourceMessages.length - 1].id : void 0;
+ const cloneMetadata = {
+ sourceThreadId,
+ clonedAt: now,
+ ...lastMessageId && { lastMessageId }
+ };
+ const newThread = {
+ id: newThreadId,
+ resourceId: resourceId || sourceThread.resourceId,
+ title: title || (sourceThread.title ? `Clone of ${sourceThread.title}` : void 0),
+ metadata: {
+ ...metadata,
+ clone: cloneMetadata
+ },
+ createdAt: now,
+ updatedAt: now
+ };
+ const writePipeline = this.client.pipeline();
+ const threadKey = getKey(TABLE_THREADS, { id: newThreadId });
+ writePipeline.set(threadKey, processRecord(TABLE_THREADS, newThread).processedRecord);
+ const clonedMessages = [];
+ const targetResourceId = resourceId || sourceThread.resourceId;
+ const newThreadMessagesKey = getThreadMessagesKey(newThreadId);
+ for (let i = 0; i < sourceMessages.length; i++) {
+ const sourceMsg = sourceMessages[i];
+ const newMessageId = crypto.randomUUID();
+ const { _index, ...restMsg } = sourceMsg;
+ const newMessage = {
+ ...restMsg,
+ id: newMessageId,
+ threadId: newThreadId,
+ resourceId: targetResourceId
+ };
+ const messageKey = getMessageKey(newThreadId, newMessageId);
+ writePipeline.set(messageKey, newMessage);
+ writePipeline.set(getMessageIndexKey(newMessageId), newThreadId);
+ writePipeline.zadd(newThreadMessagesKey, {
+ score: i,
+ member: newMessageId
+ });
+ clonedMessages.push(newMessage);
+ }
+ await writePipeline.exec();
+ return {
+ thread: newThread,
+ clonedMessages
+ };
+ } catch (error) {
+ if (error instanceof MastraError) {
+ throw error;
+ }
+ throw new MastraError(
+ {
+ id: createStorageErrorId("UPSTASH", "CLONE_THREAD", "FAILED"),
+ domain: ErrorDomain.STORAGE,
+ category: ErrorCategory.THIRD_PARTY,
+ details: { sourceThreadId, newThreadId }
+ },
+ error
+ );
+ }
+ }
  };
  function transformScoreRow(row) {
  return transformScoreRow$1(row);
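
Note: the hunk above adds cloneThread to the Upstash memory store. It copies a source thread's messages into a new thread (new message IDs, optional date/ID filtering, and a most-recent message limit) and records clone provenance under metadata.clone. A usage sketch with argument names taken from the destructuring in the diff; memoryStore and all IDs are illustrative only.

    const { thread, clonedMessages } = await memoryStore.cloneThread({
      sourceThreadId: 'thread-abc',
      newThreadId: 'thread-abc-copy',   // optional; a random UUID is used when omitted
      resourceId: 'user-456',           // optional; defaults to the source thread's resourceId
      title: 'Copied conversation',     // optional; defaults to "Clone of <source title>"
      metadata: { reason: 'branching' },
      options: {
        messageFilter: { startDate: new Date('2024-01-01') }, // copy messages from this date on
        messageLimit: 100,              // keep at most the 100 most recent matching messages
      },
    });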
@@ -1271,24 +1411,6 @@ var ScoresUpstash = class extends ScoresStorage {
  };
  }
  };
- function parseWorkflowRun(row) {
- let parsedSnapshot = row.snapshot;
- if (typeof parsedSnapshot === "string") {
- try {
- parsedSnapshot = JSON.parse(row.snapshot);
- } catch (e) {
- console.warn(`Failed to parse snapshot for workflow ${row.workflow_name}: ${e}`);
- }
- }
- return {
- workflowName: row.workflow_name,
- runId: row.run_id,
- snapshot: parsedSnapshot,
- createdAt: ensureDate(row.createdAt),
- updatedAt: ensureDate(row.updatedAt),
- resourceId: row.resourceId
- };
- }
  var WorkflowsUpstash = class extends WorkflowsStorage {
  client;
  #db;
@@ -1298,6 +1420,24 @@ var WorkflowsUpstash = class extends WorkflowsStorage {
  this.client = client;
  this.#db = new UpstashDB({ client });
  }
+ parseWorkflowRun(row) {
+ let parsedSnapshot = row.snapshot;
+ if (typeof parsedSnapshot === "string") {
+ try {
+ parsedSnapshot = JSON.parse(row.snapshot);
+ } catch (e) {
+ this.logger.warn(`Failed to parse snapshot for workflow ${row.workflow_name}: ${e}`);
+ }
+ }
+ return {
+ workflowName: row.workflow_name,
+ runId: row.run_id,
+ snapshot: parsedSnapshot,
+ createdAt: ensureDate(row.createdAt),
+ updatedAt: ensureDate(row.updatedAt),
+ resourceId: row.resourceId
+ };
+ }
  async dangerouslyClearAll() {
  await this.#db.deleteData({ tableName: TABLE_WORKFLOW_SNAPSHOT });
  }
@@ -1463,7 +1603,7 @@ var WorkflowsUpstash = class extends WorkflowsStorage {
  );
  const data = workflows.find((w) => w?.run_id === runId && w?.workflow_name === workflowName);
  if (!data) return null;
- return parseWorkflowRun(data);
+ return this.parseWorkflowRun(data);
  } catch (error) {
  throw new MastraError(
  {
@@ -1548,7 +1688,7 @@ var WorkflowsUpstash = class extends WorkflowsStorage {
  const results = await pipeline.exec();
  let runs = results.map((result) => result).filter(
  (record) => record !== null && record !== void 0 && typeof record === "object" && "workflow_name" in record
- ).filter((record) => !workflowName || record.workflow_name === workflowName).map((w) => parseWorkflowRun(w)).filter((w) => {
+ ).filter((record) => !workflowName || record.workflow_name === workflowName).map((w) => this.parseWorkflowRun(w)).filter((w) => {
  if (fromDate && w.createdAt < fromDate) return false;
  if (toDate && w.createdAt > toDate) return false;
  if (status) {
@@ -1557,7 +1697,7 @@ var WorkflowsUpstash = class extends WorkflowsStorage {
  try {
  snapshot = JSON.parse(snapshot);
  } catch (e) {
- console.warn(`Failed to parse snapshot for workflow ${w.workflowName}: ${e}`);
+ this.logger.warn(`Failed to parse snapshot for workflow ${w.workflowName}: ${e}`);
  return false;
  }
  }
@@ -1595,7 +1735,7 @@ var WorkflowsUpstash = class extends WorkflowsStorage {
  var isClientConfig = (config) => {
  return "client" in config;
  };
- var UpstashStore = class extends MastraStorage {
+ var UpstashStore = class extends MastraCompositeStore {
  redis;
  stores;
  constructor(config) {
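
Note: UpstashStore now extends MastraCompositeStore instead of MastraStorage. The diff only shows that a config containing a client key is treated as a client config (isClientConfig above); a construction sketch for that branch, assuming the UpstashStore export name matches the variable in the bundle:

    import { Redis } from '@upstash/redis';
    import { UpstashStore } from '@mastra/upstash';

    // Pass a pre-built @upstash/redis client (the `"client" in config` branch above).
    const store = new UpstashStore({
      client: new Redis({
        url: process.env.UPSTASH_REDIS_REST_URL,
        token: process.env.UPSTASH_REDIS_REST_TOKEN,
      }),
    });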