@mastra/upstash 1.0.0-beta.10 → 1.0.0-beta.12

package/dist/index.js CHANGED
@@ -190,19 +190,34 @@ var StoreMemoryUpstash = class extends MemoryStorage {
       );
     }
   }
-  async listThreadsByResourceId(args) {
-    const { resourceId, page = 0, perPage: perPageInput, orderBy } = args;
+  async listThreads(args) {
+    const { page = 0, perPage: perPageInput, orderBy, filter } = args;
     const { field, direction } = this.parseOrderBy(orderBy);
+    try {
+      this.validatePaginationInput(page, perPageInput ?? 100);
+    } catch (error) {
+      throw new MastraError(
+        {
+          id: createStorageErrorId("UPSTASH", "LIST_THREADS", "INVALID_PAGE"),
+          domain: ErrorDomain.STORAGE,
+          category: ErrorCategory.USER,
+          details: { page, ...perPageInput !== void 0 && { perPage: perPageInput } }
+        },
+        error instanceof Error ? error : new Error("Invalid pagination parameters")
+      );
+    }
     const perPage = normalizePerPage(perPageInput, 100);
-    if (page < 0) {
+    try {
+      this.validateMetadataKeys(filter?.metadata);
+    } catch (error) {
       throw new MastraError(
         {
-          id: createStorageErrorId("UPSTASH", "LIST_THREADS_BY_RESOURCE_ID", "INVALID_PAGE"),
+          id: createStorageErrorId("UPSTASH", "LIST_THREADS", "INVALID_METADATA_KEY"),
           domain: ErrorDomain.STORAGE,
           category: ErrorCategory.USER,
-          details: { page }
+          details: { metadataKeys: filter?.metadata ? Object.keys(filter.metadata).join(", ") : "" }
         },
-        new Error("page must be >= 0")
+        error instanceof Error ? error : new Error("Invalid metadata key")
       );
     }
     const { offset, perPage: perPageForResponse } = calculatePagination(page, perPageInput, perPage);
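
For orientation, a minimal sketch of the call-shape change in this hunk, inferred only from the code above (the `storage` instance and all literal values are hypothetical; this is not taken from official documentation):

// Before (1.0.0-beta.10): resourceId was a required top-level argument.
const oldResult = await storage.listThreadsByResourceId({ resourceId: "user-123", page: 0, perPage: 50 });
// After (1.0.0-beta.12): filtering moves into an optional `filter` object,
// which may carry a resourceId and/or exact-match metadata (see the next hunk).
const result = await storage.listThreads({ page: 0, perPage: 50, filter: { resourceId: "user-123" } });
// At least on the empty-keys path added in the next hunk, the result is shaped as
// { threads, total, page, perPage, hasMore }.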
@@ -210,19 +225,35 @@ var StoreMemoryUpstash = class extends MemoryStorage {
       let allThreads = [];
       const pattern = `${TABLE_THREADS}:*`;
       const keys = await this.#db.scanKeys(pattern);
+      if (keys.length === 0) {
+        return {
+          threads: [],
+          total: 0,
+          page,
+          perPage: perPageForResponse,
+          hasMore: false
+        };
+      }
       const pipeline = this.client.pipeline();
       keys.forEach((key) => pipeline.get(key));
       const results = await pipeline.exec();
       for (let i = 0; i < results.length; i++) {
         const thread = results[i];
-        if (thread && thread.resourceId === resourceId) {
-          allThreads.push({
-            ...thread,
-            createdAt: ensureDate(thread.createdAt),
-            updatedAt: ensureDate(thread.updatedAt),
-            metadata: typeof thread.metadata === "string" ? JSON.parse(thread.metadata) : thread.metadata
-          });
+        if (!thread) continue;
+        if (filter?.resourceId && thread.resourceId !== filter.resourceId) {
+          continue;
+        }
+        if (filter?.metadata && Object.keys(filter.metadata).length > 0) {
+          const threadMetadata = typeof thread.metadata === "string" ? JSON.parse(thread.metadata) : thread.metadata;
+          const matches = Object.entries(filter.metadata).every(([key, value]) => threadMetadata?.[key] === value);
+          if (!matches) continue;
         }
+        allThreads.push({
+          ...thread,
+          createdAt: ensureDate(thread.createdAt),
+          updatedAt: ensureDate(thread.updatedAt),
+          metadata: typeof thread.metadata === "string" ? JSON.parse(thread.metadata) : thread.metadata
+        });
       }
       const sortedThreads = this.sortThreads(allThreads, field, direction);
       const total = sortedThreads.length;
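
Worth noting from the filter logic above: metadata matching is per-key strict equality against the parsed metadata object, so only scalar values compare the way you might expect. A tiny sketch with hypothetical values:

// thread metadata after JSON.parse: { projectId: "acme", tags: ["a"] }
// filter.metadata = { projectId: "acme" }   -> thread is kept (every entry passes ===)
// filter.metadata = { projectId: "other" }  -> thread is skipped
// filter.metadata = { tags: ["a"] }         -> thread is skipped; arrays and objects compare by reference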
@@ -239,11 +270,12 @@ var StoreMemoryUpstash = class extends MemoryStorage {
     } catch (error) {
       const mastraError = new MastraError(
         {
-          id: createStorageErrorId("UPSTASH", "LIST_THREADS_BY_RESOURCE_ID", "FAILED"),
+          id: createStorageErrorId("UPSTASH", "LIST_THREADS", "FAILED"),
           domain: ErrorDomain.STORAGE,
           category: ErrorCategory.THIRD_PARTY,
           details: {
-            resourceId,
+            ...filter?.resourceId && { resourceId: filter.resourceId },
+            hasMetadataFilter: !!filter?.metadata,
             page,
             perPage
           }
@@ -1379,24 +1411,6 @@ var ScoresUpstash = class extends ScoresStorage {
     };
   }
 };
-function parseWorkflowRun(row) {
-  let parsedSnapshot = row.snapshot;
-  if (typeof parsedSnapshot === "string") {
-    try {
-      parsedSnapshot = JSON.parse(row.snapshot);
-    } catch (e) {
-      console.warn(`Failed to parse snapshot for workflow ${row.workflow_name}: ${e}`);
-    }
-  }
-  return {
-    workflowName: row.workflow_name,
-    runId: row.run_id,
-    snapshot: parsedSnapshot,
-    createdAt: ensureDate(row.createdAt),
-    updatedAt: ensureDate(row.updatedAt),
-    resourceId: row.resourceId
-  };
-}
 var WorkflowsUpstash = class extends WorkflowsStorage {
   client;
   #db;
@@ -1406,6 +1420,24 @@ var WorkflowsUpstash = class extends WorkflowsStorage {
     this.client = client;
     this.#db = new UpstashDB({ client });
   }
+  parseWorkflowRun(row) {
+    let parsedSnapshot = row.snapshot;
+    if (typeof parsedSnapshot === "string") {
+      try {
+        parsedSnapshot = JSON.parse(row.snapshot);
+      } catch (e) {
+        this.logger.warn(`Failed to parse snapshot for workflow ${row.workflow_name}: ${e}`);
+      }
+    }
+    return {
+      workflowName: row.workflow_name,
+      runId: row.run_id,
+      snapshot: parsedSnapshot,
+      createdAt: ensureDate(row.createdAt),
+      updatedAt: ensureDate(row.updatedAt),
+      resourceId: row.resourceId
+    };
+  }
   async dangerouslyClearAll() {
     await this.#db.deleteData({ tableName: TABLE_WORKFLOW_SNAPSHOT });
   }
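
A small sketch of the row-to-run mapping this method performs, using hypothetical values (the field names are exactly those in the method body above):

// Hypothetical snapshot row as read back from Upstash.
const row = {
  workflow_name: "orderWorkflow",
  run_id: "run_42",
  snapshot: '{"status":"success"}', // may already be an object, in which case it is passed through
  createdAt: "2024-01-01T00:00:00.000Z",
  updatedAt: "2024-01-01T00:05:00.000Z",
  resourceId: "user-123"
};
// this.parseWorkflowRun(row) -> { workflowName, runId, snapshot, createdAt, updatedAt, resourceId },
// with snapshot JSON-parsed when it is a string (a failed parse is logged via this.logger.warn and the
// raw value kept) and createdAt/updatedAt coerced to Date via ensureDate.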
@@ -1571,7 +1603,7 @@ var WorkflowsUpstash = class extends WorkflowsStorage {
       );
       const data = workflows.find((w) => w?.run_id === runId && w?.workflow_name === workflowName);
       if (!data) return null;
-      return parseWorkflowRun(data);
+      return this.parseWorkflowRun(data);
     } catch (error) {
       throw new MastraError(
         {
@@ -1656,7 +1688,7 @@ var WorkflowsUpstash = class extends WorkflowsStorage {
       const results = await pipeline.exec();
       let runs = results.map((result) => result).filter(
         (record) => record !== null && record !== void 0 && typeof record === "object" && "workflow_name" in record
-      ).filter((record) => !workflowName || record.workflow_name === workflowName).map((w) => parseWorkflowRun(w)).filter((w) => {
+      ).filter((record) => !workflowName || record.workflow_name === workflowName).map((w) => this.parseWorkflowRun(w)).filter((w) => {
         if (fromDate && w.createdAt < fromDate) return false;
         if (toDate && w.createdAt > toDate) return false;
         if (status) {
@@ -1665,7 +1697,7 @@ var WorkflowsUpstash = class extends WorkflowsStorage {
             try {
               snapshot = JSON.parse(snapshot);
             } catch (e) {
-              console.warn(`Failed to parse snapshot for workflow ${w.workflowName}: ${e}`);
+              this.logger.warn(`Failed to parse snapshot for workflow ${w.workflowName}: ${e}`);
               return false;
             }
           }