@mastra/upstash 0.10.0 → 0.10.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,23 +1,23 @@
 
- > @mastra/upstash@0.10.0-alpha.1 build /home/runner/work/mastra/mastra/stores/upstash
+ > @mastra/upstash@0.10.1-alpha.0 build /home/runner/work/mastra/mastra/stores/upstash
 
  > tsup src/index.ts --format esm,cjs --experimental-dts --clean --treeshake=smallest --splitting
 
  CLI Building entry: src/index.ts
  CLI Using tsconfig: tsconfig.json
  CLI tsup v8.4.0
  TSC Build start
- TSC ⚡️ Build success in 8981ms
+ TSC ⚡️ Build success in 8661ms
  DTS Build start
  CLI Target: es2022
  Analysis will use the bundled TypeScript version 5.8.3
  Writing package typings: /home/runner/work/mastra/mastra/stores/upstash/dist/_tsup-dts-rollup.d.ts
  Analysis will use the bundled TypeScript version 5.8.3
  Writing package typings: /home/runner/work/mastra/mastra/stores/upstash/dist/_tsup-dts-rollup.d.cts
- DTS ⚡️ Build success in 10676ms
+ DTS ⚡️ Build success in 12455ms
  CLI Cleaning output folder
  ESM Build start
  CJS Build start
- ESM dist/index.js 30.09 KB
- ESM ⚡️ Build success in 1208ms
- CJS dist/index.cjs 30.23 KB
- CJS ⚡️ Build success in 1217ms
+ ESM dist/index.js 31.90 KB
+ ESM ⚡️ Build success in 970ms
+ CJS dist/index.cjs 32.04 KB
+ CJS ⚡️ Build success in 980ms
package/CHANGELOG.md CHANGED
@@ -1,5 +1,31 @@
  # @mastra/upstash
 
+ ## 0.10.1
+
+ ### Patch Changes
+
+ - e60402a: Use scan instead of keys for upstash redis and implement batchInsert
+ - Updated dependencies [d70b807]
+ - Updated dependencies [6d16390]
+ - Updated dependencies [1e4a421]
+ - Updated dependencies [200d0da]
+ - Updated dependencies [bf5f17b]
+ - Updated dependencies [5343f93]
+ - Updated dependencies [38aee50]
+ - Updated dependencies [5c41100]
+ - Updated dependencies [d6a759b]
+ - Updated dependencies [6015bdf]
+ - @mastra/core@0.10.1
+
+ ## 0.10.1-alpha.0
+
+ ### Patch Changes
+
+ - e60402a: Use scan instead of keys for upstash redis and implement batchInsert
+ - Updated dependencies [6d16390]
+ - Updated dependencies [1e4a421]
+ - @mastra/core@0.10.1-alpha.0
+
  ## 0.10.0
 
  ### Minor Changes
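
The patch note above says batchInsert is now implemented (in 0.10.0 it threw "Method not implemented."). Below is a minimal usage sketch based on the typings and the dist/index.cjs implementation further down; the environment variable names, the record fields, and the "mastra_evals" table name are illustrative assumptions, not taken from this diff — tableName must be one of the TABLE_NAMES values exported by @mastra/core, and UpstashStore is assumed to be exported from the package entry point.

import { UpstashStore } from "@mastra/upstash";

// The constructor takes Upstash REST credentials as { url, token }, as shown in dist/index.cjs below.
const store = new UpstashStore({
  url: process.env.UPSTASH_REDIS_REST_URL!,
  token: process.env.UPSTASH_REDIS_REST_TOKEN!,
});

// batchInsert({ tableName, records }) pipelines SET commands in batches of 1000 records.
await store.batchInsert({
  tableName: "mastra_evals", // hypothetical TABLE_NAMES value, for illustration only
  records: [
    { run_id: "run-1", agent_name: "weather-agent", created_at: new Date() },
    { run_id: "run-2", agent_name: "weather-agent", created_at: new Date() },
  ],
});
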
@@ -52,12 +52,30 @@ export declare class UpstashFilterTranslator extends BaseFilterTranslator {
  }
 
  declare class UpstashStore extends MastraStorage {
- batchInsert(_input: {
- tableName: TABLE_NAMES;
- records: Record<string, any>[];
- }): Promise<void>;
- getEvalsByAgentName(agentName: string, type?: 'test' | 'live'): Promise<EvalRow[]>;
+ private redis;
+ constructor(config: UpstashConfig);
  private transformEvalRecord;
+ private parseJSON;
+ private getKey;
+ private ensureDate;
+ private serializeDate;
+ /**
+ * Scans for keys matching the given pattern using SCAN and returns them as an array.
+ * @param pattern Redis key pattern, e.g. "table:*"
+ * @param batchSize Number of keys to scan per batch (default: 1000)
+ */
+ private scanKeys;
+ /**
+ * Deletes all keys matching the given pattern using SCAN and DEL in batches.
+ * @param pattern Redis key pattern, e.g. "table:*"
+ * @param batchSize Number of keys to delete per batch (default: 1000)
+ */
+ private scanAndDelete;
+ private getMessageKey;
+ private getThreadMessagesKey;
+ private parseWorkflowRun;
+ private processRecord;
+ getEvalsByAgentName(agentName: string, type?: 'test' | 'live'): Promise<EvalRow[]>;
  getTraces({ name, scope, page, perPage, attributes, filters, fromDate, toDate, }?: {
  name?: string;
  scope?: string;
@@ -68,12 +86,6 @@ declare class UpstashStore extends MastraStorage {
  fromDate?: Date;
  toDate?: Date;
  }): Promise<any[]>;
- private parseJSON;
- private redis;
- constructor(config: UpstashConfig);
- private getKey;
- private ensureDate;
- private serializeDate;
  createTable({ tableName, schema, }: {
  tableName: TABLE_NAMES;
  schema: Record<string, StorageColumn>;
@@ -85,6 +97,10 @@ declare class UpstashStore extends MastraStorage {
  tableName: TABLE_NAMES;
  record: Record<string, any>;
  }): Promise<void>;
+ batchInsert(input: {
+ tableName: TABLE_NAMES;
+ records: Record<string, any>[];
+ }): Promise<void>;
  load<R>({ tableName, keys }: {
  tableName: TABLE_NAMES;
  keys: Record<string, string>;
@@ -106,8 +122,6 @@ declare class UpstashStore extends MastraStorage {
  deleteThread({ threadId }: {
  threadId: string;
  }): Promise<void>;
- private getMessageKey;
- private getThreadMessagesKey;
  saveMessages({ messages }: {
  messages: MessageType[];
  }): Promise<MessageType[]>;
@@ -123,7 +137,6 @@ declare class UpstashStore extends MastraStorage {
  workflowName: string;
  runId: string;
  }): Promise<WorkflowRunState | null>;
- private parseWorkflowRun;
  getWorkflowRuns({ namespace, workflowName, fromDate, toDate, limit, offset, resourceId, }?: {
  namespace: string;
  workflowName?: string;
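
The typings above declare the new scanKeys and scanAndDelete helpers that replace this.redis.keys(pattern) throughout the store. Below is a minimal standalone sketch of the same cursor-based SCAN loop against the @upstash/redis client, for use outside of MastraStorage; it assumes a recent @upstash/redis version where scan() accepts match/count options and returns a string cursor. The shipped helper appears in the index.cjs diff below (note its signature defaults batchSize to 1e4, despite the JSDoc saying 1000).

import { Redis } from "@upstash/redis";

// Walk SCAN cursors until the server returns "0" again, collecting every key that matches.
// Unlike KEYS, this never asks Redis to enumerate the whole keyspace in one blocking call.
async function scanKeys(redis: Redis, pattern: string, batchSize = 1000): Promise<string[]> {
  let cursor = "0";
  const keys: string[] = [];
  do {
    const [nextCursor, batch] = await redis.scan(cursor, { match: pattern, count: batchSize });
    keys.push(...batch);
    cursor = nextCursor;
  } while (cursor !== "0");
  return keys;
}

// Illustrative only: the store's keys follow a `${tableName}:...` layout, so a pattern such as
// "mastra_threads:*" (hypothetical table name) would list every persisted thread key.
// const threadKeys = await scanKeys(new Redis({ url, token }), "mastra_threads:*");
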
package/dist/index.cjs CHANGED
@@ -8,13 +8,157 @@ var filter = require('@mastra/core/vector/filter');
  // src/storage/index.ts
  var UpstashStore = class extends storage.MastraStorage {
- batchInsert(_input) {
- throw new Error("Method not implemented.");
+ redis;
+ constructor(config) {
+ super({ name: "Upstash" });
+ this.redis = new redis.Redis({
+ url: config.url,
+ token: config.token
+ });
+ }
+ transformEvalRecord(record) {
+ let result = record.result;
+ if (typeof result === "string") {
+ try {
+ result = JSON.parse(result);
+ } catch {
+ console.warn("Failed to parse result JSON:");
+ }
+ }
+ let testInfo = record.test_info;
+ if (typeof testInfo === "string") {
+ try {
+ testInfo = JSON.parse(testInfo);
+ } catch {
+ console.warn("Failed to parse test_info JSON:");
+ }
+ }
+ return {
+ agentName: record.agent_name,
+ input: record.input,
+ output: record.output,
+ result,
+ metricName: record.metric_name,
+ instructions: record.instructions,
+ testInfo,
+ globalRunId: record.global_run_id,
+ runId: record.run_id,
+ createdAt: typeof record.created_at === "string" ? record.created_at : record.created_at instanceof Date ? record.created_at.toISOString() : (/* @__PURE__ */ new Date()).toISOString()
+ };
+ }
+ parseJSON(value) {
+ if (typeof value === "string") {
+ try {
+ return JSON.parse(value);
+ } catch {
+ return value;
+ }
+ }
+ return value;
+ }
+ getKey(tableName, keys) {
+ const keyParts = Object.entries(keys).filter(([_, value]) => value !== void 0).map(([key, value]) => `${key}:${value}`);
+ return `${tableName}:${keyParts.join(":")}`;
+ }
+ ensureDate(date) {
+ if (!date) return void 0;
+ return date instanceof Date ? date : new Date(date);
+ }
+ serializeDate(date) {
+ if (!date) return void 0;
+ const dateObj = this.ensureDate(date);
+ return dateObj?.toISOString();
+ }
+ /**
+ * Scans for keys matching the given pattern using SCAN and returns them as an array.
+ * @param pattern Redis key pattern, e.g. "table:*"
+ * @param batchSize Number of keys to scan per batch (default: 1000)
+ */
+ async scanKeys(pattern, batchSize = 1e4) {
+ let cursor = "0";
+ let keys = [];
+ do {
+ const [nextCursor, batch] = await this.redis.scan(cursor, {
+ match: pattern,
+ count: batchSize
+ });
+ keys.push(...batch);
+ cursor = nextCursor;
+ } while (cursor !== "0");
+ return keys;
+ }
+ /**
+ * Deletes all keys matching the given pattern using SCAN and DEL in batches.
+ * @param pattern Redis key pattern, e.g. "table:*"
+ * @param batchSize Number of keys to delete per batch (default: 1000)
+ */
+ async scanAndDelete(pattern, batchSize = 1e4) {
+ let cursor = "0";
+ let totalDeleted = 0;
+ do {
+ const [nextCursor, keys] = await this.redis.scan(cursor, {
+ match: pattern,
+ count: batchSize
+ });
+ if (keys.length > 0) {
+ await this.redis.del(...keys);
+ totalDeleted += keys.length;
+ }
+ cursor = nextCursor;
+ } while (cursor !== "0");
+ return totalDeleted;
+ }
+ getMessageKey(threadId, messageId) {
+ return this.getKey(storage.TABLE_MESSAGES, { threadId, id: messageId });
+ }
+ getThreadMessagesKey(threadId) {
+ return `thread:${threadId}:messages`;
+ }
+ parseWorkflowRun(row) {
+ let parsedSnapshot = row.snapshot;
+ if (typeof parsedSnapshot === "string") {
+ try {
+ parsedSnapshot = JSON.parse(row.snapshot);
+ } catch (e) {
+ console.warn(`Failed to parse snapshot for workflow ${row.workflow_name}: ${e}`);
+ }
+ }
+ return {
+ workflowName: row.workflow_name,
+ runId: row.run_id,
+ snapshot: parsedSnapshot,
+ createdAt: this.ensureDate(row.createdAt),
+ updatedAt: this.ensureDate(row.updatedAt),
+ resourceId: row.resourceId
+ };
+ }
+ processRecord(tableName, record) {
+ let key;
+ if (tableName === storage.TABLE_MESSAGES) {
+ key = this.getKey(tableName, { threadId: record.threadId, id: record.id });
+ } else if (tableName === storage.TABLE_WORKFLOW_SNAPSHOT) {
+ key = this.getKey(tableName, {
+ namespace: record.namespace || "workflows",
+ workflow_name: record.workflow_name,
+ run_id: record.run_id,
+ ...record.resourceId ? { resourceId: record.resourceId } : {}
+ });
+ } else if (tableName === storage.TABLE_EVALS) {
+ key = this.getKey(tableName, { id: record.run_id });
+ } else {
+ key = this.getKey(tableName, { id: record.id });
+ }
+ const processedRecord = {
+ ...record,
+ createdAt: this.serializeDate(record.createdAt),
+ updatedAt: this.serializeDate(record.updatedAt)
+ };
+ return { key, processedRecord };
  }
  async getEvalsByAgentName(agentName, type) {
  try {
  const pattern = `${storage.TABLE_EVALS}:*`;
- const keys = await this.redis.keys(pattern);
+ const keys = await this.scanKeys(pattern);
  const evalRecords = await Promise.all(
  keys.map(async (key) => {
  const data = await this.redis.get(key);
@@ -58,36 +202,6 @@ var UpstashStore = class extends storage.MastraStorage {
  return [];
  }
  }
- transformEvalRecord(record) {
- let result = record.result;
- if (typeof result === "string") {
- try {
- result = JSON.parse(result);
- } catch {
- console.warn("Failed to parse result JSON:");
- }
- }
- let testInfo = record.test_info;
- if (typeof testInfo === "string") {
- try {
- testInfo = JSON.parse(testInfo);
- } catch {
- console.warn("Failed to parse test_info JSON:");
- }
- }
- return {
- agentName: record.agent_name,
- input: record.input,
- output: record.output,
- result,
- metricName: record.metric_name,
- instructions: record.instructions,
- testInfo,
- globalRunId: record.global_run_id,
- runId: record.run_id,
- createdAt: typeof record.created_at === "string" ? record.created_at : record.created_at instanceof Date ? record.created_at.toISOString() : (/* @__PURE__ */ new Date()).toISOString()
- };
- }
  async getTraces({
  name,
  scope,
@@ -103,7 +217,7 @@ var UpstashStore = class extends storage.MastraStorage {
  }) {
  try {
  const pattern = `${storage.TABLE_TRACES}:*`;
- const keys = await this.redis.keys(pattern);
+ const keys = await this.scanKeys(pattern);
  const traceRecords = await Promise.all(
  keys.map(async (key) => {
  const data = await this.redis.get(key);
@@ -167,37 +281,6 @@ var UpstashStore = class extends storage.MastraStorage {
  return [];
  }
  }
- parseJSON(value) {
- if (typeof value === "string") {
- try {
- return JSON.parse(value);
- } catch {
- return value;
- }
- }
- return value;
- }
- redis;
- constructor(config) {
- super({ name: "Upstash" });
- this.redis = new redis.Redis({
- url: config.url,
- token: config.token
- });
- }
- getKey(tableName, keys) {
- const keyParts = Object.entries(keys).filter(([_, value]) => value !== void 0).map(([key, value]) => `${key}:${value}`);
- return `${tableName}:${keyParts.join(":")}`;
- }
- ensureDate(date) {
- if (!date) return void 0;
- return date instanceof Date ? date : new Date(date);
- }
- serializeDate(date) {
- if (!date) return void 0;
- const dateObj = this.ensureDate(date);
- return dateObj?.toISOString();
- }
  async createTable({
  tableName,
  schema
@@ -206,34 +289,26 @@ var UpstashStore = class extends storage.MastraStorage {
  }
  async clearTable({ tableName }) {
  const pattern = `${tableName}:*`;
- const keys = await this.redis.keys(pattern);
- if (keys.length > 0) {
- await this.redis.del(...keys);
- }
+ await this.scanAndDelete(pattern);
  }
  async insert({ tableName, record }) {
- let key;
- if (tableName === storage.TABLE_MESSAGES) {
- key = this.getKey(tableName, { threadId: record.threadId, id: record.id });
- } else if (tableName === storage.TABLE_WORKFLOW_SNAPSHOT) {
- key = this.getKey(tableName, {
- namespace: record.namespace || "workflows",
- workflow_name: record.workflow_name,
- run_id: record.run_id,
- ...record.resourceId ? { resourceId: record.resourceId } : {}
- });
- } else if (tableName === storage.TABLE_EVALS) {
- key = this.getKey(tableName, { id: record.run_id });
- } else {
- key = this.getKey(tableName, { id: record.id });
- }
- const processedRecord = {
- ...record,
- createdAt: this.serializeDate(record.createdAt),
- updatedAt: this.serializeDate(record.updatedAt)
- };
+ const { key, processedRecord } = this.processRecord(tableName, record);
  await this.redis.set(key, processedRecord);
  }
+ async batchInsert(input) {
+ const { tableName, records } = input;
+ if (!records.length) return;
+ const batchSize = 1e3;
+ for (let i = 0; i < records.length; i += batchSize) {
+ const batch = records.slice(i, i + batchSize);
+ const pipeline = this.redis.pipeline();
+ for (const record of batch) {
+ const { key, processedRecord } = this.processRecord(tableName, record);
+ pipeline.set(key, processedRecord);
+ }
+ await pipeline.exec();
+ }
+ }
  async load({ tableName, keys }) {
  const key = this.getKey(tableName, keys);
  const data = await this.redis.get(key);
@@ -254,7 +329,7 @@ var UpstashStore = class extends storage.MastraStorage {
  }
  async getThreadsByResourceId({ resourceId }) {
  const pattern = `${storage.TABLE_THREADS}:*`;
- const keys = await this.redis.keys(pattern);
+ const keys = await this.scanKeys(pattern);
  const threads = await Promise.all(
  keys.map(async (key) => {
  const data = await this.redis.get(key);
@@ -299,29 +374,27 @@ var UpstashStore = class extends storage.MastraStorage {
  const key = this.getKey(storage.TABLE_THREADS, { id: threadId });
  await this.redis.del(key);
  }
- getMessageKey(threadId, messageId) {
- return this.getKey(storage.TABLE_MESSAGES, { threadId, id: messageId });
- }
- getThreadMessagesKey(threadId) {
- return `thread:${threadId}:messages`;
- }
  async saveMessages({ messages }) {
  if (messages.length === 0) return [];
- const pipeline = this.redis.pipeline();
  const messagesWithIndex = messages.map((message, index) => ({
  ...message,
  _index: index
  }));
- for (const message of messagesWithIndex) {
- const key = this.getMessageKey(message.threadId, message.id);
- const score = message._index !== void 0 ? message._index : new Date(message.createdAt).getTime();
- pipeline.set(key, message);
- pipeline.zadd(this.getThreadMessagesKey(message.threadId), {
- score,
- member: message.id
- });
+ const batchSize = 1e3;
+ for (let i = 0; i < messagesWithIndex.length; i += batchSize) {
+ const batch = messagesWithIndex.slice(i, i + batchSize);
+ const pipeline = this.redis.pipeline();
+ for (const message of batch) {
+ const key = this.getMessageKey(message.threadId, message.id);
+ const score = message._index !== void 0 ? message._index : new Date(message.createdAt).getTime();
+ pipeline.set(key, message);
+ pipeline.zadd(this.getThreadMessagesKey(message.threadId), {
+ score,
+ member: message.id
+ });
+ }
+ await pipeline.exec();
  }
- await pipeline.exec();
  return messages;
  }
  async getMessages({ threadId, selectBy }) {
@@ -385,24 +458,6 @@ var UpstashStore = class extends storage.MastraStorage {
  if (!data) return null;
  return data.snapshot;
  }
- parseWorkflowRun(row) {
- let parsedSnapshot = row.snapshot;
- if (typeof parsedSnapshot === "string") {
- try {
- parsedSnapshot = JSON.parse(row.snapshot);
- } catch (e) {
- console.warn(`Failed to parse snapshot for workflow ${row.workflow_name}: ${e}`);
- }
- }
- return {
- workflowName: row.workflow_name,
- runId: row.run_id,
- snapshot: parsedSnapshot,
- createdAt: this.ensureDate(row.createdAt),
- updatedAt: this.ensureDate(row.updatedAt),
- resourceId: row.resourceId
- };
- }
  async getWorkflowRuns({
  namespace,
  workflowName,
@@ -426,7 +481,7 @@ var UpstashStore = class extends storage.MastraStorage {
  } else if (resourceId) {
  pattern = this.getKey(storage.TABLE_WORKFLOW_SNAPSHOT, { namespace, workflow_name: "*", run_id: "*", resourceId });
  }
- const keys = await this.redis.keys(pattern);
+ const keys = await this.scanKeys(pattern);
  const workflows = await Promise.all(
  keys.map(async (key) => {
  const data = await this.redis.get(key);
@@ -455,7 +510,7 @@ var UpstashStore = class extends storage.MastraStorage {
  }) {
  try {
  const key = this.getKey(storage.TABLE_WORKFLOW_SNAPSHOT, { namespace, workflow_name: workflowName, run_id: runId }) + "*";
- const keys = await this.redis.keys(key);
+ const keys = await this.scanKeys(key);
  const workflows = await Promise.all(
  keys.map(async (key2) => {
  const data2 = await this.redis.get(key2);