@mastra/upstash 0.0.0-vnextWorkflows-20250422142014 → 0.0.0-workflow-deno-20250616130925
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.turbo/turbo-build.log +39 -0
- package/CHANGELOG.md +563 -2
- package/PAGINATION.md +397 -0
- package/dist/_tsup-dts-rollup.d.cts +150 -48
- package/dist/_tsup-dts-rollup.d.ts +150 -48
- package/dist/chunk-HSTZWXH7.js +1666 -0
- package/dist/chunk-IGKEDEDE.js +12 -0
- package/dist/chunk-N2CPQVE3.cjs +35 -0
- package/dist/chunk-U74OJRHU.cjs +1678 -0
- package/dist/getMachineId-bsd-HDZ73WR7.cjs +30 -0
- package/dist/getMachineId-bsd-KKIDU47O.js +28 -0
- package/dist/getMachineId-darwin-3PL23DL6.cjs +31 -0
- package/dist/getMachineId-darwin-UTKBTJ2U.js +29 -0
- package/dist/getMachineId-linux-K3QXQYAB.js +23 -0
- package/dist/getMachineId-linux-KYLPK3HC.cjs +25 -0
- package/dist/getMachineId-unsupported-DEDJN4ZS.cjs +17 -0
- package/dist/getMachineId-unsupported-VPWBQCK7.js +15 -0
- package/dist/getMachineId-win-L2EYIM5A.js +30 -0
- package/dist/getMachineId-win-ZTI2LRDJ.cjs +52 -0
- package/dist/index.cjs +44133 -247
- package/dist/index.d.cts +1 -0
- package/dist/index.d.ts +1 -0
- package/dist/index.js +44134 -249
- package/docker-compose.yaml +3 -3
- package/package.json +16 -12
- package/src/index.ts +1 -0
- package/src/storage/index.ts +872 -304
- package/src/storage/upstash.test.ts +729 -110
- package/src/vector/index.test.ts +23 -105
- package/src/vector/index.ts +72 -50
- package/src/vector/prompt.ts +77 -0
package/src/storage/index.ts
CHANGED
@@ -1,5 +1,5 @@
 import type { MetricResult, TestInfo } from '@mastra/core/eval';
-import type { StorageThreadType,
+import type { StorageThreadType, MastraMessageV1, MastraMessageV2 } from '@mastra/core/memory';
 import {
   MastraStorage,
   TABLE_MESSAGES,
@@ -8,9 +8,20 @@ import {
   TABLE_EVALS,
   TABLE_TRACES,
 } from '@mastra/core/storage';
-import type {
+import type {
+  TABLE_NAMES,
+  StorageColumn,
+  StorageGetMessagesArg,
+  EvalRow,
+  WorkflowRuns,
+  WorkflowRun,
+  PaginationInfo,
+  PaginationArgs,
+  StorageGetTracesArg,
+} from '@mastra/core/storage';
 import type { WorkflowRunState } from '@mastra/core/workflows';
 import { Redis } from '@upstash/redis';
+import { MessageList } from '../../../../packages/core/dist/agent/index.cjs';

 export interface UpstashConfig {
   url: string;
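Note: the widened import block above pulls in PaginationArgs and PaginationInfo from @mastra/core/storage. Their canonical definitions live in @mastra/core; the sketch below only infers their shape from how this file uses them, so treat the field list as an approximation rather than the package's actual declaration.

// Approximate shapes, inferred from usage later in this diff.
interface PaginationArgs {
  page?: number; // zero-based page index; the methods below default it to 0
  perPage?: number; // page size; defaults below are 40 or 100 per method
  dateRange?: { start?: Date; end?: Date };
}

interface PaginationInfo {
  total: number; // count after filtering, before slicing out the page
  page: number;
  perPage: number;
  hasMore: boolean; // true when (page + 1) * perPage < total
}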
@@ -18,31 +29,206 @@ export interface UpstashConfig {
 }

 export class UpstashStore extends MastraStorage {
-
-
+  private redis: Redis;
+
+  constructor(config: UpstashConfig) {
+    super({ name: 'Upstash' });
+    this.redis = new Redis({
+      url: config.url,
+      token: config.token,
+    });
+  }
+
+  public get supports(): {
+    selectByIncludeResourceScope: boolean;
+  } {
+    return {
+      selectByIncludeResourceScope: true,
+    };
+  }
+
+  private transformEvalRecord(record: Record<string, any>): EvalRow {
+    // Parse JSON strings if needed
+    let result = record.result;
+    if (typeof result === 'string') {
+      try {
+        result = JSON.parse(result);
+      } catch {
+        console.warn('Failed to parse result JSON:');
+      }
+    }
+
+    let testInfo = record.test_info;
+    if (typeof testInfo === 'string') {
+      try {
+        testInfo = JSON.parse(testInfo);
+      } catch {
+        console.warn('Failed to parse test_info JSON:');
+      }
+    }
+
+    return {
+      agentName: record.agent_name,
+      input: record.input,
+      output: record.output,
+      result: result as MetricResult,
+      metricName: record.metric_name,
+      instructions: record.instructions,
+      testInfo: testInfo as TestInfo | undefined,
+      globalRunId: record.global_run_id,
+      runId: record.run_id,
+      createdAt:
+        typeof record.created_at === 'string'
+          ? record.created_at
+          : record.created_at instanceof Date
+            ? record.created_at.toISOString()
+            : new Date().toISOString(),
+    };
   }

+  private parseJSON(value: any): any {
+    if (typeof value === 'string') {
+      try {
+        return JSON.parse(value);
+      } catch {
+        return value;
+      }
+    }
+    return value;
+  }
+
+  private getKey(tableName: TABLE_NAMES, keys: Record<string, any>): string {
+    const keyParts = Object.entries(keys)
+      .filter(([_, value]) => value !== undefined)
+      .map(([key, value]) => `${key}:${value}`);
+    return `${tableName}:${keyParts.join(':')}`;
+  }
+
+  /**
+   * Scans for keys matching the given pattern using SCAN and returns them as an array.
+   * @param pattern Redis key pattern, e.g. "table:*"
+   * @param batchSize Number of keys to scan per batch (default: 1000)
+   */
+  private async scanKeys(pattern: string, batchSize = 10000): Promise<string[]> {
+    let cursor = '0';
+    let keys: string[] = [];
+    do {
+      // Upstash: scan(cursor, { match, count })
+      const [nextCursor, batch] = await this.redis.scan(cursor, {
+        match: pattern,
+        count: batchSize,
+      });
+      keys.push(...batch);
+      cursor = nextCursor;
+    } while (cursor !== '0');
+    return keys;
+  }
+
+  /**
+   * Deletes all keys matching the given pattern using SCAN and DEL in batches.
+   * @param pattern Redis key pattern, e.g. "table:*"
+   * @param batchSize Number of keys to delete per batch (default: 1000)
+   */
+  private async scanAndDelete(pattern: string, batchSize = 10000): Promise<number> {
+    let cursor = '0';
+    let totalDeleted = 0;
+    do {
+      const [nextCursor, keys] = await this.redis.scan(cursor, {
+        match: pattern,
+        count: batchSize,
+      });
+      if (keys.length > 0) {
+        await this.redis.del(...keys);
+        totalDeleted += keys.length;
+      }
+      cursor = nextCursor;
+    } while (cursor !== '0');
+    return totalDeleted;
+  }
+
+  private getMessageKey(threadId: string, messageId: string): string {
+    const key = this.getKey(TABLE_MESSAGES, { threadId, id: messageId });
+    return key;
+  }
+
+  private getThreadMessagesKey(threadId: string): string {
+    return `thread:${threadId}:messages`;
+  }
+
+  private parseWorkflowRun(row: any): WorkflowRun {
+    let parsedSnapshot: WorkflowRunState | string = row.snapshot as string;
+    if (typeof parsedSnapshot === 'string') {
+      try {
+        parsedSnapshot = JSON.parse(row.snapshot as string) as WorkflowRunState;
+      } catch (e) {
+        // If parsing fails, return the raw snapshot string
+        console.warn(`Failed to parse snapshot for workflow ${row.workflow_name}: ${e}`);
+      }
+    }
+
+    return {
+      workflowName: row.workflow_name,
+      runId: row.run_id,
+      snapshot: parsedSnapshot,
+      createdAt: this.ensureDate(row.createdAt)!,
+      updatedAt: this.ensureDate(row.updatedAt)!,
+      resourceId: row.resourceId,
+    };
+  }
+
+  private processRecord(tableName: TABLE_NAMES, record: Record<string, any>) {
+    let key: string;
+
+    if (tableName === TABLE_MESSAGES) {
+      // For messages, use threadId as the primary key component
+      key = this.getKey(tableName, { threadId: record.threadId, id: record.id });
+    } else if (tableName === TABLE_WORKFLOW_SNAPSHOT) {
+      key = this.getKey(tableName, {
+        namespace: record.namespace || 'workflows',
+        workflow_name: record.workflow_name,
+        run_id: record.run_id,
+        ...(record.resourceId ? { resourceId: record.resourceId } : {}),
+      });
+    } else if (tableName === TABLE_EVALS) {
+      key = this.getKey(tableName, { id: record.run_id });
+    } else {
+      key = this.getKey(tableName, { id: record.id });
+    }
+
+    // Convert dates to ISO strings before storing
+    const processedRecord = {
+      ...record,
+      createdAt: this.serializeDate(record.createdAt),
+      updatedAt: this.serializeDate(record.updatedAt),
+    };
+
+    return { key, processedRecord };
+  }
+
+  /**
+   * @deprecated Use getEvals instead
+   */
   async getEvalsByAgentName(agentName: string, type?: 'test' | 'live'): Promise<EvalRow[]> {
     try {
-      // Get all keys that match the evals table pattern
       const pattern = `${TABLE_EVALS}:*`;
-      const keys = await this.
+      const keys = await this.scanKeys(pattern);

-      //
-
-
-
-
-
-      );
+      // Check if we have any keys before using pipeline
+      if (keys.length === 0) {
+        return [];
+      }
+
+      // Use pipeline for batch fetching to improve performance
+      const pipeline = this.redis.pipeline();
+      keys.forEach(key => pipeline.get(key));
+      const results = await pipeline.exec();

       // Filter by agent name and remove nulls
-      const nonNullRecords =
+      const nonNullRecords = results.filter(
         (record): record is Record<string, any> =>
           record !== null && typeof record === 'object' && 'agent_name' in record && record.agent_name === agentName,
       );

-      // Apply additional filtering based on type
       let filteredEvals = nonNullRecords;

       if (type === 'test') {
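Note: scanKeys and scanAndDelete above replace one-shot key listing with cursor-based SCAN, so large keyspaces are walked in batches, and values are then fetched through a single pipeline. A standalone sketch of the same cursor loop, assuming the scan(cursor, { match, count }) signature that @upstash/redis exposes in this diff:

import { Redis } from '@upstash/redis';

// Collect every key matching a pattern without a blocking KEYS call;
// SCAN hands back a cursor, and '0' signals the iteration is complete.
async function collectKeys(redis: Redis, pattern: string): Promise<string[]> {
  const keys: string[] = [];
  let cursor = '0';
  do {
    const [nextCursor, batch] = await redis.scan(cursor, { match: pattern, count: 1000 });
    keys.push(...batch);
    cursor = nextCursor;
  } while (cursor !== '0');
  return keys;
}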
@@ -89,124 +275,88 @@ export class UpstashStore extends MastraStorage {
       }
     }

-
-
-
-
-
-
-
-
-  }
+  /**
+   * @deprecated use getTracesPaginated instead
+   */
+  public async getTraces(args: StorageGetTracesArg): Promise<any[]> {
+    if (args.fromDate || args.toDate) {
+      (args as any).dateRange = {
+        start: args.fromDate,
+        end: args.toDate,
+      };
     }
-
-
-    if (typeof testInfo === 'string') {
-      try {
-        testInfo = JSON.parse(testInfo);
-      } catch {
-        console.warn('Failed to parse test_info JSON:');
-      }
-    }
-
-    return {
-      agentName: record.agent_name,
-      input: record.input,
-      output: record.output,
-      result: result as MetricResult,
-      metricName: record.metric_name,
-      instructions: record.instructions,
-      testInfo: testInfo as TestInfo | undefined,
-      globalRunId: record.global_run_id,
-      runId: record.run_id,
-      createdAt:
-        typeof record.created_at === 'string'
-          ? record.created_at
-          : record.created_at instanceof Date
-            ? record.created_at.toISOString()
-            : new Date().toISOString(),
-    };
+    const { traces } = await this.getTracesPaginated(args);
+    return traces;
   }

-  async
-  {
-    name,
-    scope,
-    page = 0,
-    perPage = 100,
-    attributes,
-    filters,
-  }: {
+  public async getTracesPaginated(
+    args: {
       name?: string;
       scope?: string;
-      page: number;
-      perPage: number;
       attributes?: Record<string, string>;
       filters?: Record<string, any>;
-    }
-
-
-
-
+    } & PaginationArgs,
+  ): Promise<PaginationInfo & { traces: any[] }> {
+    const { name, scope, page = 0, perPage = 100, attributes, filters, dateRange } = args;
+    const fromDate = dateRange?.start;
+    const toDate = dateRange?.end;
+
     try {
-      // Get all keys that match the traces table pattern
       const pattern = `${TABLE_TRACES}:*`;
-      const keys = await this.
+      const keys = await this.scanKeys(pattern);

-
-
-
-
-
-
-
+      if (keys.length === 0) {
+        return {
+          traces: [],
+          total: 0,
+          page,
+          perPage: perPage || 100,
+          hasMore: false,
+        };
+      }
+
+      const pipeline = this.redis.pipeline();
+      keys.forEach(key => pipeline.get(key));
+      const results = await pipeline.exec();

-
-      let filteredTraces = traceRecords.filter(
+      let filteredTraces = results.filter(
         (record): record is Record<string, any> => record !== null && typeof record === 'object',
       );

-      // Apply name filter if provided
       if (name) {
         filteredTraces = filteredTraces.filter(record => record.name?.toLowerCase().startsWith(name.toLowerCase()));
       }
-
-      // Apply scope filter if provided
       if (scope) {
         filteredTraces = filteredTraces.filter(record => record.scope === scope);
       }
-
-      // Apply attributes filter if provided
       if (attributes) {
         filteredTraces = filteredTraces.filter(record => {
           const recordAttributes = record.attributes;
           if (!recordAttributes) return false;
-
-          // Parse attributes if stored as string
           const parsedAttributes =
             typeof recordAttributes === 'string' ? JSON.parse(recordAttributes) : recordAttributes;
-
           return Object.entries(attributes).every(([key, value]) => parsedAttributes[key] === value);
         });
       }
-
-      // Apply custom filters if provided
       if (filters) {
         filteredTraces = filteredTraces.filter(record =>
           Object.entries(filters).every(([key, value]) => record[key] === value),
         );
       }
+      if (fromDate) {
+        filteredTraces = filteredTraces.filter(
+          record => new Date(record.createdAt).getTime() >= new Date(fromDate).getTime(),
+        );
+      }
+      if (toDate) {
+        filteredTraces = filteredTraces.filter(
+          record => new Date(record.createdAt).getTime() <= new Date(toDate).getTime(),
+        );
+      }

-      // Sort traces by creation date (newest first)
       filteredTraces.sort((a, b) => new Date(b.createdAt).getTime() - new Date(a.createdAt).getTime());

-
-      const start = page * perPage;
-      const end = start + perPage;
-      const paginatedTraces = filteredTraces.slice(start, end);
-
-      // Transform and return the traces
-      return paginatedTraces.map(record => ({
+      const transformedTraces = filteredTraces.map(record => ({
         id: record.id,
         parentSpanId: record.parentSpanId,
         traceId: record.traceId,
@@ -222,51 +372,33 @@ export class UpstashStore extends MastraStorage {
         other: this.parseJSON(record.other),
         createdAt: this.ensureDate(record.createdAt),
       }));
+
+      const total = transformedTraces.length;
+      const resolvedPerPage = perPage || 100;
+      const start = page * resolvedPerPage;
+      const end = start + resolvedPerPage;
+      const paginatedTraces = transformedTraces.slice(start, end);
+      const hasMore = end < total;
+
+      return {
+        traces: paginatedTraces,
+        total,
+        page,
+        perPage: resolvedPerPage,
+        hasMore,
+      };
     } catch (error) {
       console.error('Failed to get traces:', error);
-      return
+      return {
+        traces: [],
+        total: 0,
+        page,
+        perPage: perPage || 100,
+        hasMore: false,
+      };
     }
   }

-  private parseJSON(value: any): any {
-    if (typeof value === 'string') {
-      try {
-        return JSON.parse(value);
-      } catch {
-        return value;
-      }
-    }
-    return value;
-  }
-
-  private redis: Redis;
-
-  constructor(config: UpstashConfig) {
-    super({ name: 'Upstash' });
-    this.redis = new Redis({
-      url: config.url,
-      token: config.token,
-    });
-  }
-
-  private getKey(tableName: TABLE_NAMES, keys: Record<string, any>): string {
-    const keyParts = Object.entries(keys)
-      .filter(([_, value]) => value !== undefined)
-      .map(([key, value]) => `${key}:${value}`);
-    return `${tableName}:${keyParts.join(':')}`;
-  }
-
-  private ensureDate(date: Date | string | undefined): Date | undefined {
-    if (!date) return undefined;
-    return date instanceof Date ? date : new Date(date);
-  }
-
-  private serializeDate(date: Date | string | undefined): string | undefined {
-    if (!date) return undefined;
-    const dateObj = this.ensureDate(date);
-    return dateObj?.toISOString();
-  }
-
   async createTable({
     tableName,
     schema,
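Note: across the two hunks above, the old paginated getTraces becomes a deprecated thin wrapper that forwards fromDate/toDate into dateRange and delegates to getTracesPaginated, which now reports total and hasMore alongside the page of traces. A hypothetical call site (the URL, token, and filter values are placeholders, not from this package):

const store = new UpstashStore({ url: 'https://my-db.upstash.io', token: 'my-token' });

const { traces, total, hasMore } = await store.getTracesPaginated({
  name: 'agent', // prefix match against trace names
  page: 0,
  perPage: 50,
  dateRange: { start: new Date(Date.now() - 24 * 60 * 60 * 1000) }, // last 24 hours
});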
@@ -279,42 +411,47 @@ export class UpstashStore extends MastraStorage {
     await this.redis.set(`schema:${tableName}`, schema);
   }

+  /**
+   * No-op: This backend is schemaless and does not require schema changes.
+   * @param tableName Name of the table
+   * @param schema Schema of the table
+   * @param ifNotExists Array of column names to add if they don't exist
+   */
+  async alterTable(_args: {
+    tableName: TABLE_NAMES;
+    schema: Record<string, StorageColumn>;
+    ifNotExists: string[];
+  }): Promise<void> {
+    // Nothing to do here, Redis is schemaless
+  }
+
   async clearTable({ tableName }: { tableName: TABLE_NAMES }): Promise<void> {
     const pattern = `${tableName}:*`;
-
-    if (keys.length > 0) {
-      await this.redis.del(...keys);
-    }
+    await this.scanAndDelete(pattern);
   }

   async insert({ tableName, record }: { tableName: TABLE_NAMES; record: Record<string, any> }): Promise<void> {
-
-
-    if (tableName === TABLE_MESSAGES) {
-      // For messages, use threadId as the primary key component
-      key = this.getKey(tableName, { threadId: record.threadId, id: record.id });
-    } else if (tableName === TABLE_WORKFLOW_SNAPSHOT) {
-      key = this.getKey(tableName, {
-        namespace: record.namespace || 'workflows',
-        workflow_name: record.workflow_name,
-        run_id: record.run_id,
-      });
-    } else if (tableName === TABLE_EVALS) {
-      key = this.getKey(tableName, { id: record.run_id });
-    } else {
-      key = this.getKey(tableName, { id: record.id });
-    }
-
-    // Convert dates to ISO strings before storing
-    const processedRecord = {
-      ...record,
-      createdAt: this.serializeDate(record.createdAt),
-      updatedAt: this.serializeDate(record.updatedAt),
-    };
+    const { key, processedRecord } = this.processRecord(tableName, record);

     await this.redis.set(key, processedRecord);
   }

+  async batchInsert(input: { tableName: TABLE_NAMES; records: Record<string, any>[] }): Promise<void> {
+    const { tableName, records } = input;
+    if (!records.length) return;
+
+    const batchSize = 1000;
+    for (let i = 0; i < records.length; i += batchSize) {
+      const batch = records.slice(i, i + batchSize);
+      const pipeline = this.redis.pipeline();
+      for (const record of batch) {
+        const { key, processedRecord } = this.processRecord(tableName, record);
+        pipeline.set(key, processedRecord);
+      }
+      await pipeline.exec();
+    }
+  }
+
   async load<R>({ tableName, keys }: { tableName: TABLE_NAMES; keys: Record<string, string> }): Promise<R | null> {
     const key = this.getKey(tableName, keys);
     const data = await this.redis.get<R>(key);
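Note: the new batchInsert above chunks records into groups of 1000 and issues each group through one pipeline, so a chunk costs a single round trip rather than one per SET. The same pattern in isolation, as a sketch against a bare @upstash/redis client (function and variable names here are illustrative):

import { Redis } from '@upstash/redis';

// Write many key/value pairs with one network round trip per chunk.
async function setMany(redis: Redis, entries: Array<[string, unknown]>, chunkSize = 1000) {
  for (let i = 0; i < entries.length; i += chunkSize) {
    const pipeline = redis.pipeline();
    for (const [key, value] of entries.slice(i, i + chunkSize)) {
      pipeline.set(key, value);
    }
    await pipeline.exec();
  }
}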
@@ -337,24 +474,76 @@ export class UpstashStore extends MastraStorage {
     };
   }

+  /**
+   * @deprecated use getThreadsByResourceIdPaginated instead
+   */
   async getThreadsByResourceId({ resourceId }: { resourceId: string }): Promise<StorageThreadType[]> {
-
-
-
-
-
-    return
-  }
-
-
-
-      .
-
-
-
-
-
-
+    try {
+      const pattern = `${TABLE_THREADS}:*`;
+      const keys = await this.scanKeys(pattern);
+
+      if (keys.length === 0) {
+        return [];
+      }
+
+      const allThreads: StorageThreadType[] = [];
+      const pipeline = this.redis.pipeline();
+      keys.forEach(key => pipeline.get(key));
+      const results = await pipeline.exec();
+
+      for (let i = 0; i < results.length; i++) {
+        const thread = results[i] as StorageThreadType | null;
+        if (thread && thread.resourceId === resourceId) {
+          allThreads.push({
+            ...thread,
+            createdAt: this.ensureDate(thread.createdAt)!,
+            updatedAt: this.ensureDate(thread.updatedAt)!,
+            metadata: typeof thread.metadata === 'string' ? JSON.parse(thread.metadata) : thread.metadata,
+          });
+        }
+      }
+
+      allThreads.sort((a, b) => b.createdAt.getTime() - a.createdAt.getTime());
+      return allThreads;
+    } catch (error) {
+      console.error('Error in getThreadsByResourceId:', error);
+      return [];
+    }
+  }
+
+  public async getThreadsByResourceIdPaginated(
+    args: {
+      resourceId: string;
+    } & PaginationArgs,
+  ): Promise<PaginationInfo & { threads: StorageThreadType[] }> {
+    const { resourceId, page = 0, perPage = 100 } = args;
+
+    try {
+      const allThreads = await this.getThreadsByResourceId({ resourceId });
+
+      const total = allThreads.length;
+      const start = page * perPage;
+      const end = start + perPage;
+      const paginatedThreads = allThreads.slice(start, end);
+      const hasMore = end < total;
+
+      return {
+        threads: paginatedThreads,
+        total,
+        page,
+        perPage,
+        hasMore,
+      };
+    } catch (error) {
+      console.error('Error in getThreadsByResourceIdPaginated:', error);
+      return {
+        threads: [],
+        total: 0,
+        page,
+        perPage,
+        hasMore: false,
+      };
+    }
   }

   async saveThread({ thread }: { thread: StorageThreadType }): Promise<StorageThreadType> {
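Note: getThreadsByResourceIdPaginated reuses the full unpaginated fetch and slices the sorted result in memory, so total counts every thread for the resource, not just the returned page. A hypothetical call, given an UpstashStore instance store:

const { threads, total, hasMore } = await store.getThreadsByResourceIdPaginated({
  resourceId: 'user-123', // illustrative resource id
  page: 0,
  perPage: 20,
});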
@@ -393,22 +582,45 @@ export class UpstashStore extends MastraStorage {
   }

   async deleteThread({ threadId }: { threadId: string }): Promise<void> {
-
-
-
+    // Delete thread metadata and sorted set
+    const threadKey = this.getKey(TABLE_THREADS, { id: threadId });
+    const threadMessagesKey = this.getThreadMessagesKey(threadId);
+    const messageIds: string[] = await this.redis.zrange(threadMessagesKey, 0, -1);

-
-
-
+    const pipeline = this.redis.pipeline();
+    pipeline.del(threadKey);
+    pipeline.del(threadMessagesKey);

-
-
+    for (let i = 0; i < messageIds.length; i++) {
+      const messageId = messageIds[i];
+      const messageKey = this.getMessageKey(threadId, messageId as string);
+      pipeline.del(messageKey);
+    }
+
+    await pipeline.exec();
+
+    // Bulk delete all message keys for this thread if any remain
+    await this.scanAndDelete(this.getMessageKey(threadId, '*'));
   }

-  async saveMessages(
+  async saveMessages(args: { messages: MastraMessageV1[]; format?: undefined | 'v1' }): Promise<MastraMessageV1[]>;
+  async saveMessages(args: { messages: MastraMessageV2[]; format: 'v2' }): Promise<MastraMessageV2[]>;
+  async saveMessages(
+    args: { messages: MastraMessageV1[]; format?: undefined | 'v1' } | { messages: MastraMessageV2[]; format: 'v2' },
+  ): Promise<MastraMessageV2[] | MastraMessageV1[]> {
+    const { messages, format = 'v1' } = args;
     if (messages.length === 0) return [];

-    const
+    const threadId = messages[0]?.threadId;
+    if (!threadId) {
+      throw new Error('Thread ID is required');
+    }
+
+    // Check if thread exists
+    const thread = await this.getThreadById({ threadId });
+    if (!thread) {
+      throw new Error(`Thread ${threadId} not found`);
+    }

     // Add an index to each message to maintain order
     const messagesWithIndex = messages.map((message, index) => ({
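Note: the rewritten deleteThread above makes the storage layout explicit: each message lives under its own key, and a per-thread sorted set (thread:<threadId>:messages) orders message ids by score. Deletion removes the thread key, the sorted set, and every member's message key in one pipeline, then sweeps any stragglers with scanAndDelete. A minimal sketch of the ordering scheme with illustrative ids:

// Scores (the insertion index assigned in saveMessages below) define
// conversation order; zrange(key, 0, -1) returns ids in that order.
await redis.zadd('thread:t1:messages', { score: 0, member: 'msg-a' });
await redis.zadd('thread:t1:messages', { score: 1, member: 'msg-b' });
const ordered = await redis.zrange('thread:t1:messages', 0, -1); // ['msg-a', 'msg-b']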
@@ -416,78 +628,277 @@ export class UpstashStore extends MastraStorage {
       _index: index,
     }));

-
-
-
+    // Get current thread data once (all messages belong to same thread)
+    const threadKey = this.getKey(TABLE_THREADS, { id: threadId });
+    const existingThread = await this.redis.get<StorageThreadType>(threadKey);

-
-
+    const batchSize = 1000;
+    for (let i = 0; i < messagesWithIndex.length; i += batchSize) {
+      const batch = messagesWithIndex.slice(i, i + batchSize);
+      const pipeline = this.redis.pipeline();

-
-
-
-
-
+      for (const message of batch) {
+        const key = this.getMessageKey(message.threadId!, message.id);
+        const createdAtScore = new Date(message.createdAt).getTime();
+        const score = message._index !== undefined ? message._index : createdAtScore;
+
+        // Store the message data
+        pipeline.set(key, message);
+
+        // Add to sorted set for this thread
+        pipeline.zadd(this.getThreadMessagesKey(message.threadId!), {
+          score,
+          member: message.id,
+        });
+      }
+
+      // Update the thread's updatedAt field (only in the first batch)
+      if (i === 0 && existingThread) {
+        const updatedThread = {
+          ...existingThread,
+          updatedAt: new Date(),
+        };
+        pipeline.set(threadKey, this.processRecord(TABLE_THREADS, updatedThread).processedRecord);
+      }
+
+      await pipeline.exec();
     }

-
-    return
+    const list = new MessageList().add(messages, 'memory');
+    if (format === `v2`) return list.get.all.v2();
+    return list.get.all.v1();
   }

-  async
-
+  private async _getIncludedMessages(
+    threadId: string,
+    selectBy: StorageGetMessagesArg['selectBy'],
+  ): Promise<MastraMessageV2[] | MastraMessageV1[]> {
     const messageIds = new Set<string>();
-    const
-
-    if (limit === 0 && !selectBy?.include) {
-      return [];
-    }
+    const messageIdToThreadIds: Record<string, string> = {};

     // First, get specifically included messages and their context
     if (selectBy?.include?.length) {
       for (const item of selectBy.include) {
         messageIds.add(item.id);

-        if
-
-
-
-
-
-
-
-
-
-
+        // Use per-include threadId if present, else fallback to main threadId
+        const itemThreadId = item.threadId || threadId;
+        messageIdToThreadIds[item.id] = itemThreadId;
+        const itemThreadMessagesKey = this.getThreadMessagesKey(itemThreadId);
+
+        // Get the rank of this message in the sorted set
+        const rank = await this.redis.zrank(itemThreadMessagesKey, item.id);
+        if (rank === null) continue;
+
+        // Get previous messages if requested
+        if (item.withPreviousMessages) {
+          const start = Math.max(0, rank - item.withPreviousMessages);
+          const prevIds = rank === 0 ? [] : await this.redis.zrange(itemThreadMessagesKey, start, rank - 1);
+          prevIds.forEach(id => {
+            messageIds.add(id as string);
+            messageIdToThreadIds[id as string] = itemThreadId;
+          });
+        }

-
-
-
-
-
+        // Get next messages if requested
+        if (item.withNextMessages) {
+          const nextIds = await this.redis.zrange(itemThreadMessagesKey, rank + 1, rank + item.withNextMessages);
+          nextIds.forEach(id => {
+            messageIds.add(id as string);
+            messageIdToThreadIds[id as string] = itemThreadId;
+          });
         }
       }
+
+      const pipeline = this.redis.pipeline();
+      Array.from(messageIds).forEach(id => {
+        const tId = messageIdToThreadIds[id] || threadId;
+        pipeline.get(this.getMessageKey(tId, id as string));
+      });
+      const results = await pipeline.exec();
+      return results.filter(result => result !== null) as MastraMessageV2[] | MastraMessageV1[];
+    }
+
+    return [];
+  }
+
+  /**
+   * @deprecated use getMessagesPaginated instead
+   */
+  public async getMessages(args: StorageGetMessagesArg & { format?: 'v1' }): Promise<MastraMessageV1[]>;
+  public async getMessages(args: StorageGetMessagesArg & { format: 'v2' }): Promise<MastraMessageV2[]>;
+  public async getMessages({
+    threadId,
+    selectBy,
+    format,
+  }: StorageGetMessagesArg & { format?: 'v1' | 'v2' }): Promise<MastraMessageV1[] | MastraMessageV2[]> {
+    const threadMessagesKey = this.getThreadMessagesKey(threadId);
+    const allMessageIds = await this.redis.zrange(threadMessagesKey, 0, -1);
+    // When selectBy is undefined or selectBy.last is undefined, get ALL messages (not just 40)
+    let limit: number;
+    if (typeof selectBy?.last === 'number') {
+      limit = Math.max(0, selectBy.last);
+    } else if (selectBy?.last === false) {
+      limit = 0;
+    } else {
+      // No limit specified - get all messages
+      limit = Number.MAX_SAFE_INTEGER;
     }

-
-    const
-
+    const messageIds = new Set<string>();
+    const messageIdToThreadIds: Record<string, string> = {};
+
+    if (limit === 0 && !selectBy?.include) {
+      return [];
+    }
+
+    // Then get the most recent messages (or all if no limit)
+    if (limit === Number.MAX_SAFE_INTEGER) {
+      // Get all messages
+      const allIds = await this.redis.zrange(threadMessagesKey, 0, -1);
+      allIds.forEach(id => {
+        messageIds.add(id as string);
+        messageIdToThreadIds[id as string] = threadId;
+      });
+    } else if (limit > 0) {
+      // Get limited number of recent messages
+      const latestIds = await this.redis.zrange(threadMessagesKey, -limit, -1);
+      latestIds.forEach(id => {
+        messageIds.add(id as string);
+        messageIdToThreadIds[id as string] = threadId;
+      });
+    }
+
+    const includedMessages = await this._getIncludedMessages(threadId, selectBy);

     // Fetch all needed messages in parallel
-    const messages =
-
-
-
-
-
-
+    const messages = [
+      ...includedMessages,
+      ...((
+        await Promise.all(
+          Array.from(messageIds).map(async id => {
+            const tId = messageIdToThreadIds[id] || threadId;
+            const byThreadId = await this.redis.get<MastraMessageV2 & { _index?: number }>(this.getMessageKey(tId, id));
+            if (byThreadId) return byThreadId;
+
+            return null;
+          }),
+        )
+      ).filter(msg => msg !== null) as (MastraMessageV2 & { _index?: number })[]),
+    ];

     // Sort messages by their position in the sorted set
-
-    messages.sort((a, b) => messageOrder.indexOf(a!.id) - messageOrder.indexOf(b!.id));
+    messages.sort((a, b) => allMessageIds.indexOf(a!.id) - allMessageIds.indexOf(b!.id));

-
-
+    const seen = new Set<string>();
+    const dedupedMessages = messages.filter(row => {
+      if (seen.has(row.id)) return false;
+      seen.add(row.id);
+      return true;
+    });
+
+    // Remove _index before returning and handle format conversion properly
+    const prepared = dedupedMessages
+      .filter(message => message !== null && message !== undefined)
+      .map(message => {
+        const { _index, ...messageWithoutIndex } = message as MastraMessageV2 & { _index?: number };
+        return messageWithoutIndex as unknown as MastraMessageV1;
+      });
+
+    // For backward compatibility, return messages directly without using MessageList
+    // since MessageList has deduplication logic that can cause issues
+    if (format === 'v2') {
+      // Convert V1 format back to V2 format
+      return prepared.map(msg => ({
+        ...msg,
+        content: msg.content || { format: 2, parts: [{ type: 'text', text: '' }] },
+      })) as MastraMessageV2[];
+    }
+
+    return prepared;
+  }
+
+  public async getMessagesPaginated(
+    args: StorageGetMessagesArg & {
+      format?: 'v1' | 'v2';
+    },
+  ): Promise<PaginationInfo & { messages: MastraMessageV1[] | MastraMessageV2[] }> {
+    const { threadId, selectBy, format } = args;
+    const { page = 0, perPage = 40, dateRange } = selectBy?.pagination || {};
+    const fromDate = dateRange?.start;
+    const toDate = dateRange?.end;
+    const threadMessagesKey = this.getThreadMessagesKey(threadId);
+    const messages: (MastraMessageV2 | MastraMessageV1)[] = [];
+
+    const includedMessages = await this._getIncludedMessages(threadId, selectBy);
+    messages.push(...includedMessages);
+
+    try {
+      const allMessageIds = await this.redis.zrange(threadMessagesKey, 0, -1);
+      if (allMessageIds.length === 0) {
+        return {
+          messages: [],
+          total: 0,
+          page,
+          perPage,
+          hasMore: false,
+        };
+      }
+
+      // Use pipeline to fetch all messages efficiently
+      const pipeline = this.redis.pipeline();
+      allMessageIds.forEach(id => pipeline.get(this.getMessageKey(threadId, id as string)));
+      const results = await pipeline.exec();
+
+      // Process messages and apply filters - handle undefined results from pipeline
+      let messagesData = results.filter((msg): msg is MastraMessageV2 | MastraMessageV1 => msg !== null) as (
+        | MastraMessageV2
+        | MastraMessageV1
+      )[];
+
+      // Apply date filters if provided
+      if (fromDate) {
+        messagesData = messagesData.filter(msg => msg && new Date(msg.createdAt).getTime() >= fromDate.getTime());
+      }
+
+      if (toDate) {
+        messagesData = messagesData.filter(msg => msg && new Date(msg.createdAt).getTime() <= toDate.getTime());
+      }
+
+      // Sort messages by their position in the sorted set
+      messagesData.sort((a, b) => allMessageIds.indexOf(a!.id) - allMessageIds.indexOf(b!.id));
+
+      const total = messagesData.length;
+
+      const start = page * perPage;
+      const end = start + perPage;
+      const hasMore = end < total;
+      const paginatedMessages = messagesData.slice(start, end);
+
+      messages.push(...paginatedMessages);
+
+      const list = new MessageList().add(messages, 'memory');
+      const finalMessages = (format === `v2` ? list.get.all.v2() : list.get.all.v1()) as
+        | MastraMessageV1[]
+        | MastraMessageV2[];
+
+      return {
+        messages: finalMessages,
+        total,
+        page,
+        perPage,
+        hasMore,
+      };
+    } catch (error) {
+      console.error('Failed to get paginated messages:', error);
+      return {
+        messages: [],
+        total: 0,
+        page,
+        perPage,
+        hasMore: false,
+      };
+    }
   }

   async persistWorkflowSnapshot(params: {
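Note: getMessagesPaginated above pages through the thread's sorted set (default perPage of 40), applies optional dateRange bounds, and runs the final page through MessageList for v1/v2 conversion. A hypothetical call, given an UpstashStore instance store:

const { messages, total, hasMore } = await store.getMessagesPaginated({
  threadId: 'thread-1', // illustrative thread id
  format: 'v2',
  selectBy: {
    pagination: { page: 0, perPage: 40 },
  },
});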
@@ -531,6 +942,137 @@ export class UpstashStore extends MastraStorage {
     return data.snapshot;
   }

+  /**
+   * Get all evaluations with pagination and total count
+   * @param options Pagination and filtering options
+   * @returns Object with evals array and total count
+   */
+  async getEvals(
+    options?: {
+      agentName?: string;
+      type?: 'test' | 'live';
+    } & PaginationArgs,
+  ): Promise<PaginationInfo & { evals: EvalRow[] }> {
+    try {
+      // Default pagination parameters
+      const { agentName, type, page = 0, perPage = 100, dateRange } = options || {};
+      const fromDate = dateRange?.start;
+      const toDate = dateRange?.end;
+
+      // Get all keys that match the evals table pattern using cursor-based scanning
+      const pattern = `${TABLE_EVALS}:*`;
+      const keys = await this.scanKeys(pattern);
+
+      // Check if we have any keys before using pipeline
+      if (keys.length === 0) {
+        return {
+          evals: [],
+          total: 0,
+          page,
+          perPage,
+          hasMore: false,
+        };
+      }
+
+      // Use pipeline for batch fetching to improve performance
+      const pipeline = this.redis.pipeline();
+      keys.forEach(key => pipeline.get(key));
+      const results = await pipeline.exec();
+
+      // Process results and apply filters
+      let filteredEvals = results
+        .map((result: any) => result as Record<string, any> | null)
+        .filter((record): record is Record<string, any> => record !== null && typeof record === 'object');
+
+      // Apply agent name filter if provided
+      if (agentName) {
+        filteredEvals = filteredEvals.filter(record => record.agent_name === agentName);
+      }
+
+      // Apply type filter if provided
+      if (type === 'test') {
+        filteredEvals = filteredEvals.filter(record => {
+          if (!record.test_info) return false;
+
+          try {
+            if (typeof record.test_info === 'string') {
+              const parsedTestInfo = JSON.parse(record.test_info);
+              return parsedTestInfo && typeof parsedTestInfo === 'object' && 'testPath' in parsedTestInfo;
+            }
+            return typeof record.test_info === 'object' && 'testPath' in record.test_info;
+          } catch {
+            return false;
+          }
+        });
+      } else if (type === 'live') {
+        filteredEvals = filteredEvals.filter(record => {
+          if (!record.test_info) return true;
+
+          try {
+            if (typeof record.test_info === 'string') {
+              const parsedTestInfo = JSON.parse(record.test_info);
+              return !(parsedTestInfo && typeof parsedTestInfo === 'object' && 'testPath' in parsedTestInfo);
+            }
+            return !(typeof record.test_info === 'object' && 'testPath' in record.test_info);
+          } catch {
+            return true;
+          }
+        });
+      }
+
+      // Apply date filters if provided
+      if (fromDate) {
+        filteredEvals = filteredEvals.filter(record => {
+          const createdAt = new Date(record.created_at || record.createdAt || 0);
+          return createdAt.getTime() >= fromDate.getTime();
+        });
+      }
+
+      if (toDate) {
+        filteredEvals = filteredEvals.filter(record => {
+          const createdAt = new Date(record.created_at || record.createdAt || 0);
+          return createdAt.getTime() <= toDate.getTime();
+        });
+      }
+
+      // Sort by creation date (newest first)
+      filteredEvals.sort((a, b) => {
+        const dateA = new Date(a.created_at || a.createdAt || 0).getTime();
+        const dateB = new Date(b.created_at || b.createdAt || 0).getTime();
+        return dateB - dateA;
+      });
+
+      const total = filteredEvals.length;
+
+      // Apply pagination
+      const start = page * perPage;
+      const end = start + perPage;
+      const paginatedEvals = filteredEvals.slice(start, end);
+      const hasMore = end < total;
+
+      // Transform to EvalRow format
+      const evals = paginatedEvals.map(record => this.transformEvalRecord(record));
+
+      return {
+        evals,
+        total,
+        page,
+        perPage,
+        hasMore,
+      };
+    } catch (error) {
+      const { page = 0, perPage = 100 } = options || {};
+      console.error('Failed to get evals:', error);
+      return {
+        evals: [],
+        total: 0,
+        page,
+        perPage,
+        hasMore: false,
+      };
+    }
+  }
+
   async getWorkflowRuns(
     {
       namespace,
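Note: the new getEvals supersedes the deprecated getEvalsByAgentName, layering agent, type ('test' requires a testPath in test_info, 'live' requires its absence), and date filters over the same scan-and-pipeline fetch. A hypothetical call, given an UpstashStore instance store:

const { evals, total, hasMore } = await store.getEvals({
  agentName: 'my-agent', // illustrative agent name
  type: 'live',
  page: 0,
  perPage: 25,
});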
@@ -539,6 +1081,7 @@ export class UpstashStore extends MastraStorage {
       toDate,
       limit,
       offset,
+      resourceId,
     }: {
       namespace: string;
       workflowName?: string;
@@ -546,74 +1089,99 @@ export class UpstashStore extends MastraStorage {
       toDate?: Date;
       limit?: number;
       offset?: number;
+      resourceId?: string;
     } = { namespace: 'workflows' },
-  ): Promise<{
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-      keys.
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-        })
-        .sort((a, b) => b.createdAt.getTime() - a.createdAt.getTime());
-
-      const total = runs.length;
-
-      // Apply pagination if requested
-      if (limit !== undefined && offset !== undefined) {
-        runs = runs.slice(offset, offset + limit);
+  ): Promise<WorkflowRuns> {
+    try {
+      // Get all workflow keys
+      let pattern = this.getKey(TABLE_WORKFLOW_SNAPSHOT, { namespace }) + ':*';
+      if (workflowName && resourceId) {
+        pattern = this.getKey(TABLE_WORKFLOW_SNAPSHOT, {
+          namespace,
+          workflow_name: workflowName,
+          run_id: '*',
+          resourceId,
+        });
+      } else if (workflowName) {
+        pattern = this.getKey(TABLE_WORKFLOW_SNAPSHOT, { namespace, workflow_name: workflowName }) + ':*';
+      } else if (resourceId) {
+        pattern = this.getKey(TABLE_WORKFLOW_SNAPSHOT, { namespace, workflow_name: '*', run_id: '*', resourceId });
+      }
+      const keys = await this.scanKeys(pattern);
+
+      // Check if we have any keys before using pipeline
+      if (keys.length === 0) {
+        return { runs: [], total: 0 };
+      }
+
+      // Use pipeline for batch fetching to improve performance
+      const pipeline = this.redis.pipeline();
+      keys.forEach(key => pipeline.get(key));
+      const results = await pipeline.exec();
+
+      // Filter and transform results - handle undefined results
+      let runs = results
+        .map((result: any) => result as Record<string, any> | null)
+        .filter(
+          (record): record is Record<string, any> =>
+            record !== null && record !== undefined && typeof record === 'object' && 'workflow_name' in record,
+        )
+        // Only filter by workflowName if it was specifically requested
+        .filter(record => !workflowName || record.workflow_name === workflowName)
+        .map(w => this.parseWorkflowRun(w!))
+        .filter(w => {
+          if (fromDate && w.createdAt < fromDate) return false;
+          if (toDate && w.createdAt > toDate) return false;
+          return true;
+        })
+        .sort((a, b) => b.createdAt.getTime() - a.createdAt.getTime());
+
+      const total = runs.length;
+
+      // Apply pagination if requested
+      if (limit !== undefined && offset !== undefined) {
+        runs = runs.slice(offset, offset + limit);
+      }
+
+      return { runs, total };
+    } catch (error) {
+      console.error('Error getting workflow runs:', error);
+      throw error;
     }
+  }

-
+  async getWorkflowRunById({
+    namespace = 'workflows',
+    runId,
+    workflowName,
+  }: {
+    namespace: string;
+    runId: string;
+    workflowName?: string;
+  }): Promise<WorkflowRun | null> {
+    try {
+      const key = this.getKey(TABLE_WORKFLOW_SNAPSHOT, { namespace, workflow_name: workflowName, run_id: runId }) + '*';
+      const keys = await this.scanKeys(key);
+      const workflows = await Promise.all(
+        keys.map(async key => {
+          const data = await this.redis.get<{
+            workflow_name: string;
+            run_id: string;
+            snapshot: WorkflowRunState | string;
+            createdAt: string | Date;
+            updatedAt: string | Date;
+            resourceId: string;
+          }>(key);
+          return data;
+        }),
+      );
+      const data = workflows.find(w => w?.run_id === runId && w?.workflow_name === workflowName) as WorkflowRun | null;
+      if (!data) return null;
+      return this.parseWorkflowRun(data);
+    } catch (error) {
+      console.error('Error getting workflow run by ID:', error);
+      throw error;
+    }
   }

   async close(): Promise<void> {