@mastra/libsql 0.11.0 → 0.11.1-alpha.0
This diff shows the content of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between those versions as they appear in the public registry.
- package/.turbo/turbo-build.log +7 -7
- package/CHANGELOG.md +26 -0
- package/LICENSE.md +12 -4
- package/dist/_tsup-dts-rollup.d.cts +336 -30
- package/dist/_tsup-dts-rollup.d.ts +336 -30
- package/dist/index.cjs +1357 -847
- package/dist/index.js +1227 -717
- package/package.json +6 -6
- package/src/storage/domains/legacy-evals/index.ts +149 -0
- package/src/storage/domains/memory/index.ts +786 -0
- package/src/storage/domains/operations/index.ts +296 -0
- package/src/storage/domains/scores/index.ts +217 -0
- package/src/storage/domains/traces/index.ts +150 -0
- package/src/storage/domains/utils.ts +67 -0
- package/src/storage/domains/workflows/index.ts +198 -0
- package/src/storage/index.test.ts +2 -515
- package/src/storage/index.ts +186 -1338
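
The headline change is that `package/src/storage/domains/*` introduces per-domain stores (operations, memory, scores, traces, workflows, legacy evals) and `package/src/storage/index.ts` is rewritten so that `LibSQLStore` delegates its public methods to them, as the diff below shows. A minimal usage sketch follows; it is not part of the diff, the import path from the package root is an assumption, and the config shape and method signature come from the new `LibSQLConfig` type and `getThreadById` wrapper in the diff.

```ts
// Hypothetical usage of the refactored store; the names below appear in the diff,
// but importing from the package root is an assumption.
import { LibSQLStore } from '@mastra/libsql';

const store = new LibSQLStore({
  url: 'file:./mastra.db', // or pass { client } with an existing @libsql/client instance
  maxRetries: 5,           // retries on SQLITE_BUSY (default 5)
  initialBackoffMs: 100,   // starting backoff in ms, doubled per retry (default 100)
});

// Public methods keep their old signatures but now forward to this.stores.*,
// e.g. the memory domain handles threads and messages:
const thread = await store.getThreadById({ threadId: 'thread-123' });
```

The same pattern applies to workflows, traces, scores, and legacy evals: each wrapper in `index.ts` is now a one-line call into the matching domain store.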
package/src/storage/index.ts
CHANGED
@@ -1,126 +1,122 @@
 import { createClient } from '@libsql/client';
-import type { Client
-import { MessageList } from '@mastra/core/agent';
+import type { Client } from '@libsql/client';
 import type { MastraMessageContentV2, MastraMessageV2 } from '@mastra/core/agent';
-import { ErrorCategory, ErrorDomain, MastraError } from '@mastra/core/error';
-import type { MetricResult, TestInfo } from '@mastra/core/eval';
 import type { MastraMessageV1, StorageThreadType } from '@mastra/core/memory';
-import {
-
-  TABLE_EVALS,
-  TABLE_MESSAGES,
-  TABLE_THREADS,
-  TABLE_TRACES,
-  TABLE_RESOURCES,
-  TABLE_WORKFLOW_SNAPSHOT,
-} from '@mastra/core/storage';
+import type { ScoreRowData } from '@mastra/core/scores';
+import { MastraStorage } from '@mastra/core/storage';
 import type {
   EvalRow,
   PaginationArgs,
   PaginationInfo,
   StorageColumn,
+  StoragePagination,
   StorageGetMessagesArg,
   StorageResourceType,
   TABLE_NAMES,
   WorkflowRun,
   WorkflowRuns,
+  StorageGetTracesArg,
+  StorageDomains,
 } from '@mastra/core/storage';
+
 import type { Trace } from '@mastra/core/telemetry';
-import { parseSqlIdentifier } from '@mastra/core/utils';
 import type { WorkflowRunState } from '@mastra/core/workflows';
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+import { LegacyEvalsLibSQL } from './domains/legacy-evals';
+import { MemoryLibSQL } from './domains/memory';
+import { StoreOperationsLibSQL } from './domains/operations';
+import { ScoresLibSQL } from './domains/scores';
+import { TracesLibSQL } from './domains/traces';
+import { WorkflowsLibSQL } from './domains/workflows';
+
+export type LibSQLConfig =
+  | {
+      url: string;
+      authToken?: string;
+      /**
+       * Maximum number of retries for write operations if an SQLITE_BUSY error occurs.
+       * @default 5
+       */
+      maxRetries?: number;
+      /**
+       * Initial backoff time in milliseconds for retrying write operations on SQLITE_BUSY.
+       * The backoff time will double with each retry (exponential backoff).
+       * @default 100
+       */
+      initialBackoffMs?: number;
+    }
+  | {
+      client: Client;
+      maxRetries?: number;
+      initialBackoffMs?: number;
+    };
 
 export class LibSQLStore extends MastraStorage {
   private client: Client;
   private readonly maxRetries: number;
   private readonly initialBackoffMs: number;
 
+  stores: StorageDomains;
+
   constructor(config: LibSQLConfig) {
     super({ name: `LibSQLStore` });
 
     this.maxRetries = config.maxRetries ?? 5;
     this.initialBackoffMs = config.initialBackoffMs ?? 100;
 
-
-
-
+    if ('url' in config) {
+      // need to re-init every time for in memory dbs or the tables might not exist
+      if (config.url.endsWith(':memory:')) {
+        this.shouldCacheInit = false;
+      }
+
+      this.client = createClient({ url: config.url });
+
+      // Set PRAGMAs for better concurrency, especially for file-based databases
+      if (config.url.startsWith('file:') || config.url.includes(':memory:')) {
+        this.client
+          .execute('PRAGMA journal_mode=WAL;')
+          .then(() => this.logger.debug('LibSQLStore: PRAGMA journal_mode=WAL set.'))
+          .catch(err => this.logger.warn('LibSQLStore: Failed to set PRAGMA journal_mode=WAL.', err));
+        this.client
+          .execute('PRAGMA busy_timeout = 5000;') // 5 seconds
+          .then(() => this.logger.debug('LibSQLStore: PRAGMA busy_timeout=5000 set.'))
+          .catch(err => this.logger.warn('LibSQLStore: Failed to set PRAGMA busy_timeout.', err));
+      }
+    } else {
+      this.client = config.client;
     }
 
-
+    const operations = new StoreOperationsLibSQL({
+      client: this.client,
+      maxRetries: this.maxRetries,
+      initialBackoffMs: this.initialBackoffMs,
+    });
 
-
-
-
-
-
-
-
-
-
-
-
+    const scores = new ScoresLibSQL({ client: this.client, operations });
+    const traces = new TracesLibSQL({ client: this.client, operations });
+    const workflows = new WorkflowsLibSQL({ client: this.client, operations });
+    const memory = new MemoryLibSQL({ client: this.client, operations });
+    const legacyEvals = new LegacyEvalsLibSQL({ client: this.client });
+
+    this.stores = {
+      operations,
+      scores,
+      traces,
+      workflows,
+      memory,
+      legacyEvals,
+    };
   }
 
-  public get supports()
-    selectByIncludeResourceScope: boolean;
-    resourceWorkingMemory: boolean;
-  } {
+  public get supports() {
     return {
       selectByIncludeResourceScope: true,
       resourceWorkingMemory: true,
+      hasColumn: true,
+      createTable: true,
     };
   }
 
-  private getCreateTableSQL(tableName: TABLE_NAMES, schema: Record<string, StorageColumn>): string {
-    const parsedTableName = parseSqlIdentifier(tableName, 'table name');
-    const columns = Object.entries(schema).map(([name, col]) => {
-      const parsedColumnName = parseSqlIdentifier(name, 'column name');
-      let type = col.type.toUpperCase();
-      if (type === 'TEXT') type = 'TEXT';
-      if (type === 'TIMESTAMP') type = 'TEXT'; // Store timestamps as ISO strings
-      // if (type === 'BIGINT') type = 'INTEGER';
-
-      const nullable = col.nullable ? '' : 'NOT NULL';
-      const primaryKey = col.primaryKey ? 'PRIMARY KEY' : '';
-
-      return `${parsedColumnName} ${type} ${nullable} ${primaryKey}`.trim();
-    });
-
-    // For workflow_snapshot table, create a composite primary key
-    if (tableName === TABLE_WORKFLOW_SNAPSHOT) {
-      const stmnt = `CREATE TABLE IF NOT EXISTS ${parsedTableName} (
-        ${columns.join(',\n')},
-        PRIMARY KEY (workflow_name, run_id)
-      )`;
-      return stmnt;
-    }
-
-    return `CREATE TABLE IF NOT EXISTS ${parsedTableName} (${columns.join(', ')})`;
-  }
-
   async createTable({
     tableName,
     schema,
@@ -128,36 +124,7 @@ export class LibSQLStore extends MastraStorage {
     tableName: TABLE_NAMES;
     schema: Record<string, StorageColumn>;
   }): Promise<void> {
-
-    this.logger.debug(`Creating database table`, { tableName, operation: 'schema init' });
-    const sql = this.getCreateTableSQL(tableName, schema);
-    await this.client.execute(sql);
-    } catch (error) {
-      // this.logger.error(`Error creating table ${tableName}: ${error}`);
-      // throw error;
-      throw new MastraError(
-        {
-          id: 'LIBSQL_STORE_CREATE_TABLE_FAILED',
-          domain: ErrorDomain.STORAGE,
-          category: ErrorCategory.THIRD_PARTY,
-          details: {
-            tableName,
-          },
-        },
-        error,
-      );
-    }
-  }
-
-  protected getSqlType(type: StorageColumn['type']): string {
-    switch (type) {
-      case 'bigint':
-        return 'INTEGER'; // SQLite uses INTEGER for all integer sizes
-      case 'jsonb':
-        return 'TEXT'; // Store JSON as TEXT in SQLite
-      default:
-        return super.getSqlType(type);
-    }
+    await this.stores.operations.createTable({ tableName, schema });
   }
 
   /**
@@ -175,368 +142,50 @@ export class LibSQLStore extends MastraStorage {
     schema: Record<string, StorageColumn>;
     ifNotExists: string[];
   }): Promise<void> {
-
-
-    try {
-      // 1. Get existing columns using PRAGMA
-      const pragmaQuery = `PRAGMA table_info(${parsedTableName})`;
-      const result = await this.client.execute(pragmaQuery);
-      const existingColumnNames = new Set(result.rows.map((row: any) => row.name.toLowerCase()));
-
-      // 2. Add missing columns
-      for (const columnName of ifNotExists) {
-        if (!existingColumnNames.has(columnName.toLowerCase()) && schema[columnName]) {
-          const columnDef = schema[columnName];
-          const sqlType = this.getSqlType(columnDef.type); // ensure this exists or implement
-          const nullable = columnDef.nullable === false ? 'NOT NULL' : '';
-          // In SQLite, you must provide a DEFAULT if adding a NOT NULL column to a non-empty table
-          const defaultValue = columnDef.nullable === false ? this.getDefaultValue(columnDef.type) : '';
-          const alterSql =
-            `ALTER TABLE ${parsedTableName} ADD COLUMN "${columnName}" ${sqlType} ${nullable} ${defaultValue}`.trim();
-
-          await this.client.execute(alterSql);
-          this.logger?.debug?.(`Added column ${columnName} to table ${parsedTableName}`);
-        }
-      }
-    } catch (error) {
-      throw new MastraError(
-        {
-          id: 'LIBSQL_STORE_ALTER_TABLE_FAILED',
-          domain: ErrorDomain.STORAGE,
-          category: ErrorCategory.THIRD_PARTY,
-          details: {
-            tableName,
-          },
-        },
-        error,
-      );
-    }
+    await this.stores.operations.alterTable({ tableName, schema, ifNotExists });
   }
 
   async clearTable({ tableName }: { tableName: TABLE_NAMES }): Promise<void> {
-
-    try {
-      await this.client.execute(`DELETE FROM ${parsedTableName}`);
-    } catch (e) {
-      const mastraError = new MastraError(
-        {
-          id: 'LIBSQL_STORE_CLEAR_TABLE_FAILED',
-          domain: ErrorDomain.STORAGE,
-          category: ErrorCategory.THIRD_PARTY,
-          details: {
-            tableName,
-          },
-        },
-        e,
-      );
-      this.logger?.trackException?.(mastraError);
-      this.logger?.error?.(mastraError.toString());
-    }
+    await this.stores.operations.clearTable({ tableName });
   }
 
-
-
-    args: InValue[];
-  } {
-    const parsedTableName = parseSqlIdentifier(tableName, 'table name');
-    const columns = Object.keys(record).map(col => parseSqlIdentifier(col, 'column name'));
-    const values = Object.values(record).map(v => {
-      if (typeof v === `undefined`) {
-        // returning an undefined value will cause libsql to throw
-        return null;
-      }
-      if (v instanceof Date) {
-        return v.toISOString();
-      }
-      return typeof v === 'object' ? JSON.stringify(v) : v;
-    });
-    const placeholders = values.map(() => '?').join(', ');
-
-    return {
-      sql: `INSERT OR REPLACE INTO ${parsedTableName} (${columns.join(', ')}) VALUES (${placeholders})`,
-      args: values,
-    };
-  }
-
-  private async executeWriteOperationWithRetry<T>(
-    operationFn: () => Promise<T>,
-    operationDescription: string,
-  ): Promise<T> {
-    let retries = 0;
-
-    while (true) {
-      try {
-        return await operationFn();
-      } catch (error: any) {
-        if (
-          error.message &&
-          (error.message.includes('SQLITE_BUSY') || error.message.includes('database is locked')) &&
-          retries < this.maxRetries
-        ) {
-          retries++;
-          const backoffTime = this.initialBackoffMs * Math.pow(2, retries - 1);
-          this.logger.warn(
-            `LibSQLStore: Encountered SQLITE_BUSY during ${operationDescription}. Retrying (${retries}/${this.maxRetries}) in ${backoffTime}ms...`,
-          );
-          await new Promise(resolve => setTimeout(resolve, backoffTime));
-        } else {
-          this.logger.error(`LibSQLStore: Error during ${operationDescription} after ${retries} retries: ${error}`);
-          throw error;
-        }
-      }
-    }
+  async dropTable({ tableName }: { tableName: TABLE_NAMES }): Promise<void> {
+    await this.stores.operations.dropTable({ tableName });
   }
 
   public insert(args: { tableName: TABLE_NAMES; record: Record<string, any> }): Promise<void> {
-    return this.
-  }
-
-  private async doInsert({
-    tableName,
-    record,
-  }: {
-    tableName: TABLE_NAMES;
-    record: Record<string, any>;
-  }): Promise<void> {
-    await this.client.execute(
-      this.prepareStatement({
-        tableName,
-        record,
-      }),
-    );
+    return this.stores.operations.insert(args);
   }
 
   public batchInsert(args: { tableName: TABLE_NAMES; records: Record<string, any>[] }): Promise<void> {
-    return this.
-      () => this.doBatchInsert(args),
-      `batch insert into table ${args.tableName}`,
-    ).catch(error => {
-      throw new MastraError(
-        {
-          id: 'LIBSQL_STORE_BATCH_INSERT_FAILED',
-          domain: ErrorDomain.STORAGE,
-          category: ErrorCategory.THIRD_PARTY,
-          details: {
-            tableName: args.tableName,
-          },
-        },
-        error,
-      );
-    });
-  }
-
-  private async doBatchInsert({
-    tableName,
-    records,
-  }: {
-    tableName: TABLE_NAMES;
-    records: Record<string, any>[];
-  }): Promise<void> {
-    if (records.length === 0) return;
-    const batchStatements = records.map(r => this.prepareStatement({ tableName, record: r }));
-    await this.client.batch(batchStatements, 'write');
+    return this.stores.operations.batchInsert(args);
   }
 
   async load<R>({ tableName, keys }: { tableName: TABLE_NAMES; keys: Record<string, string> }): Promise<R | null> {
-
-
-    const parsedKeys = Object.keys(keys).map(key => parseSqlIdentifier(key, 'column name'));
-
-    const conditions = parsedKeys.map(key => `${key} = ?`).join(' AND ');
-    const values = Object.values(keys);
-
-    const result = await this.client.execute({
-      sql: `SELECT * FROM ${parsedTableName} WHERE ${conditions} ORDER BY createdAt DESC LIMIT 1`,
-      args: values,
-    });
-
-    if (!result.rows || result.rows.length === 0) {
-      return null;
-    }
-
-    const row = result.rows[0];
-    // Checks whether the string looks like a JSON object ({}) or array ([])
-    // If the string starts with { or [, it assumes it's JSON and parses it
-    // Otherwise, it just returns, preventing unintended number conversions
-    const parsed = Object.fromEntries(
-      Object.entries(row || {}).map(([k, v]) => {
-        try {
-          return [k, typeof v === 'string' ? (v.startsWith('{') || v.startsWith('[') ? JSON.parse(v) : v) : v];
-        } catch {
-          return [k, v];
-        }
-      }),
-    );
-
-    return parsed as R;
+    return this.stores.operations.load({ tableName, keys });
   }
 
   async getThreadById({ threadId }: { threadId: string }): Promise<StorageThreadType | null> {
-
-    const result = await this.load<StorageThreadType>({
-      tableName: TABLE_THREADS,
-      keys: { id: threadId },
-    });
-
-    if (!result) {
-      return null;
-    }
-
-    return {
-      ...result,
-      metadata: typeof result.metadata === 'string' ? JSON.parse(result.metadata) : result.metadata,
-    };
-    } catch (error) {
-      throw new MastraError(
-        {
-          id: 'LIBSQL_STORE_GET_THREAD_BY_ID_FAILED',
-          domain: ErrorDomain.STORAGE,
-          category: ErrorCategory.THIRD_PARTY,
-          details: { threadId },
-        },
-        error,
-      );
-    }
+    return this.stores.memory.getThreadById({ threadId });
   }
 
   /**
    * @deprecated use getThreadsByResourceIdPaginated instead for paginated results.
    */
   public async getThreadsByResourceId(args: { resourceId: string }): Promise<StorageThreadType[]> {
-
-
-    try {
-      const baseQuery = `FROM ${TABLE_THREADS} WHERE resourceId = ?`;
-      const queryParams: InValue[] = [resourceId];
-
-      const mapRowToStorageThreadType = (row: any): StorageThreadType => ({
-        id: row.id as string,
-        resourceId: row.resourceId as string,
-        title: row.title as string,
-        createdAt: new Date(row.createdAt as string), // Convert string to Date
-        updatedAt: new Date(row.updatedAt as string), // Convert string to Date
-        metadata: typeof row.metadata === 'string' ? JSON.parse(row.metadata) : row.metadata,
-      });
-
-      // Non-paginated path
-      const result = await this.client.execute({
-        sql: `SELECT * ${baseQuery} ORDER BY createdAt DESC`,
-        args: queryParams,
-      });
-
-      if (!result.rows) {
-        return [];
-      }
-      return result.rows.map(mapRowToStorageThreadType);
-    } catch (error) {
-      const mastraError = new MastraError(
-        {
-          id: 'LIBSQL_STORE_GET_THREADS_BY_RESOURCE_ID_FAILED',
-          domain: ErrorDomain.STORAGE,
-          category: ErrorCategory.THIRD_PARTY,
-          details: { resourceId },
-        },
-        error,
-      );
-      this.logger?.trackException?.(mastraError);
-      this.logger?.error?.(mastraError.toString());
-      return [];
-    }
+    return this.stores.memory.getThreadsByResourceId(args);
   }
 
-  public async getThreadsByResourceIdPaginated(
-
-
-
-  ): Promise<PaginationInfo & { threads: StorageThreadType[] }> {
-
-
-    try {
-      const baseQuery = `FROM ${TABLE_THREADS} WHERE resourceId = ?`;
-      const queryParams: InValue[] = [resourceId];
-
-      const mapRowToStorageThreadType = (row: any): StorageThreadType => ({
-        id: row.id as string,
-        resourceId: row.resourceId as string,
-        title: row.title as string,
-        createdAt: new Date(row.createdAt as string), // Convert string to Date
-        updatedAt: new Date(row.updatedAt as string), // Convert string to Date
-        metadata: typeof row.metadata === 'string' ? JSON.parse(row.metadata) : row.metadata,
-      });
-
-      const currentOffset = page * perPage;
-
-      const countResult = await this.client.execute({
-        sql: `SELECT COUNT(*) as count ${baseQuery}`,
-        args: queryParams,
-      });
-      const total = Number(countResult.rows?.[0]?.count ?? 0);
-
-      if (total === 0) {
-        return {
-          threads: [],
-          total: 0,
-          page,
-          perPage,
-          hasMore: false,
-        };
-      }
-
-      const dataResult = await this.client.execute({
-        sql: `SELECT * ${baseQuery} ORDER BY createdAt DESC LIMIT ? OFFSET ?`,
-        args: [...queryParams, perPage, currentOffset],
-      });
-
-      const threads = (dataResult.rows || []).map(mapRowToStorageThreadType);
-
-      return {
-        threads,
-        total,
-        page,
-        perPage,
-        hasMore: currentOffset + threads.length < total,
-      };
-    } catch (error) {
-      const mastraError = new MastraError(
-        {
-          id: 'LIBSQL_STORE_GET_THREADS_BY_RESOURCE_ID_FAILED',
-          domain: ErrorDomain.STORAGE,
-          category: ErrorCategory.THIRD_PARTY,
-          details: { resourceId },
-        },
-        error,
-      );
-      this.logger?.trackException?.(mastraError);
-      this.logger?.error?.(mastraError.toString());
-      return { threads: [], total: 0, page, perPage, hasMore: false };
-    }
+  public async getThreadsByResourceIdPaginated(args: {
+    resourceId: string;
+    page: number;
+    perPage: number;
+  }): Promise<PaginationInfo & { threads: StorageThreadType[] }> {
+    return this.stores.memory.getThreadsByResourceIdPaginated(args);
   }
 
   async saveThread({ thread }: { thread: StorageThreadType }): Promise<StorageThreadType> {
-
-    await this.insert({
-      tableName: TABLE_THREADS,
-      record: {
-        ...thread,
-        metadata: JSON.stringify(thread.metadata),
-      },
-    });
-
-    return thread;
-    } catch (error) {
-      const mastraError = new MastraError(
-        {
-          id: 'LIBSQL_STORE_SAVE_THREAD_FAILED',
-          domain: ErrorDomain.STORAGE,
-          category: ErrorCategory.THIRD_PARTY,
-          details: { threadId: thread.id },
-        },
-        error,
-      );
-      this.logger?.trackException?.(mastraError);
-      this.logger?.error?.(mastraError.toString());
-      throw mastraError;
-    }
+    return this.stores.memory.saveThread({ thread });
   }
 
   async updateThread({
@@ -548,142 +197,11 @@ export class LibSQLStore extends MastraStorage {
     title: string;
     metadata: Record<string, unknown>;
   }): Promise<StorageThreadType> {
-
-    if (!thread) {
-      throw new MastraError({
-        id: 'LIBSQL_STORE_UPDATE_THREAD_FAILED_THREAD_NOT_FOUND',
-        domain: ErrorDomain.STORAGE,
-        category: ErrorCategory.USER,
-        text: `Thread ${id} not found`,
-        details: { threadId: id },
-      });
-    }
-
-    const updatedThread = {
-      ...thread,
-      title,
-      metadata: {
-        ...thread.metadata,
-        ...metadata,
-      },
-    };
-
-    try {
-      await this.client.execute({
-        sql: `UPDATE ${TABLE_THREADS} SET title = ?, metadata = ? WHERE id = ?`,
-        args: [title, JSON.stringify(updatedThread.metadata), id],
-      });
-
-      return updatedThread;
-    } catch (error) {
-      throw new MastraError(
-        {
-          id: 'LIBSQL_STORE_UPDATE_THREAD_FAILED',
-          domain: ErrorDomain.STORAGE,
-          category: ErrorCategory.THIRD_PARTY,
-          text: `Failed to update thread ${id}`,
-          details: { threadId: id },
-        },
-        error,
-      );
-    }
+    return this.stores.memory.updateThread({ id, title, metadata });
   }
 
   async deleteThread({ threadId }: { threadId: string }): Promise<void> {
-
-    try {
-      await this.client.execute({
-        sql: `DELETE FROM ${TABLE_MESSAGES} WHERE thread_id = ?`,
-        args: [threadId],
-      });
-      await this.client.execute({
-        sql: `DELETE FROM ${TABLE_THREADS} WHERE id = ?`,
-        args: [threadId],
-      });
-    } catch (error) {
-      throw new MastraError(
-        {
-          id: 'LIBSQL_STORE_DELETE_THREAD_FAILED',
-          domain: ErrorDomain.STORAGE,
-          category: ErrorCategory.THIRD_PARTY,
-          details: { threadId },
-        },
-        error,
-      );
-    }
-    // TODO: Need to check if CASCADE is enabled so that messages will be automatically deleted due to CASCADE constraint
-  }
-
-  private parseRow(row: any): MastraMessageV2 {
-    let content = row.content;
-    try {
-      content = JSON.parse(row.content);
-    } catch {
-      // use content as is if it's not JSON
-    }
-    const result = {
-      id: row.id,
-      content,
-      role: row.role,
-      createdAt: new Date(row.createdAt as string),
-      threadId: row.thread_id,
-      resourceId: row.resourceId,
-    } as MastraMessageV2;
-    if (row.type && row.type !== `v2`) result.type = row.type;
-    return result;
-  }
-
-  private async _getIncludedMessages({
-    threadId,
-    selectBy,
-  }: {
-    threadId: string;
-    selectBy: StorageGetMessagesArg['selectBy'];
-  }) {
-    const include = selectBy?.include;
-    if (!include) return null;
-
-    const unionQueries: string[] = [];
-    const params: any[] = [];
-
-    for (const inc of include) {
-      const { id, withPreviousMessages = 0, withNextMessages = 0 } = inc;
-      // if threadId is provided, use it, otherwise use threadId from args
-      const searchId = inc.threadId || threadId;
-      unionQueries.push(
-        `
-        SELECT * FROM (
-          WITH numbered_messages AS (
-            SELECT
-              id, content, role, type, "createdAt", thread_id, "resourceId",
-              ROW_NUMBER() OVER (ORDER BY "createdAt" ASC) as row_num
-            FROM "${TABLE_MESSAGES}"
-            WHERE thread_id = ?
-          ),
-          target_positions AS (
-            SELECT row_num as target_pos
-            FROM numbered_messages
-            WHERE id = ?
-          )
-          SELECT DISTINCT m.*
-          FROM numbered_messages m
-          CROSS JOIN target_positions t
-          WHERE m.row_num BETWEEN (t.target_pos - ?) AND (t.target_pos + ?)
-        )
-        `, // Keep ASC for final sorting after fetching context
-      );
-      params.push(searchId, id, withPreviousMessages, withNextMessages);
-    }
-    const finalQuery = unionQueries.join(' UNION ALL ') + ' ORDER BY "createdAt" ASC';
-    const includedResult = await this.client.execute({ sql: finalQuery, args: params });
-    const includedRows = includedResult.rows?.map(row => this.parseRow(row));
-    const seen = new Set<string>();
-    const dedupedRows = includedRows.filter(row => {
-      if (seen.has(row.id)) return false;
-      seen.add(row.id);
-      return true;
-    });
-    return dedupedRows;
+    return this.stores.memory.deleteThread({ threadId });
   }
 
   /**
@@ -698,52 +216,7 @@ export class LibSQLStore extends MastraStorage {
   }: StorageGetMessagesArg & {
     format?: 'v1' | 'v2';
   }): Promise<MastraMessageV1[] | MastraMessageV2[]> {
-
-    const messages: MastraMessageV2[] = [];
-    const limit = this.resolveMessageLimit({ last: selectBy?.last, defaultLimit: 40 });
-    if (selectBy?.include?.length) {
-      const includeMessages = await this._getIncludedMessages({ threadId, selectBy });
-      if (includeMessages) {
-        messages.push(...includeMessages);
-      }
-    }
-
-    const excludeIds = messages.map(m => m.id);
-    const remainingSql = `
-      SELECT
-        id,
-        content,
-        role,
-        type,
-        "createdAt",
-        thread_id,
-        "resourceId"
-      FROM "${TABLE_MESSAGES}"
-      WHERE thread_id = ?
-      ${excludeIds.length ? `AND id NOT IN (${excludeIds.map(() => '?').join(', ')})` : ''}
-      ORDER BY "createdAt" DESC
-      LIMIT ?
-    `;
-    const remainingArgs = [threadId, ...(excludeIds.length ? excludeIds : []), limit];
-    const remainingResult = await this.client.execute({ sql: remainingSql, args: remainingArgs });
-    if (remainingResult.rows) {
-      messages.push(...remainingResult.rows.map((row: any) => this.parseRow(row)));
-    }
-    messages.sort((a, b) => a.createdAt.getTime() - b.createdAt.getTime());
-    const list = new MessageList().add(messages, 'memory');
-    if (format === `v2`) return list.get.all.v2();
-    return list.get.all.v1();
-    } catch (error) {
-      throw new MastraError(
-        {
-          id: 'LIBSQL_STORE_GET_MESSAGES_FAILED',
-          domain: ErrorDomain.STORAGE,
-          category: ErrorCategory.THIRD_PARTY,
-          details: { threadId },
-        },
-        error,
-      );
-    }
+    return this.stores.memory.getMessages({ threadId, selectBy, format });
   }
 
   public async getMessagesPaginated(
@@ -751,177 +224,15 @@ export class LibSQLStore extends MastraStorage {
       format?: 'v1' | 'v2';
     },
   ): Promise<PaginationInfo & { messages: MastraMessageV1[] | MastraMessageV2[] }> {
-
-    const { page = 0, perPage: perPageInput, dateRange } = selectBy?.pagination || {};
-    const perPage =
-      perPageInput !== undefined ? perPageInput : this.resolveMessageLimit({ last: selectBy?.last, defaultLimit: 40 });
-    const fromDate = dateRange?.start;
-    const toDate = dateRange?.end;
-
-    const messages: MastraMessageV2[] = [];
-
-    if (selectBy?.include?.length) {
-      try {
-        const includeMessages = await this._getIncludedMessages({ threadId, selectBy });
-        if (includeMessages) {
-          messages.push(...includeMessages);
-        }
-      } catch (error) {
-        throw new MastraError(
-          {
-            id: 'LIBSQL_STORE_GET_MESSAGES_PAGINATED_GET_INCLUDE_MESSAGES_FAILED',
-            domain: ErrorDomain.STORAGE,
-            category: ErrorCategory.THIRD_PARTY,
-            details: { threadId },
-          },
-          error,
-        );
-      }
-    }
-
-    try {
-      const currentOffset = page * perPage;
-
-      const conditions: string[] = [`thread_id = ?`];
-      const queryParams: InValue[] = [threadId];
-
-      if (fromDate) {
-        conditions.push(`"createdAt" >= ?`);
-        queryParams.push(fromDate.toISOString());
-      }
-      if (toDate) {
-        conditions.push(`"createdAt" <= ?`);
-        queryParams.push(toDate.toISOString());
-      }
-      const whereClause = conditions.length > 0 ? `WHERE ${conditions.join(' AND ')}` : '';
-
-      const countResult = await this.client.execute({
-        sql: `SELECT COUNT(*) as count FROM ${TABLE_MESSAGES} ${whereClause}`,
-        args: queryParams,
-      });
-      const total = Number(countResult.rows?.[0]?.count ?? 0);
-
-      if (total === 0 && messages.length === 0) {
-        return {
-          messages: [],
-          total: 0,
-          page,
-          perPage,
-          hasMore: false,
-        };
-      }
-
-      const excludeIds = messages.map(m => m.id);
-      const excludeIdsParam = excludeIds.map((_, idx) => `$${idx + queryParams.length + 1}`).join(', ');
-
-      const dataResult = await this.client.execute({
-        sql: `SELECT id, content, role, type, "createdAt", "resourceId", "thread_id" FROM ${TABLE_MESSAGES} ${whereClause} ${excludeIds.length ? `AND id NOT IN (${excludeIdsParam})` : ''} ORDER BY "createdAt" DESC LIMIT ? OFFSET ?`,
-        args: [...queryParams, ...excludeIds, perPage, currentOffset],
-      });
-
-      messages.push(...(dataResult.rows || []).map((row: any) => this.parseRow(row)));
-
-      const messagesToReturn =
-        format === 'v1'
-          ? new MessageList().add(messages, 'memory').get.all.v1()
-          : new MessageList().add(messages, 'memory').get.all.v2();
-
-      return {
-        messages: messagesToReturn,
-        total,
-        page,
-        perPage,
-        hasMore: currentOffset + messages.length < total,
-      };
-    } catch (error) {
-      const mastraError = new MastraError(
-        {
-          id: 'LIBSQL_STORE_GET_MESSAGES_PAGINATED_FAILED',
-          domain: ErrorDomain.STORAGE,
-          category: ErrorCategory.THIRD_PARTY,
-          details: { threadId },
-        },
-        error,
-      );
-      this.logger?.trackException?.(mastraError);
-      this.logger?.error?.(mastraError.toString());
-      return { messages: [], total: 0, page, perPage, hasMore: false };
-    }
+    return this.stores.memory.getMessagesPaginated(args);
   }
 
   async saveMessages(args: { messages: MastraMessageV1[]; format?: undefined | 'v1' }): Promise<MastraMessageV1[]>;
   async saveMessages(args: { messages: MastraMessageV2[]; format: 'v2' }): Promise<MastraMessageV2[]>;
-  async saveMessages(
-    messages,
-
-
-      | { messages: MastraMessageV1[]; format?: undefined | 'v1' }
-      | { messages: MastraMessageV2[]; format: 'v2' }): Promise<MastraMessageV2[] | MastraMessageV1[]> {
-    if (messages.length === 0) return messages;
-
-    try {
-      const threadId = messages[0]?.threadId;
-      if (!threadId) {
-        throw new Error('Thread ID is required');
-      }
-
-      // Prepare batch statements for all messages
-      const batchStatements = messages.map(message => {
-        const time = message.createdAt || new Date();
-        if (!message.threadId) {
-          throw new Error(
-            `Expected to find a threadId for message, but couldn't find one. An unexpected error has occurred.`,
-          );
-        }
-        if (!message.resourceId) {
-          throw new Error(
-            `Expected to find a resourceId for message, but couldn't find one. An unexpected error has occurred.`,
-          );
-        }
-        return {
-          sql: `INSERT INTO ${TABLE_MESSAGES} (id, thread_id, content, role, type, createdAt, resourceId)
-                VALUES (?, ?, ?, ?, ?, ?, ?)
-                ON CONFLICT(id) DO UPDATE SET
-                  thread_id=excluded.thread_id,
-                  content=excluded.content,
-                  role=excluded.role,
-                  type=excluded.type,
-                  resourceId=excluded.resourceId
-          `,
-          args: [
-            message.id,
-            message.threadId!,
-            typeof message.content === 'object' ? JSON.stringify(message.content) : message.content,
-            message.role,
-            message.type || 'v2',
-            time instanceof Date ? time.toISOString() : time,
-            message.resourceId,
-          ],
-        };
-      });
-
-      const now = new Date().toISOString();
-      batchStatements.push({
-        sql: `UPDATE ${TABLE_THREADS} SET updatedAt = ? WHERE id = ?`,
-        args: [now, threadId],
-      });
-
-      // Execute all inserts in a single batch
-      await this.client.batch(batchStatements, 'write');
-
-      const list = new MessageList().add(messages, 'memory');
-      if (format === `v2`) return list.get.all.v2();
-      return list.get.all.v1();
-    } catch (error) {
-      throw new MastraError(
-        {
-          id: 'LIBSQL_STORE_SAVE_MESSAGES_FAILED',
-          domain: ErrorDomain.STORAGE,
-          category: ErrorCategory.THIRD_PARTY,
-        },
-        error,
-      );
-    }
+  async saveMessages(
+    args: { messages: MastraMessageV1[]; format?: undefined | 'v1' } | { messages: MastraMessageV2[]; format: 'v2' },
+  ): Promise<MastraMessageV2[] | MastraMessageV1[]> {
+    return this.stores.memory.saveMessages(args);
   }
 
   async updateMessages({
@@ -932,158 +243,12 @@ export class LibSQLStore extends MastraStorage {
       content?: { metadata?: MastraMessageContentV2['metadata']; content?: MastraMessageContentV2['content'] };
     })[];
   }): Promise<MastraMessageV2[]> {
-
-      return [];
-    }
-
-    const messageIds = messages.map(m => m.id);
-    const placeholders = messageIds.map(() => '?').join(',');
-
-    const selectSql = `SELECT * FROM ${TABLE_MESSAGES} WHERE id IN (${placeholders})`;
-    const existingResult = await this.client.execute({ sql: selectSql, args: messageIds });
-    const existingMessages: MastraMessageV2[] = existingResult.rows.map(row => this.parseRow(row));
-
-    if (existingMessages.length === 0) {
-      return [];
-    }
-
-    const batchStatements = [];
-    const threadIdsToUpdate = new Set<string>();
-    const columnMapping: Record<string, string> = {
-      threadId: 'thread_id',
-    };
-
-    for (const existingMessage of existingMessages) {
-      const updatePayload = messages.find(m => m.id === existingMessage.id);
-      if (!updatePayload) continue;
-
-      const { id, ...fieldsToUpdate } = updatePayload;
-      if (Object.keys(fieldsToUpdate).length === 0) continue;
-
-      threadIdsToUpdate.add(existingMessage.threadId!);
-      if (updatePayload.threadId && updatePayload.threadId !== existingMessage.threadId) {
-        threadIdsToUpdate.add(updatePayload.threadId);
-      }
-
-      const setClauses = [];
-      const args: InValue[] = [];
-      const updatableFields = { ...fieldsToUpdate };
-
-      // Special handling for the 'content' field to merge instead of overwrite
-      if (updatableFields.content) {
-        const newContent = {
-          ...existingMessage.content,
-          ...updatableFields.content,
-          // Deep merge metadata if it exists on both
-          ...(existingMessage.content?.metadata && updatableFields.content.metadata
-            ? {
-                metadata: {
-                  ...existingMessage.content.metadata,
-                  ...updatableFields.content.metadata,
-                },
-              }
-            : {}),
-        };
-        setClauses.push(`${parseSqlIdentifier('content', 'column name')} = ?`);
-        args.push(JSON.stringify(newContent));
-        delete updatableFields.content;
-      }
-
-      for (const key in updatableFields) {
-        if (Object.prototype.hasOwnProperty.call(updatableFields, key)) {
-          const dbKey = columnMapping[key] || key;
-          setClauses.push(`${parseSqlIdentifier(dbKey, 'column name')} = ?`);
-          let value = updatableFields[key as keyof typeof updatableFields];
-
-          if (typeof value === 'object' && value !== null) {
-            value = JSON.stringify(value);
-          }
-          args.push(value as InValue);
-        }
-      }
-
-      if (setClauses.length === 0) continue;
-
-      args.push(id);
-
-      const sql = `UPDATE ${TABLE_MESSAGES} SET ${setClauses.join(', ')} WHERE id = ?`;
-      batchStatements.push({ sql, args });
-    }
-
-    if (batchStatements.length === 0) {
-      return existingMessages;
-    }
-
-    const now = new Date().toISOString();
-    for (const threadId of threadIdsToUpdate) {
-      if (threadId) {
-        batchStatements.push({
-          sql: `UPDATE ${TABLE_THREADS} SET updatedAt = ? WHERE id = ?`,
-          args: [now, threadId],
-        });
-      }
-    }
-
-    await this.client.batch(batchStatements, 'write');
-
-    const updatedResult = await this.client.execute({ sql: selectSql, args: messageIds });
-    return updatedResult.rows.map(row => this.parseRow(row));
-  }
-
-  private transformEvalRow(row: Record<string, any>): EvalRow {
-    const resultValue = JSON.parse(row.result as string);
-    const testInfoValue = row.test_info ? JSON.parse(row.test_info as string) : undefined;
-
-    if (!resultValue || typeof resultValue !== 'object' || !('score' in resultValue)) {
-      throw new Error(`Invalid MetricResult format: ${JSON.stringify(resultValue)}`);
-    }
-
-    return {
-      input: row.input as string,
-      output: row.output as string,
-      result: resultValue as MetricResult,
-      agentName: row.agent_name as string,
-      metricName: row.metric_name as string,
-      instructions: row.instructions as string,
-      testInfo: testInfoValue as TestInfo,
-      globalRunId: row.global_run_id as string,
-      runId: row.run_id as string,
-      createdAt: row.created_at as string,
-    };
+    return this.stores.memory.updateMessages({ messages });
  }
 
   /** @deprecated use getEvals instead */
   async getEvalsByAgentName(agentName: string, type?: 'test' | 'live'): Promise<EvalRow[]> {
-
-    const baseQuery = `SELECT * FROM ${TABLE_EVALS} WHERE agent_name = ?`;
-    const typeCondition =
-      type === 'test'
-        ? " AND test_info IS NOT NULL AND test_info->>'testPath' IS NOT NULL"
-        : type === 'live'
-          ? " AND (test_info IS NULL OR test_info->>'testPath' IS NULL)"
-          : '';
-
-    const result = await this.client.execute({
-      sql: `${baseQuery}${typeCondition} ORDER BY created_at DESC`,
-      args: [agentName],
-    });
-
-    return result.rows?.map(row => this.transformEvalRow(row)) ?? [];
-    } catch (error) {
-      // Handle case where table doesn't exist yet
-      if (error instanceof Error && error.message.includes('no such table')) {
-        return [];
-      }
-      throw new MastraError(
-        {
-          id: 'LIBSQL_STORE_GET_EVALS_BY_AGENT_NAME_FAILED',
-          domain: ErrorDomain.STORAGE,
-          category: ErrorCategory.THIRD_PARTY,
-          details: { agentName },
-        },
-        error,
-      );
-    }
+    return this.stores.legacyEvals.getEvalsByAgentName(agentName, type);
   }
 
   async getEvals(
@@ -1092,221 +257,96 @@ export class LibSQLStore extends MastraStorage {
       type?: 'test' | 'live';
     } & PaginationArgs = {},
   ): Promise<PaginationInfo & { evals: EvalRow[] }> {
-
-
-    const toDate = dateRange?.end;
-
-    const conditions: string[] = [];
-    const queryParams: InValue[] = [];
-
-    if (agentName) {
-      conditions.push(`agent_name = ?`);
-      queryParams.push(agentName);
-    }
-
-    if (type === 'test') {
-      conditions.push(`(test_info IS NOT NULL AND json_extract(test_info, '$.testPath') IS NOT NULL)`);
-    } else if (type === 'live') {
-      conditions.push(`(test_info IS NULL OR json_extract(test_info, '$.testPath') IS NULL)`);
-    }
-
-    if (fromDate) {
-      conditions.push(`created_at >= ?`);
-      queryParams.push(fromDate.toISOString());
-    }
-
-    if (toDate) {
-      conditions.push(`created_at <= ?`);
-      queryParams.push(toDate.toISOString());
-    }
-
-    const whereClause = conditions.length > 0 ? `WHERE ${conditions.join(' AND ')}` : '';
+    return this.stores.legacyEvals.getEvals(options);
+  }
 
-
-
-
-      args: queryParams,
-    });
-    const total = Number(countResult.rows?.[0]?.count ?? 0);
+  async getScoreById({ id }: { id: string }): Promise<ScoreRowData | null> {
+    return this.stores.scores.getScoreById({ id });
+  }
 
-
-
+  async saveScore(score: Omit<ScoreRowData, 'id' | 'createdAt' | 'updatedAt'>): Promise<{ score: ScoreRowData }> {
+    return this.stores.scores.saveScore(score);
+  }
 
-
-
-
-
-
-
-
-
-
+  async getScoresByScorerId({
+    scorerId,
+    entityId,
+    entityType,
+    pagination,
+  }: {
+    scorerId: string;
+    entityId?: string;
+    entityType?: string;
+    pagination: StoragePagination;
+  }): Promise<{ pagination: PaginationInfo; scores: ScoreRowData[] }> {
+    return this.stores.scores.getScoresByScorerId({ scorerId, entityId, entityType, pagination });
+  }
 
-
-
-
-
+  async getScoresByRunId({
+    runId,
+    pagination,
+  }: {
+    runId: string;
+    pagination: StoragePagination;
+  }): Promise<{ pagination: PaginationInfo; scores: ScoreRowData[] }> {
+    return this.stores.scores.getScoresByRunId({ runId, pagination });
+  }
 
-
-
-
-
-
-
-
-
-
-
-          id: 'LIBSQL_STORE_GET_EVALS_FAILED',
-          domain: ErrorDomain.STORAGE,
-          category: ErrorCategory.THIRD_PARTY,
-        },
-        error,
-      );
-    }
+  async getScoresByEntityId({
+    entityId,
+    entityType,
+    pagination,
+  }: {
+    pagination: StoragePagination;
+    entityId: string;
+    entityType: string;
+  }): Promise<{ pagination: PaginationInfo; scores: ScoreRowData[] }> {
+    return this.stores.scores.getScoresByEntityId({ entityId, entityType, pagination });
   }
 
+  /**
+   * TRACES
+   */
+
   /**
    * @deprecated use getTracesPaginated instead.
    */
-
-
-    scope?: string;
-    page: number;
-    perPage: number;
-    attributes?: Record<string, string>;
-    filters?: Record<string, any>;
-    fromDate?: Date;
-    toDate?: Date;
-  }): Promise<Trace[]> {
-    if (args.fromDate || args.toDate) {
-      (args as any).dateRange = {
-        start: args.fromDate,
-        end: args.toDate,
-      };
-    }
-    try {
-      const result = await this.getTracesPaginated(args);
-      return result.traces;
-    } catch (error) {
-      throw new MastraError(
-        {
-          id: 'LIBSQL_STORE_GET_TRACES_FAILED',
-          domain: ErrorDomain.STORAGE,
-          category: ErrorCategory.THIRD_PARTY,
-        },
-        error,
-      );
-    }
+  async getTraces(args: StorageGetTracesArg): Promise<Trace[]> {
+    return this.stores.traces.getTraces(args);
   }
 
-
-    args
-
-      scope?: string;
-      attributes?: Record<string, string>;
-      filters?: Record<string, any>;
-    } & PaginationArgs,
-  ): Promise<PaginationInfo & { traces: Trace[] }> {
-    const { name, scope, page = 0, perPage = 100, attributes, filters, dateRange } = args;
-    const fromDate = dateRange?.start;
-    const toDate = dateRange?.end;
-    const currentOffset = page * perPage;
-
-    const queryArgs: InValue[] = [];
-    const conditions: string[] = [];
-
-    if (name) {
-      conditions.push('name LIKE ?');
-      queryArgs.push(`${name}%`);
-    }
-    if (scope) {
-      conditions.push('scope = ?');
-      queryArgs.push(scope);
-    }
-    if (attributes) {
-      Object.entries(attributes).forEach(([key, value]) => {
-        conditions.push(`json_extract(attributes, '$.${key}') = ?`);
-        queryArgs.push(value);
-      });
-    }
-    if (filters) {
-      Object.entries(filters).forEach(([key, value]) => {
-        conditions.push(`${parseSqlIdentifier(key, 'filter key')} = ?`);
-        queryArgs.push(value);
-      });
-    }
-    if (fromDate) {
-      conditions.push('createdAt >= ?');
-      queryArgs.push(fromDate.toISOString());
-    }
-    if (toDate) {
-      conditions.push('createdAt <= ?');
-      queryArgs.push(toDate.toISOString());
-    }
-
-    const whereClause = conditions.length > 0 ? `WHERE ${conditions.join(' AND ')}` : '';
-
-    try {
-      const countResult = await this.client.execute({
-        sql: `SELECT COUNT(*) as count FROM ${TABLE_TRACES} ${whereClause}`,
-        args: queryArgs,
-      });
-      const total = Number(countResult.rows?.[0]?.count ?? 0);
+  async getTracesPaginated(args: StorageGetTracesArg): Promise<PaginationInfo & { traces: Trace[] }> {
+    return this.stores.traces.getTracesPaginated(args);
+  }
 
-
-
-
-          total: 0,
-          page,
-          perPage,
-          hasMore: false,
-        };
-      }
+  async batchTraceInsert(args: { records: Record<string, any>[] }): Promise<void> {
+    return this.stores.traces.batchTraceInsert(args);
+  }
 
-
-
-
-      });
+  /**
+   * WORKFLOWS
+   */
 
-
-
-
-
-
-
-
-
-
-
-
-            events: safelyParseJSON(row.events as string),
-            links: safelyParseJSON(row.links as string),
-            attributes: safelyParseJSON(row.attributes as string),
-            startTime: row.startTime,
-            endTime: row.endTime,
-            other: safelyParseJSON(row.other as string),
-            createdAt: row.createdAt,
-          }) as Trace,
-        ) ?? [];
+  async persistWorkflowSnapshot({
+    workflowName,
+    runId,
+    snapshot,
+  }: {
+    workflowName: string;
+    runId: string;
+    snapshot: WorkflowRunState;
+  }): Promise<void> {
+    return this.stores.workflows.persistWorkflowSnapshot({ workflowName, runId, snapshot });
+  }
 
-
-
-
-
-
-
-
-
-      throw new MastraError(
-        {
-          id: 'LIBSQL_STORE_GET_TRACES_PAGINATED_FAILED',
-          domain: ErrorDomain.STORAGE,
-          category: ErrorCategory.THIRD_PARTY,
-        },
-        error,
-      );
-    }
+  async loadWorkflowSnapshot({
+    workflowName,
+    runId,
+  }: {
+    workflowName: string;
+    runId: string;
+  }): Promise<WorkflowRunState | null> {
+    return this.stores.workflows.loadWorkflowSnapshot({ workflowName, runId });
   }
 
   async getWorkflowRuns({
@@ -1324,67 +364,7 @@ export class LibSQLStore extends MastraStorage {
     offset?: number;
     resourceId?: string;
   } = {}): Promise<WorkflowRuns> {
-
-    const conditions: string[] = [];
-    const args: InValue[] = [];
-
-    if (workflowName) {
-      conditions.push('workflow_name = ?');
-      args.push(workflowName);
-    }
-
-    if (fromDate) {
-      conditions.push('createdAt >= ?');
-      args.push(fromDate.toISOString());
-    }
-
-    if (toDate) {
-      conditions.push('createdAt <= ?');
-      args.push(toDate.toISOString());
-    }
-
-    if (resourceId) {
-      const hasResourceId = await this.hasColumn(TABLE_WORKFLOW_SNAPSHOT, 'resourceId');
-      if (hasResourceId) {
-        conditions.push('resourceId = ?');
-        args.push(resourceId);
-      } else {
-        console.warn(`[${TABLE_WORKFLOW_SNAPSHOT}] resourceId column not found. Skipping resourceId filter.`);
-      }
-    }
-
-    const whereClause = conditions.length > 0 ? `WHERE ${conditions.join(' AND ')}` : '';
-
-    let total = 0;
-    // Only get total count when using pagination
-    if (limit !== undefined && offset !== undefined) {
-      const countResult = await this.client.execute({
-        sql: `SELECT COUNT(*) as count FROM ${TABLE_WORKFLOW_SNAPSHOT} ${whereClause}`,
-        args,
-      });
-      total = Number(countResult.rows?.[0]?.count ?? 0);
-    }
-
-    // Get results
-    const result = await this.client.execute({
-      sql: `SELECT * FROM ${TABLE_WORKFLOW_SNAPSHOT} ${whereClause} ORDER BY createdAt DESC${limit !== undefined && offset !== undefined ? ` LIMIT ? OFFSET ?` : ''}`,
-      args: limit !== undefined && offset !== undefined ? [...args, limit, offset] : args,
-    });
-
-    const runs = (result.rows || []).map(row => this.parseWorkflowRun(row));
-
-    // Use runs.length as total when not paginating
-    return { runs, total: total || runs.length };
-    } catch (error) {
-      throw new MastraError(
-        {
-          id: 'LIBSQL_STORE_GET_WORKFLOW_RUNS_FAILED',
-          domain: ErrorDomain.STORAGE,
-          category: ErrorCategory.THIRD_PARTY,
-        },
-        error,
-      );
-    }
+    return this.stores.workflows.getWorkflowRuns({ workflowName, fromDate, toDate, limit, offset, resourceId });
   }
 
   async getWorkflowRunById({
@@ -1394,73 +374,15 @@ export class LibSQLStore extends MastraStorage {
     runId: string;
     workflowName?: string;
   }): Promise<WorkflowRun | null> {
-
-    const args: (string | number)[] = [];
-
-    if (runId) {
-      conditions.push('run_id = ?');
-      args.push(runId);
-    }
-
-    if (workflowName) {
-      conditions.push('workflow_name = ?');
-      args.push(workflowName);
-    }
-
-    const whereClause = conditions.length > 0 ? `WHERE ${conditions.join(' AND ')}` : '';
-
-    try {
-      const result = await this.client.execute({
-        sql: `SELECT * FROM ${TABLE_WORKFLOW_SNAPSHOT} ${whereClause}`,
-        args,
-      });
-
-      if (!result.rows?.[0]) {
-        return null;
-      }
-
-      return this.parseWorkflowRun(result.rows[0]);
-    } catch (error) {
-      throw new MastraError(
-        {
-          id: 'LIBSQL_STORE_GET_WORKFLOW_RUN_BY_ID_FAILED',
-          domain: ErrorDomain.STORAGE,
-          category: ErrorCategory.THIRD_PARTY,
-        },
-        error,
-      );
-    }
+    return this.stores.workflows.getWorkflowRunById({ runId, workflowName });
   }

   async getResourceById({ resourceId }: { resourceId: string }): Promise<StorageResourceType | null> {
-
-      tableName: TABLE_RESOURCES,
-      keys: { id: resourceId },
-    });
-
-    if (!result) {
-      return null;
-    }
-
-    return {
-      ...result,
-      // Ensure workingMemory is always returned as a string, even if auto-parsed as JSON
-      workingMemory:
-        typeof result.workingMemory === 'object' ? JSON.stringify(result.workingMemory) : result.workingMemory,
-      metadata: typeof result.metadata === 'string' ? JSON.parse(result.metadata) : result.metadata,
-    };
+    return this.stores.memory.getResourceById({ resourceId });
   }

   async saveResource({ resource }: { resource: StorageResourceType }): Promise<StorageResourceType> {
-
-      tableName: TABLE_RESOURCES,
-      record: {
-        ...resource,
-        metadata: JSON.stringify(resource.metadata),
-      },
-    });
-
-    return resource;
+    return this.stores.memory.saveResource({ resource });
   }

   async updateResource({
@@ -1472,81 +394,7 @@ export class LibSQLStore extends MastraStorage {
     workingMemory?: string;
     metadata?: Record<string, unknown>;
   }): Promise<StorageResourceType> {
-
-
-    if (!existingResource) {
-      // Create new resource if it doesn't exist
-      const newResource: StorageResourceType = {
-        id: resourceId,
-        workingMemory,
-        metadata: metadata || {},
-        createdAt: new Date(),
-        updatedAt: new Date(),
-      };
-      return this.saveResource({ resource: newResource });
-    }
-
-    const updatedResource = {
-      ...existingResource,
-      workingMemory: workingMemory !== undefined ? workingMemory : existingResource.workingMemory,
-      metadata: {
-        ...existingResource.metadata,
-        ...metadata,
-      },
-      updatedAt: new Date(),
-    };
-
-    const updates: string[] = [];
-    const values: InValue[] = [];
-
-    if (workingMemory !== undefined) {
-      updates.push('workingMemory = ?');
-      values.push(workingMemory);
-    }
-
-    if (metadata) {
-      updates.push('metadata = ?');
-      values.push(JSON.stringify(updatedResource.metadata));
-    }
-
-    updates.push('updatedAt = ?');
-    values.push(updatedResource.updatedAt.toISOString());
-
-    values.push(resourceId);
-
-    await this.client.execute({
-      sql: `UPDATE ${TABLE_RESOURCES} SET ${updates.join(', ')} WHERE id = ?`,
-      args: values,
-    });
-
-    return updatedResource;
-  }
-
-  private async hasColumn(table: string, column: string): Promise<boolean> {
-    const result = await this.client.execute({
-      sql: `PRAGMA table_info(${table})`,
-    });
-    return (await result.rows)?.some((row: any) => row.name === column);
-  }
-
-  private parseWorkflowRun(row: Record<string, any>): WorkflowRun {
-    let parsedSnapshot: WorkflowRunState | string = row.snapshot as string;
-    if (typeof parsedSnapshot === 'string') {
-      try {
-        parsedSnapshot = JSON.parse(row.snapshot as string) as WorkflowRunState;
-      } catch (e) {
-        // If parsing fails, return the raw snapshot string
-        console.warn(`Failed to parse snapshot for workflow ${row.workflow_name}: ${e}`);
-      }
-    }
-    return {
-      workflowName: row.workflow_name as string,
-      runId: row.run_id as string,
-      snapshot: parsedSnapshot,
-      resourceId: row.resourceId as string,
-      createdAt: new Date(row.createdAt as string),
-      updatedAt: new Date(row.updatedAt as string),
-    };
+    return this.stores.memory.updateResource({ resourceId, workingMemory, metadata });
   }
 }

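The net effect of these hunks is that LibSQLStore keeps its public storage API but forwards workflow, trace, and resource calls to the new domain stores (this.stores.workflows, this.stores.traces, this.stores.memory) instead of issuing SQL inline. The sketch below is not part of the diff; it is a minimal usage example assuming LibSQLStore is constructed with the LibSQLConfig shown earlier in this file (url, optional authToken) and that a local file: URL is acceptable. The workflow name, run id, and database path are hypothetical.

import { LibSQLStore } from '@mastra/libsql';

// Assumption: a file-backed libsql database; use a remote url plus authToken for Turso.
const storage = new LibSQLStore({ url: 'file:./mastra.db' });

async function demo() {
  // Persist and reload a workflow snapshot through the facade; after this release
  // these calls are routed to the workflows domain store internally.
  await storage.persistWorkflowSnapshot({
    workflowName: 'example-workflow', // hypothetical workflow name
    runId: 'run-123',                 // hypothetical run id
    snapshot: {} as any,              // stand-in for a real WorkflowRunState
  });

  const snapshot = await storage.loadWorkflowSnapshot({
    workflowName: 'example-workflow',
    runId: 'run-123',
  });

  // The paginated run listing keeps the same signature as before the refactor.
  const { runs, total } = await storage.getWorkflowRuns({ limit: 10, offset: 0 });

  console.log({ total, count: runs.length, hasSnapshot: snapshot !== null });
}

demo().catch(console.error);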