@mastra/cloudflare-d1 0.0.0-taofeeqInngest-20250603090617 → 0.0.0-transpile-packages-20250724123433
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE.md +11 -42
- package/README.md +15 -0
- package/dist/_tsup-dts-rollup.d.cts +414 -67
- package/dist/_tsup-dts-rollup.d.ts +414 -67
- package/dist/index.cjs +1799 -535
- package/dist/index.d.cts +3 -0
- package/dist/index.d.ts +3 -0
- package/dist/index.js +1790 -526
- package/package.json +15 -14
package/LICENSE.md
CHANGED
@@ -1,46 +1,15 @@
-#
+# Apache License 2.0
 
-Copyright (c) 2025
+Copyright (c) 2025 Kepler Software, Inc.
 
-
-
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
 
-
-The licensor grants you a non-exclusive, royalty-free, worldwide, non-sublicensable, non-transferable license to use, copy, distribute, make available, and prepare derivative works of the software, in each case subject to the limitations and conditions below
+http://www.apache.org/licenses/LICENSE-2.0
 
-
-
-
-
-
-You may not alter, remove, or obscure any licensing, copyright, or other notices of the licensor in the software. Any use of the licensor’s trademarks is subject to applicable law.
-
-**Patents**
-The licensor grants you a license, under any patent claims the licensor can license, or becomes able to license, to make, have made, use, sell, offer for sale, import and have imported the software, in each case subject to the limitations and conditions in this license. This license does not cover any patent claims that you cause to be infringed by modifications or additions to the software. If you or your company make any written claim that the software infringes or contributes to infringement of any patent, your patent license for the software granted under these terms ends immediately. If your company makes such a claim, your patent license ends immediately for work on behalf of your company.
-
-**Notices**
-You must ensure that anyone who gets a copy of any part of the software from you also gets a copy of these terms.
-
-If you modify the software, you must include in any modified copies of the software prominent notices stating that you have modified the software.
-
-**No Other Rights**
-These terms do not imply any licenses other than those expressly granted in these terms.
-
-**Termination**
-If you use the software in violation of these terms, such use is not licensed, and your licenses will automatically terminate. If the licensor provides you with a notice of your violation, and you cease all violation of this license no later than 30 days after you receive that notice, your licenses will be reinstated retroactively. However, if you violate these terms after such reinstatement, any additional violation of these terms will cause your licenses to terminate automatically and permanently.
-
-**No Liability**
-As far as the law allows, the software comes as is, without any warranty or condition, and the licensor will not be liable to you for any damages arising out of these terms or the use or nature of the software, under any kind of legal claim.
-
-**Definitions**
-The _licensor_ is the entity offering these terms, and the _software_ is the software the licensor makes available under these terms, including any portion of it.
-
-_you_ refers to the individual or entity agreeing to these terms.
-
-_your company_ is any legal entity, sole proprietorship, or other kind of organization that you work for, plus all organizations that have control over, are under the control of, or are under common control with that organization. _control_ means ownership of substantially all the assets of an entity, or the power to direct its management and policies by vote, contract, or otherwise. Control can be direct or indirect.
-
-_your licenses_ are all the licenses granted to you for the software under these terms.
-
-_use_ means anything you do with the software requiring one of your licenses.
-
-_trademark_ means trademarks, service marks, and similar rights.
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
package/README.md
CHANGED
@@ -54,6 +54,21 @@ const store = new D1Store({
 });
 ```
 
+### Or you can pass any client implementation you want
+
+```typescript
+import { D1Store } from '@mastra/cloudflare-d1';
+
+const store = new D1Store({
+  client: {
+    query: ({ sql, params }) => {
+      // do something
+    },
+  },
+  tablePrefix: 'mastra_', // optional
+});
+```
+
 ## Supported Methods
 
 ### Thread Operations
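
For the new `client` option, the type rollup below declares a `D1Client` with a single `query({ sql, params })` method resolving to `{ result }`, where the result type is derived from the `cloudflare` SDK's `d1.database.query` return value. A minimal sketch of such a client backed by the Cloudflare REST API could look like the following; the account/database identifiers are placeholders, and the exact `d1.database.query` call shape is an assumption about the `cloudflare` SDK, not something this diff documents:

```typescript
import Cloudflare from 'cloudflare';
import { D1Store } from '@mastra/cloudflare-d1';

// Placeholder credentials and identifiers (assumptions, not from this diff).
const cf = new Cloudflare({ apiToken: process.env.CLOUDFLARE_API_TOKEN });
const accountId = 'your-account-id';
const databaseId = 'your-database-id';

const store = new D1Store({
  client: {
    // Matches the declared D1Client shape: ({ sql, params }) => Promise<{ result }>.
    async query({ sql, params }) {
      // Assumed call shape for the cloudflare SDK's D1 query endpoint; the
      // returned page's `result` is what D1QueryResult refers to in the rollup.
      const page = await cf.d1.database.query(databaseId, {
        account_id: accountId,
        sql,
        params,
      });
      return { result: page.result };
    },
  },
  tablePrefix: 'mastra_', // optional
});
```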
@@ -1,43 +1,32 @@
+import { default as Cloudflare_2 } from 'cloudflare';
 import type { D1Database as D1Database_2 } from '@cloudflare/workers-types';
 import type { EvalRow } from '@mastra/core/storage';
+import { LegacyEvalsStorage } from '@mastra/core/storage';
+import type { MastraMessageContentV2 } from '@mastra/core/agent';
 import type { MastraMessageV1 } from '@mastra/core/memory';
 import type { MastraMessageV2 } from '@mastra/core/memory';
-import type { MastraMessageV2 as MastraMessageV2_2 } from '@mastra/core';
 import { MastraStorage } from '@mastra/core/storage';
+import { MemoryStorage } from '@mastra/core/storage';
+import type { PaginationArgs } from '@mastra/core/storage';
+import type { PaginationInfo } from '@mastra/core/storage';
+import type { ScoreRowData } from '@mastra/core/scores';
+import { ScoresStorage } from '@mastra/core/storage';
 import type { StorageColumn } from '@mastra/core/storage';
+import type { StorageDomains } from '@mastra/core/storage';
 import type { StorageGetMessagesArg } from '@mastra/core/storage';
+import type { StorageGetTracesArg } from '@mastra/core/storage';
+import type { StorageGetTracesPaginatedArg } from '@mastra/core/storage';
+import type { StoragePagination } from '@mastra/core/storage';
+import type { StorageResourceType } from '@mastra/core/storage';
 import type { StorageThreadType } from '@mastra/core/memory';
+import { StoreOperations } from '@mastra/core/storage';
 import type { TABLE_NAMES } from '@mastra/core/storage';
+import type { Trace } from '@mastra/core/telemetry';
+import { TracesStorage } from '@mastra/core/storage';
 import type { WorkflowRun } from '@mastra/core/storage';
 import type { WorkflowRuns } from '@mastra/core/storage';
 import type { WorkflowRunState } from '@mastra/core/workflows';
-import
-
-export declare const checkWorkflowSnapshot: (snapshot: WorkflowRunState_2 | string, stepId: string, status: string) => void;
-
-export declare const createSampleMessage: (threadId: string, parts?: MastraMessageV2_2["content"]["parts"]) => MastraMessageV2_2;
-
-export declare const createSampleThread: () => {
-    id: string;
-    resourceId: string;
-    title: string;
-    createdAt: Date;
-    updatedAt: Date;
-    metadata: {
-        key: string;
-    };
-};
-
-export declare const createSampleThreadWithParams: (threadId: string, resourceId: string, createdAt: Date, updatedAt: Date) => {
-    id: string;
-    resourceId: string;
-    title: string;
-    createdAt: Date;
-    updatedAt: Date;
-    metadata: {
-        key: string;
-    };
-};
+import { WorkflowsStorage } from '@mastra/core/storage';
 
 export declare const createSampleTrace: (name: string, scope?: string, attributes?: Record<string, string>) => {
     id: string;
@@ -56,14 +45,46 @@ export declare const createSampleTrace: (name: string, scope?: string, attribute
     createdAt: string;
 };
 
-export declare const createSampleWorkflowSnapshot: (threadId: string, status: string, createdAt?: Date) => {
-    snapshot: WorkflowRunState_2;
-    runId: string;
-    stepId: string;
-};
-
 export declare function createSqlBuilder(): SqlBuilder;
 
+declare interface D1Client {
+    query(args: {
+        sql: string;
+        params: string[];
+    }): Promise<{
+        result: D1QueryResult;
+    }>;
+}
+export { D1Client }
+export { D1Client as D1Client_alias_1 }
+
+export declare interface D1Client_alias_2 {
+    query(args: {
+        sql: string;
+        params: string[];
+    }): Promise<{
+        result: D1QueryResult_alias_2;
+    }>;
+}
+
+export declare interface D1Client_alias_3 {
+    query(args: {
+        sql: string;
+        params: string[];
+    }): Promise<{
+        result: D1QueryResult_alias_3;
+    }>;
+}
+
+declare interface D1ClientConfig {
+    /** Optional prefix for table names */
+    tablePrefix?: string;
+    /** D1 Client */
+    client: D1Client;
+}
+export { D1ClientConfig }
+export { D1ClientConfig as D1ClientConfig_alias_1 }
+
 /**
  * Configuration for D1 using the REST API
  */
@@ -80,40 +101,52 @@ declare interface D1Config {
 export { D1Config }
 export { D1Config as D1Config_alias_1 }
 
+declare type D1QueryResult = Awaited<ReturnType<Cloudflare_2['d1']['database']['query']>>['result'];
+export { D1QueryResult }
+export { D1QueryResult as D1QueryResult_alias_1 }
+
+export declare type D1QueryResult_alias_2 = Awaited<ReturnType<Cloudflare_2['d1']['database']['query']>>['result'];
+
+export declare type D1QueryResult_alias_3 = Awaited<ReturnType<Cloudflare_2['d1']['database']['query']>>['result'];
+
 declare class D1Store extends MastraStorage {
     private client?;
-    private accountId?;
-    private databaseId?;
     private binding?;
     private tablePrefix;
+    stores: StorageDomains;
     /**
      * Creates a new D1Store instance
      * @param config Configuration for D1 access (either REST API or Workers Binding API)
      */
     constructor(config: D1StoreConfig);
-
-
-
-
+    get supports(): {
+        selectByIncludeResourceScope: boolean;
+        resourceWorkingMemory: boolean;
+        hasColumn: boolean;
+        createTable: boolean;
+    };
+    createTable({ tableName, schema, }: {
+        tableName: TABLE_NAMES;
+        schema: Record<string, StorageColumn>;
+    }): Promise<void>;
     /**
-     *
-     * @param
-     * @
+     * Alters table schema to add columns if they don't exist
+     * @param tableName Name of the table
+     * @param schema Schema of the table
+     * @param ifNotExists Array of column names to add if they don't exist
      */
-
-    private getSqlType;
-    private ensureDate;
-    private serializeDate;
-    private serializeValue;
-    private deserializeValue;
-    createTable({ tableName, schema, }: {
+    alterTable({ tableName, schema, ifNotExists, }: {
        tableName: TABLE_NAMES;
        schema: Record<string, StorageColumn>;
+        ifNotExists: string[];
    }): Promise<void>;
    clearTable({ tableName }: {
        tableName: TABLE_NAMES;
    }): Promise<void>;
-
+    dropTable({ tableName }: {
+        tableName: TABLE_NAMES;
+    }): Promise<void>;
+    hasColumn(table: string, column: string): Promise<boolean>;
    insert({ tableName, record }: {
        tableName: TABLE_NAMES;
        record: Record<string, any>;
@@ -125,9 +158,19 @@ declare class D1Store extends MastraStorage {
     getThreadById({ threadId }: {
         threadId: string;
     }): Promise<StorageThreadType | null>;
+    /**
+     * @deprecated use getThreadsByResourceIdPaginated instead
+     */
     getThreadsByResourceId({ resourceId }: {
         resourceId: string;
     }): Promise<StorageThreadType[]>;
+    getThreadsByResourceIdPaginated(args: {
+        resourceId: string;
+        page: number;
+        perPage: number;
+    }): Promise<PaginationInfo & {
+        threads: StorageThreadType[];
+    }>;
     saveThread({ thread }: {
         thread: StorageThreadType;
     }): Promise<StorageThreadType>;
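
A short usage sketch for the newly added paginated thread lookup; page numbering is assumed to be zero-based, since the declaration only fixes the argument and result shapes:

```typescript
// Assumes `store` is a configured D1Store instance.
const { threads, ...pageInfo } = await store.getThreadsByResourceIdPaginated({
  resourceId: 'resource-123', // placeholder id
  page: 0,                    // assumed zero-based
  perPage: 20,
});
// `threads` is StorageThreadType[]; the rest is PaginationInfo from @mastra/core.
```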
@@ -147,12 +190,20 @@ declare class D1Store extends MastraStorage {
         messages: MastraMessageV2[];
         format: 'v2';
     }): Promise<MastraMessageV2[]>;
+    /**
+     * @deprecated use getMessagesPaginated instead
+     */
     getMessages(args: StorageGetMessagesArg & {
         format?: 'v1';
     }): Promise<MastraMessageV1[]>;
     getMessages(args: StorageGetMessagesArg & {
         format: 'v2';
     }): Promise<MastraMessageV2[]>;
+    getMessagesPaginated({ threadId, selectBy, format, }: StorageGetMessagesArg & {
+        format?: 'v1' | 'v2';
+    }): Promise<PaginationInfo & {
+        messages: MastraMessageV1[] | MastraMessageV2[];
+    }>;
     persistWorkflowSnapshot({ workflowName, runId, snapshot, }: {
         workflowName: string;
         runId: string;
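
Likewise, `getMessagesPaginated` takes the existing `getMessages` arguments and adds pagination info to the result. A hedged sketch:

```typescript
// Assumes `store` is a configured D1Store instance; selectBy and pagination
// options come from StorageGetMessagesArg in @mastra/core.
const { messages } = await store.getMessagesPaginated({
  threadId: 'thread-123', // placeholder id
  format: 'v2',
});
```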
@@ -162,6 +213,18 @@ declare class D1Store extends MastraStorage {
         workflowName: string;
         runId: string;
     }): Promise<WorkflowRunState | null>;
+    getWorkflowRuns({ workflowName, fromDate, toDate, limit, offset, resourceId, }?: {
+        workflowName?: string;
+        fromDate?: Date;
+        toDate?: Date;
+        limit?: number;
+        offset?: number;
+        resourceId?: string;
+    }): Promise<WorkflowRuns>;
+    getWorkflowRunById({ runId, workflowName, }: {
+        runId: string;
+        workflowName?: string;
+    }): Promise<WorkflowRun | null>;
     /**
      * Insert multiple records in a batch operation
      * @param tableName The table to insert into
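
The workflow-run queries now live directly on `D1Store`. A sketch of how they might be called, using only the parameters that appear in the declarations above:

```typescript
// Assumes `store` is a configured D1Store instance.
const runs = await store.getWorkflowRuns({
  workflowName: 'my-workflow',                          // placeholder name
  fromDate: new Date(Date.now() - 24 * 60 * 60 * 1000), // last 24 hours
  limit: 10,
  offset: 0,
});

const run = await store.getWorkflowRunById({
  runId: 'run-123', // placeholder id
  workflowName: 'my-workflow',
});
```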
@@ -171,7 +234,10 @@ declare class D1Store extends MastraStorage {
     tableName: TABLE_NAMES;
     records: Record<string, any>[];
 }): Promise<void>;
-
+    /**
+     * @deprecated use getTracesPaginated instead
+     */
+    getTraces(args: {
         name?: string;
         scope?: string;
         page: number;
@@ -179,22 +245,68 @@ declare class D1Store extends MastraStorage {
         attributes?: Record<string, string>;
         fromDate?: Date;
         toDate?: Date;
-    }): Promise<
+    }): Promise<Trace[]>;
+    getTracesPaginated(args: StorageGetTracesPaginatedArg): Promise<PaginationInfo & {
+        traces: Trace[];
+    }>;
+    /**
+     * @deprecated use getEvals instead
+     */
     getEvalsByAgentName(agentName: string, type?: 'test' | 'live'): Promise<EvalRow[]>;
-
-
-
-
-
-
-
-
-
-
-
+    getEvals(options: {
+        agentName?: string;
+        type?: 'test' | 'live';
+    } & PaginationArgs): Promise<PaginationInfo & {
+        evals: EvalRow[];
+    }>;
+    updateMessages(_args: {
+        messages: Partial<Omit<MastraMessageV2, 'createdAt'>> & {
+            id: string;
+            content?: {
+                metadata?: MastraMessageContentV2['metadata'];
+                content?: MastraMessageContentV2['content'];
+            };
+        }[];
+    }): Promise<MastraMessageV2[]>;
+    getResourceById({ resourceId }: {
+        resourceId: string;
+    }): Promise<StorageResourceType | null>;
+    saveResource({ resource }: {
+        resource: StorageResourceType;
+    }): Promise<StorageResourceType>;
+    updateResource({ resourceId, workingMemory, metadata, }: {
+        resourceId: string;
+        workingMemory?: string;
+        metadata?: Record<string, unknown>;
+    }): Promise<StorageResourceType>;
+    getScoreById({ id: _id }: {
+        id: string;
+    }): Promise<ScoreRowData | null>;
+    saveScore(_score: ScoreRowData): Promise<{
+        score: ScoreRowData;
+    }>;
+    getScoresByRunId({ runId: _runId, pagination: _pagination, }: {
         runId: string;
-
-    }): Promise<
+        pagination: StoragePagination;
+    }): Promise<{
+        pagination: PaginationInfo;
+        scores: ScoreRowData[];
+    }>;
+    getScoresByEntityId({ entityId: _entityId, entityType: _entityType, pagination: _pagination, }: {
+        pagination: StoragePagination;
+        entityId: string;
+        entityType: string;
+    }): Promise<{
+        pagination: PaginationInfo;
+        scores: ScoreRowData[];
+    }>;
+    getScoresByScorerId({ scorerId: _scorerId, pagination: _pagination, }: {
+        scorerId: string;
+        pagination: StoragePagination;
+    }): Promise<{
+        pagination: PaginationInfo;
+        scores: ScoreRowData[];
+    }>;
     /**
      * Close the database connection
      * No explicit cleanup needed for D1 in either REST or Workers Binding mode
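
Among the additions above are resource-level working-memory helpers. A sketch of the update path, based only on the declared parameter shapes; the ids and values are placeholders:

```typescript
// Assumes `store` is a configured D1Store instance.
const resource = await store.getResourceById({ resourceId: 'resource-123' });

const updated = await store.updateResource({
  resourceId: 'resource-123',                   // placeholder id
  workingMemory: 'User prefers short answers.', // stored as the resource's working memory
  metadata: { plan: 'pro' },
});
```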
@@ -207,7 +319,7 @@ export { D1Store as D1Store_alias_1 }
 /**
  * Combined configuration type supporting both REST API and Workers Binding API
  */
-declare type D1StoreConfig = D1Config | D1WorkersConfig;
+declare type D1StoreConfig = D1Config | D1WorkersConfig | D1ClientConfig;
 export { D1StoreConfig }
 export { D1StoreConfig as D1StoreConfig_alias_1 }
 
@@ -223,6 +335,104 @@ declare interface D1WorkersConfig {
 export { D1WorkersConfig }
 export { D1WorkersConfig as D1WorkersConfig_alias_1 }
 
+export declare function deserializeValue(value: any, type?: string): any;
+
+export declare function isArrayOfRecords(value: any): value is Record<string, any>[];
+
+export declare class LegacyEvalsStorageD1 extends LegacyEvalsStorage {
+    private operations;
+    constructor({ operations }: {
+        operations: StoreOperationsD1;
+    });
+    getEvals(options: {
+        agentName?: string;
+        type?: 'test' | 'live';
+    } & PaginationArgs): Promise<PaginationInfo & {
+        evals: EvalRow[];
+    }>;
+    /**
+     * @deprecated use getEvals instead
+     */
+    getEvalsByAgentName(agentName: string, type?: 'test' | 'live'): Promise<EvalRow[]>;
+}
+
+export declare class MemoryStorageD1 extends MemoryStorage {
+    private operations;
+    constructor({ operations }: {
+        operations: StoreOperationsD1;
+    });
+    getResourceById({ resourceId }: {
+        resourceId: string;
+    }): Promise<StorageResourceType | null>;
+    saveResource({ resource }: {
+        resource: StorageResourceType;
+    }): Promise<StorageResourceType>;
+    updateResource({ resourceId, workingMemory, metadata, }: {
+        resourceId: string;
+        workingMemory?: string;
+        metadata?: Record<string, unknown>;
+    }): Promise<StorageResourceType>;
+    getThreadById({ threadId }: {
+        threadId: string;
+    }): Promise<StorageThreadType | null>;
+    /**
+     * @deprecated use getThreadsByResourceIdPaginated instead
+     */
+    getThreadsByResourceId({ resourceId }: {
+        resourceId: string;
+    }): Promise<StorageThreadType[]>;
+    getThreadsByResourceIdPaginated(args: {
+        resourceId: string;
+        page: number;
+        perPage: number;
+    }): Promise<PaginationInfo & {
+        threads: StorageThreadType[];
+    }>;
+    saveThread({ thread }: {
+        thread: StorageThreadType;
+    }): Promise<StorageThreadType>;
+    updateThread({ id, title, metadata, }: {
+        id: string;
+        title: string;
+        metadata: Record<string, unknown>;
+    }): Promise<StorageThreadType>;
+    deleteThread({ threadId }: {
+        threadId: string;
+    }): Promise<void>;
+    saveMessages(args: {
+        messages: MastraMessageV1[];
+        format?: undefined | 'v1';
+    }): Promise<MastraMessageV1[]>;
+    saveMessages(args: {
+        messages: MastraMessageV2[];
+        format: 'v2';
+    }): Promise<MastraMessageV2[]>;
+    private _getIncludedMessages;
+    /**
+     * @deprecated use getMessagesPaginated instead
+     */
+    getMessages(args: StorageGetMessagesArg & {
+        format?: 'v1';
+    }): Promise<MastraMessageV1[]>;
+    getMessages(args: StorageGetMessagesArg & {
+        format: 'v2';
+    }): Promise<MastraMessageV2[]>;
+    getMessagesPaginated({ threadId, selectBy, format, }: StorageGetMessagesArg & {
+        format?: 'v1' | 'v2';
+    }): Promise<PaginationInfo & {
+        messages: MastraMessageV1[] | MastraMessageV2[];
+    }>;
+    updateMessages(args: {
+        messages: Partial<Omit<MastraMessageV2, 'createdAt'>> & {
+            id: string;
+            content?: {
+                metadata?: MastraMessageContentV2['metadata'];
+                content?: MastraMessageContentV2['content'];
+            };
+        }[];
+    }): Promise<MastraMessageV2[]>;
+}
+
 /**
  * Parses and returns a valid SQL SELECT column identifier.
  * Allows a single identifier (letters, numbers, underscores), or '*', optionally with 'AS alias'.
@@ -242,6 +452,41 @@ export declare function parseSelectIdentifier(column: string): SelectIdentifier;
 export declare const retryUntil: <T>(fn: () => Promise<T>, condition: (result: T) => boolean, timeout?: number, // REST API needs longer timeout due to higher latency
 interval?: number) => Promise<T>;
 
+export declare class ScoresStorageD1 extends ScoresStorage {
+    private operations;
+    constructor({ operations }: {
+        operations: StoreOperationsD1;
+    });
+    getScoreById({ id }: {
+        id: string;
+    }): Promise<ScoreRowData | null>;
+    saveScore(score: Omit<ScoreRowData, 'createdAt' | 'updatedAt'>): Promise<{
+        score: ScoreRowData;
+    }>;
+    getScoresByScorerId({ scorerId, pagination, }: {
+        scorerId: string;
+        pagination: StoragePagination;
+    }): Promise<{
+        pagination: PaginationInfo;
+        scores: ScoreRowData[];
+    }>;
+    getScoresByRunId({ runId, pagination, }: {
+        runId: string;
+        pagination: StoragePagination;
+    }): Promise<{
+        pagination: PaginationInfo;
+        scores: ScoreRowData[];
+    }>;
+    getScoresByEntityId({ entityId, entityType, pagination, }: {
+        pagination: StoragePagination;
+        entityId: string;
+        entityType: string;
+    }): Promise<{
+        pagination: PaginationInfo;
+        scores: ScoreRowData[];
+    }>;
+}
+
 /** Represents a validated SQL SELECT column identifier (or '*', optionally with 'AS alias'). */
 declare type SelectIdentifier = string & {
     __brand: 'SelectIdentifier';
@@ -356,4 +601,106 @@ export declare interface SqlQueryOptions {
     first?: boolean;
 }
 
+export declare class StoreOperationsD1 extends StoreOperations {
+    private client?;
+    private binding?;
+    private tablePrefix;
+    constructor(config: StoreOperationsD1Config);
+    hasColumn(table: string, column: string): Promise<boolean>;
+    getTableName(tableName: TABLE_NAMES): string;
+    private formatSqlParams;
+    private executeWorkersBindingQuery;
+    private executeRestQuery;
+    executeQuery(options: SqlQueryOptions): Promise<Record<string, any>[] | Record<string, any> | null>;
+    private getTableColumns;
+    private serializeValue;
+    protected getSqlType(type: StorageColumn['type']): string;
+    createTable({ tableName, schema, }: {
+        tableName: TABLE_NAMES;
+        schema: Record<string, StorageColumn>;
+    }): Promise<void>;
+    clearTable({ tableName }: {
+        tableName: TABLE_NAMES;
+    }): Promise<void>;
+    dropTable({ tableName }: {
+        tableName: TABLE_NAMES;
+    }): Promise<void>;
+    alterTable(args: {
+        tableName: TABLE_NAMES;
+        schema: Record<string, StorageColumn>;
+        ifNotExists: string[];
+    }): Promise<void>;
+    insert({ tableName, record }: {
+        tableName: TABLE_NAMES;
+        record: Record<string, any>;
+    }): Promise<void>;
+    batchInsert({ tableName, records }: {
+        tableName: TABLE_NAMES;
+        records: Record<string, any>[];
+    }): Promise<void>;
+    load<R>({ tableName, keys }: {
+        tableName: TABLE_NAMES;
+        keys: Record<string, string>;
+    }): Promise<R | null>;
+    processRecord(record: Record<string, any>): Promise<Record<string, any>>;
+    /**
+     * Upsert multiple records in a batch operation
+     * @param tableName The table to insert into
+     * @param records The records to insert
+     */
+    batchUpsert({ tableName, records }: {
+        tableName: TABLE_NAMES;
+        records: Record<string, any>[];
+    }): Promise<void>;
+}
+
+export declare interface StoreOperationsD1Config {
+    client?: D1Client_alias_2;
+    binding?: D1Database_2;
+    tablePrefix?: string;
+}
+
+export declare class TracesStorageD1 extends TracesStorage {
+    private operations;
+    constructor({ operations }: {
+        operations: StoreOperationsD1;
+    });
+    getTraces(args: StorageGetTracesArg): Promise<Trace[]>;
+    getTracesPaginated(args: StorageGetTracesPaginatedArg): Promise<PaginationInfo & {
+        traces: Trace[];
+    }>;
+    batchTraceInsert({ records }: {
+        records: Record<string, any>[];
+    }): Promise<void>;
+}
+
+export declare class WorkflowsStorageD1 extends WorkflowsStorage {
+    private operations;
+    constructor({ operations }: {
+        operations: StoreOperationsD1;
+    });
+    persistWorkflowSnapshot({ workflowName, runId, snapshot, }: {
+        workflowName: string;
+        runId: string;
+        snapshot: WorkflowRunState;
+    }): Promise<void>;
+    loadWorkflowSnapshot(params: {
+        workflowName: string;
+        runId: string;
+    }): Promise<WorkflowRunState | null>;
+    private parseWorkflowRun;
+    getWorkflowRuns({ workflowName, fromDate, toDate, limit, offset, resourceId, }?: {
+        workflowName?: string;
+        fromDate?: Date;
+        toDate?: Date;
+        limit?: number;
+        offset?: number;
+        resourceId?: string;
+    }): Promise<WorkflowRuns>;
+    getWorkflowRunById({ runId, workflowName, }: {
+        runId: string;
+        workflowName?: string;
+    }): Promise<WorkflowRun | null>;
+}
+
 export { }
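
The new `StoreOperationsD1` layer is what the domain classes (`MemoryStorageD1`, `TracesStorageD1`, `WorkflowsStorageD1`, `ScoresStorageD1`, `LegacyEvalsStorageD1`) are built on, mirroring the `stores: StorageDomains` field added to `D1Store`. A rough composition sketch, assuming these classes are re-exported from the package entry point (the rollup declares them as exports) and that a Workers `D1Database` binding is used; the REST or custom-`client` configurations should slot into the same spot:

```typescript
import type { D1Database } from '@cloudflare/workers-types';
import {
  StoreOperationsD1,
  MemoryStorageD1,
  TracesStorageD1,
  WorkflowsStorageD1,
  ScoresStorageD1,
} from '@mastra/cloudflare-d1'; // assumed re-exports

// Sketch only: wire the domain stores to one shared operations layer.
function buildStores(binding: D1Database) {
  const operations = new StoreOperationsD1({ binding, tablePrefix: 'mastra_' });
  return {
    memory: new MemoryStorageD1({ operations }),
    traces: new TracesStorageD1({ operations }),
    workflows: new WorkflowsStorageD1({ operations }),
    scores: new ScoresStorageD1({ operations }),
  };
}
```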