@mastra/libsql 0.0.1-alpha.3 → 0.0.1-alpha.4
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.turbo/turbo-build.log +9 -9
- package/CHANGELOG.md +9 -0
- package/dist/_tsup-dts-rollup.d.cts +11 -12
- package/dist/_tsup-dts-rollup.d.ts +11 -12
- package/dist/index.cjs +82 -35
- package/dist/index.js +82 -35
- package/package.json +8 -8
- package/src/storage/index.ts +112 -54
package/.turbo/turbo-build.log
CHANGED
@@ -1,23 +1,23 @@
 
-> @mastra/libsql@0.0.1-alpha.
+> @mastra/libsql@0.0.1-alpha.4 build /home/runner/work/mastra/mastra/stores/libsql
 > tsup src/index.ts --format esm,cjs --experimental-dts --clean --treeshake=smallest --splitting
 
 CLI Building entry: src/index.ts
 CLI Using tsconfig: tsconfig.json
 CLI tsup v8.4.0
 TSC Build start
-TSC ⚡️ Build success in
+TSC ⚡️ Build success in 6559ms
 DTS Build start
 CLI Target: es2022
-Analysis will use the bundled TypeScript version 5.8.
+Analysis will use the bundled TypeScript version 5.8.3
 Writing package typings: /home/runner/work/mastra/mastra/stores/libsql/dist/_tsup-dts-rollup.d.ts
-Analysis will use the bundled TypeScript version 5.8.
+Analysis will use the bundled TypeScript version 5.8.3
 Writing package typings: /home/runner/work/mastra/mastra/stores/libsql/dist/_tsup-dts-rollup.d.cts
-DTS ⚡️ Build success in
+DTS ⚡️ Build success in 8932ms
 CLI Cleaning output folder
 ESM Build start
 CJS Build start
-CJS dist/index.cjs
-CJS ⚡️ Build success in
-ESM dist/index.js
-ESM ⚡️ Build success in
+CJS dist/index.cjs 37.37 KB
+CJS ⚡️ Build success in 804ms
+ESM dist/index.js 37.26 KB
+ESM ⚡️ Build success in 804ms
package/CHANGELOG.md
CHANGED
@@ -1,5 +1,14 @@
 # @mastra/libsql
 
+## 0.0.1-alpha.4
+
+### Patch Changes
+
+- 479f490: [MASTRA-3131] Add getWorkflowRunByID and add resourceId as filter for getWorkflowRuns
+- Updated dependencies [e4943b8]
+- Updated dependencies [479f490]
+  - @mastra/core@0.9.1-alpha.4
+
 ## 0.0.1-alpha.3
 
 ### Patch Changes
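
For context, the storage surface added in this release (getWorkflowRunById plus the resourceId filter on getWorkflowRuns) can be exercised roughly as follows. This is a minimal sketch, assuming the package exports LibSQLStore (the typings below also alias it as DefaultStorage); the workflow name and ids are placeholders.

import type { WorkflowRun, WorkflowRuns } from '@mastra/core/storage';
import type { LibSQLStore } from '@mastra/libsql';

// Sketch only: `store` stands for an already-constructed LibSQLStore instance.
declare const store: LibSQLStore;

async function listRunsForResource(): Promise<void> {
  // resourceId is the new filter; passing both limit and offset also triggers
  // a COUNT(*) query so `total` is populated.
  const { runs, total }: WorkflowRuns = await store.getWorkflowRuns({
    workflowName: 'myWorkflow',
    resourceId: 'user-123',
    limit: 10,
    offset: 0,
  });

  // getWorkflowRunById resolves to null when no matching run exists.
  const run: WorkflowRun | null = await store.getWorkflowRunById({
    runId: runs[0]?.runId ?? 'some-run-id',
    workflowName: 'myWorkflow',
  });

  console.log(total, run?.snapshot);
}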
package/dist/_tsup-dts-rollup.d.cts
CHANGED
@@ -23,7 +23,8 @@ import type { StorageThreadType } from '@mastra/core/memory';
 import type { TABLE_NAMES } from '@mastra/core/storage';
 import type { UpsertVectorParams } from '@mastra/core/vector';
 import type { VectorFilter } from '@mastra/core/vector/filter';
-import type {
+import type { WorkflowRun } from '@mastra/core/storage';
+import type { WorkflowRuns } from '@mastra/core/storage';
 
 export declare function buildFilterQuery(filter: VectorFilter): FilterResult;
 
@@ -127,22 +128,20 @@ declare class LibSQLStore extends MastraStorage {
        attributes?: Record<string, string>;
        filters?: Record<string, any>;
    }): Promise<any[]>;
-    getWorkflowRuns({ workflowName, fromDate, toDate, limit, offset, }?: {
+    getWorkflowRuns({ workflowName, fromDate, toDate, limit, offset, resourceId, }?: {
        workflowName?: string;
        fromDate?: Date;
        toDate?: Date;
        limit?: number;
        offset?: number;
-
-
-
-
-
-
-
-
-        total: number;
-    }>;
+        resourceId?: string;
+    }): Promise<WorkflowRuns>;
+    getWorkflowRunById({ runId, workflowName, }: {
+        runId: string;
+        workflowName?: string;
+    }): Promise<WorkflowRun | null>;
+    private hasColumn;
+    private parseWorkflowRun;
 }
 export { LibSQLStore as DefaultStorage }
 export { LibSQLStore as DefaultStorage_alias_1 }
package/dist/_tsup-dts-rollup.d.ts
CHANGED
@@ -23,7 +23,8 @@ import type { StorageThreadType } from '@mastra/core/memory';
 import type { TABLE_NAMES } from '@mastra/core/storage';
 import type { UpsertVectorParams } from '@mastra/core/vector';
 import type { VectorFilter } from '@mastra/core/vector/filter';
-import type {
+import type { WorkflowRun } from '@mastra/core/storage';
+import type { WorkflowRuns } from '@mastra/core/storage';
 
 export declare function buildFilterQuery(filter: VectorFilter): FilterResult;
 
@@ -127,22 +128,20 @@ declare class LibSQLStore extends MastraStorage {
        attributes?: Record<string, string>;
        filters?: Record<string, any>;
    }): Promise<any[]>;
-    getWorkflowRuns({ workflowName, fromDate, toDate, limit, offset, }?: {
+    getWorkflowRuns({ workflowName, fromDate, toDate, limit, offset, resourceId, }?: {
        workflowName?: string;
        fromDate?: Date;
        toDate?: Date;
        limit?: number;
        offset?: number;
-
-
-
-
-
-
-
-
-        total: number;
-    }>;
+        resourceId?: string;
+    }): Promise<WorkflowRuns>;
+    getWorkflowRunById({ runId, workflowName, }: {
+        runId: string;
+        workflowName?: string;
+    }): Promise<WorkflowRun | null>;
+    private hasColumn;
+    private parseWorkflowRun;
 }
 export { LibSQLStore as DefaultStorage }
 export { LibSQLStore as DefaultStorage_alias_1 }
package/dist/index.cjs
CHANGED
@@ -1088,53 +1088,100 @@ var LibSQLStore = class extends storage.MastraStorage {
     fromDate,
     toDate,
     limit,
-    offset
+    offset,
+    resourceId
   } = {}) {
+    try {
+      const conditions = [];
+      const args = [];
+      if (workflowName) {
+        conditions.push("workflow_name = ?");
+        args.push(workflowName);
+      }
+      if (fromDate) {
+        conditions.push("createdAt >= ?");
+        args.push(fromDate.toISOString());
+      }
+      if (toDate) {
+        conditions.push("createdAt <= ?");
+        args.push(toDate.toISOString());
+      }
+      if (resourceId) {
+        const hasResourceId = await this.hasColumn(storage.TABLE_WORKFLOW_SNAPSHOT, "resourceId");
+        if (hasResourceId) {
+          conditions.push("resourceId = ?");
+          args.push(resourceId);
+        } else {
+          console.warn(`[${storage.TABLE_WORKFLOW_SNAPSHOT}] resourceId column not found. Skipping resourceId filter.`);
+        }
+      }
+      const whereClause = conditions.length > 0 ? `WHERE ${conditions.join(" AND ")}` : "";
+      let total = 0;
+      if (limit !== void 0 && offset !== void 0) {
+        const countResult = await this.client.execute({
+          sql: `SELECT COUNT(*) as count FROM ${storage.TABLE_WORKFLOW_SNAPSHOT} ${whereClause}`,
+          args
+        });
+        total = Number(countResult.rows?.[0]?.count ?? 0);
+      }
+      const result = await this.client.execute({
+        sql: `SELECT * FROM ${storage.TABLE_WORKFLOW_SNAPSHOT} ${whereClause} ORDER BY createdAt DESC${limit !== void 0 && offset !== void 0 ? ` LIMIT ? OFFSET ?` : ""}`,
+        args: limit !== void 0 && offset !== void 0 ? [...args, limit, offset] : args
+      });
+      const runs = (result.rows || []).map((row) => this.parseWorkflowRun(row));
+      return { runs, total: total || runs.length };
+    } catch (error) {
+      console.error("Error getting workflow runs:", error);
+      throw error;
+    }
+  }
+  async getWorkflowRunById({
+    runId,
+    workflowName
+  }) {
     const conditions = [];
     const args = [];
+    if (runId) {
+      conditions.push("run_id = ?");
+      args.push(runId);
+    }
     if (workflowName) {
       conditions.push("workflow_name = ?");
       args.push(workflowName);
     }
-    if (fromDate) {
-      conditions.push("createdAt >= ?");
-      args.push(fromDate.toISOString());
-    }
-    if (toDate) {
-      conditions.push("createdAt <= ?");
-      args.push(toDate.toISOString());
-    }
     const whereClause = conditions.length > 0 ? `WHERE ${conditions.join(" AND ")}` : "";
-
-
-
-
-
-
-    total = Number(countResult.rows?.[0]?.count ?? 0);
+    const result = await this.client.execute({
+      sql: `SELECT * FROM ${storage.TABLE_WORKFLOW_SNAPSHOT} ${whereClause}`,
+      args
+    });
+    if (!result.rows?.[0]) {
+      return null;
     }
+    return this.parseWorkflowRun(result.rows[0]);
+  }
+  async hasColumn(table, column) {
     const result = await this.client.execute({
-      sql: `
-      args: limit !== void 0 && offset !== void 0 ? [...args, limit, offset] : args
+      sql: `PRAGMA table_info(${table})`
     });
-
-
-
-
-
-
-
-
+    return (await result.rows)?.some((row) => row.name === column);
+  }
+  parseWorkflowRun(row) {
+    let parsedSnapshot = row.snapshot;
+    if (typeof parsedSnapshot === "string") {
+      try {
+        parsedSnapshot = JSON.parse(row.snapshot);
+      } catch (e) {
+        console.warn(`Failed to parse snapshot for workflow ${row.workflow_name}: ${e}`);
       }
-
-
-
-
-
-
-
-
-
+    }
+    return {
+      workflowName: row.workflow_name,
+      runId: row.run_id,
+      snapshot: parsedSnapshot,
+      resourceId: row.resourceId,
+      createdAt: new Date(row.created_at),
+      updatedAt: new Date(row.updated_at)
+    };
   }
 };
 
package/dist/index.js
CHANGED
@@ -1086,53 +1086,100 @@ var LibSQLStore = class extends MastraStorage {
     fromDate,
     toDate,
     limit,
-    offset
+    offset,
+    resourceId
   } = {}) {
+    try {
+      const conditions = [];
+      const args = [];
+      if (workflowName) {
+        conditions.push("workflow_name = ?");
+        args.push(workflowName);
+      }
+      if (fromDate) {
+        conditions.push("createdAt >= ?");
+        args.push(fromDate.toISOString());
+      }
+      if (toDate) {
+        conditions.push("createdAt <= ?");
+        args.push(toDate.toISOString());
+      }
+      if (resourceId) {
+        const hasResourceId = await this.hasColumn(TABLE_WORKFLOW_SNAPSHOT, "resourceId");
+        if (hasResourceId) {
+          conditions.push("resourceId = ?");
+          args.push(resourceId);
+        } else {
+          console.warn(`[${TABLE_WORKFLOW_SNAPSHOT}] resourceId column not found. Skipping resourceId filter.`);
+        }
+      }
+      const whereClause = conditions.length > 0 ? `WHERE ${conditions.join(" AND ")}` : "";
+      let total = 0;
+      if (limit !== void 0 && offset !== void 0) {
+        const countResult = await this.client.execute({
+          sql: `SELECT COUNT(*) as count FROM ${TABLE_WORKFLOW_SNAPSHOT} ${whereClause}`,
+          args
+        });
+        total = Number(countResult.rows?.[0]?.count ?? 0);
+      }
+      const result = await this.client.execute({
+        sql: `SELECT * FROM ${TABLE_WORKFLOW_SNAPSHOT} ${whereClause} ORDER BY createdAt DESC${limit !== void 0 && offset !== void 0 ? ` LIMIT ? OFFSET ?` : ""}`,
+        args: limit !== void 0 && offset !== void 0 ? [...args, limit, offset] : args
+      });
+      const runs = (result.rows || []).map((row) => this.parseWorkflowRun(row));
+      return { runs, total: total || runs.length };
+    } catch (error) {
+      console.error("Error getting workflow runs:", error);
+      throw error;
+    }
+  }
+  async getWorkflowRunById({
+    runId,
+    workflowName
+  }) {
     const conditions = [];
     const args = [];
+    if (runId) {
+      conditions.push("run_id = ?");
+      args.push(runId);
+    }
     if (workflowName) {
       conditions.push("workflow_name = ?");
       args.push(workflowName);
     }
-    if (fromDate) {
-      conditions.push("createdAt >= ?");
-      args.push(fromDate.toISOString());
-    }
-    if (toDate) {
-      conditions.push("createdAt <= ?");
-      args.push(toDate.toISOString());
-    }
     const whereClause = conditions.length > 0 ? `WHERE ${conditions.join(" AND ")}` : "";
-
-
-
-
-
-
-    total = Number(countResult.rows?.[0]?.count ?? 0);
+    const result = await this.client.execute({
+      sql: `SELECT * FROM ${TABLE_WORKFLOW_SNAPSHOT} ${whereClause}`,
+      args
+    });
+    if (!result.rows?.[0]) {
+      return null;
    }
+    return this.parseWorkflowRun(result.rows[0]);
+  }
+  async hasColumn(table, column) {
     const result = await this.client.execute({
-      sql: `
-      args: limit !== void 0 && offset !== void 0 ? [...args, limit, offset] : args
+      sql: `PRAGMA table_info(${table})`
     });
-
-
-
-
-
-
-
-
+    return (await result.rows)?.some((row) => row.name === column);
+  }
+  parseWorkflowRun(row) {
+    let parsedSnapshot = row.snapshot;
+    if (typeof parsedSnapshot === "string") {
+      try {
+        parsedSnapshot = JSON.parse(row.snapshot);
+      } catch (e) {
+        console.warn(`Failed to parse snapshot for workflow ${row.workflow_name}: ${e}`);
      }
-
-
-
-
-
-
-
-
-
+    }
+    return {
+      workflowName: row.workflow_name,
+      runId: row.run_id,
+      snapshot: parsedSnapshot,
+      resourceId: row.resourceId,
+      createdAt: new Date(row.created_at),
+      updatedAt: new Date(row.updated_at)
+    };
   }
 };
 
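
The column probe in the compiled code above relies on SQLite's PRAGMA table_info, which returns one row per column of the given table. A standalone sketch of the same technique with @libsql/client follows; the file URL and table name are illustrative, not taken from the package.

import { createClient } from '@libsql/client';

const client = createClient({ url: 'file:local.db' });

// PRAGMA table_info lists every column of the table; the column exists if any
// returned row carries its name. This is what lets the store skip the
// resourceId filter on databases created before that column existed.
async function hasColumn(table: string, column: string): Promise<boolean> {
  const result = await client.execute({ sql: `PRAGMA table_info(${table})` });
  return result.rows.some(row => row.name === column);
}

hasColumn('mastra_workflow_snapshot', 'resourceId').then(exists => {
  console.log(exists ? 'resourceId column present' : 'resourceId column missing');
});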
package/package.json
CHANGED
@@ -1,6 +1,6 @@
 {
   "name": "@mastra/libsql",
-  "version": "0.0.1-alpha.3",
+  "version": "0.0.1-alpha.4",
   "description": "Libsql provider for Mastra - includes both vector and db storage capabilities",
   "type": "module",
   "main": "dist/index.js",
@@ -21,17 +21,17 @@
   "license": "MIT",
   "dependencies": {
     "@libsql/client": "^0.15.4",
-    "@mastra/core": "^0.9.1-alpha.
+    "@mastra/core": "^0.9.1-alpha.4"
   },
   "devDependencies": {
-    "@microsoft/api-extractor": "^7.52.
-    "@types/node": "^20.17.
-    "eslint": "^9.
+    "@microsoft/api-extractor": "^7.52.5",
+    "@types/node": "^20.17.32",
+    "eslint": "^9.25.1",
     "tsup": "^8.4.0",
-    "typescript": "^5.8.
-    "vitest": "^3.
+    "typescript": "^5.8.3",
+    "vitest": "^3.1.2",
     "@internal/lint": "0.0.2",
-    "@internal/storage-test-utils": "0.0.1-alpha.
+    "@internal/storage-test-utils": "0.0.1-alpha.3"
   },
   "scripts": {
     "build": "tsup src/index.ts --format esm,cjs --experimental-dts --clean --treeshake=smallest --splitting",
package/src/storage/index.ts
CHANGED
@@ -10,7 +10,14 @@ import {
   TABLE_WORKFLOW_SNAPSHOT,
   TABLE_EVALS,
 } from '@mastra/core/storage';
-import type {
+import type {
+  EvalRow,
+  StorageColumn,
+  StorageGetMessagesArg,
+  TABLE_NAMES,
+  WorkflowRun,
+  WorkflowRuns,
+} from '@mastra/core/storage';
 import type { WorkflowRunState } from '@mastra/core/workflows';
 
 function safelyParseJSON(jsonString: string): any {
@@ -544,80 +551,131 @@ export class LibSQLStore extends MastraStorage {
     toDate,
     limit,
     offset,
+    resourceId,
   }: {
     workflowName?: string;
     fromDate?: Date;
     toDate?: Date;
     limit?: number;
     offset?: number;
-
-
-
-
-
-      createdAt: Date;
-      updatedAt: Date;
-    }>;
-    total: number;
-  }> {
-    const conditions: string[] = [];
-    const args: InValue[] = [];
+    resourceId?: string;
+  } = {}): Promise<WorkflowRuns> {
+    try {
+      const conditions: string[] = [];
+      const args: InValue[] = [];
 
-
-
-
+      if (workflowName) {
+        conditions.push('workflow_name = ?');
+        args.push(workflowName);
+      }
+
+      if (fromDate) {
+        conditions.push('createdAt >= ?');
+        args.push(fromDate.toISOString());
+      }
+
+      if (toDate) {
+        conditions.push('createdAt <= ?');
+        args.push(toDate.toISOString());
+      }
+
+      if (resourceId) {
+        const hasResourceId = await this.hasColumn(TABLE_WORKFLOW_SNAPSHOT, 'resourceId');
+        if (hasResourceId) {
+          conditions.push('resourceId = ?');
+          args.push(resourceId);
+        } else {
+          console.warn(`[${TABLE_WORKFLOW_SNAPSHOT}] resourceId column not found. Skipping resourceId filter.`);
+        }
+      }
+
+      const whereClause = conditions.length > 0 ? `WHERE ${conditions.join(' AND ')}` : '';
+
+      let total = 0;
+      // Only get total count when using pagination
+      if (limit !== undefined && offset !== undefined) {
+        const countResult = await this.client.execute({
+          sql: `SELECT COUNT(*) as count FROM ${TABLE_WORKFLOW_SNAPSHOT} ${whereClause}`,
+          args,
+        });
+        total = Number(countResult.rows?.[0]?.count ?? 0);
+      }
+
+      // Get results
+      const result = await this.client.execute({
+        sql: `SELECT * FROM ${TABLE_WORKFLOW_SNAPSHOT} ${whereClause} ORDER BY createdAt DESC${limit !== undefined && offset !== undefined ? ` LIMIT ? OFFSET ?` : ''}`,
+        args: limit !== undefined && offset !== undefined ? [...args, limit, offset] : args,
+      });
+
+      const runs = (result.rows || []).map(row => this.parseWorkflowRun(row));
+
+      // Use runs.length as total when not paginating
+      return { runs, total: total || runs.length };
+    } catch (error) {
+      console.error('Error getting workflow runs:', error);
+      throw error;
     }
+  }
 
-
-
-
+  async getWorkflowRunById({
+    runId,
+    workflowName,
+  }: {
+    runId: string;
+    workflowName?: string;
+  }): Promise<WorkflowRun | null> {
+    const conditions: string[] = [];
+    const args: (string | number)[] = [];
+
+    if (runId) {
+      conditions.push('run_id = ?');
+      args.push(runId);
     }
 
-    if (
-      conditions.push('
-      args.push(
+    if (workflowName) {
+      conditions.push('workflow_name = ?');
+      args.push(workflowName);
     }
 
     const whereClause = conditions.length > 0 ? `WHERE ${conditions.join(' AND ')}` : '';
 
-    let total = 0;
-    // Only get total count when using pagination
-    if (limit !== undefined && offset !== undefined) {
-      const countResult = await this.client.execute({
-        sql: `SELECT COUNT(*) as count FROM ${TABLE_WORKFLOW_SNAPSHOT} ${whereClause}`,
-        args,
-      });
-      total = Number(countResult.rows?.[0]?.count ?? 0);
-    }
-
-    // Get results
     const result = await this.client.execute({
-      sql: `SELECT * FROM ${TABLE_WORKFLOW_SNAPSHOT} ${whereClause}
-      args
+      sql: `SELECT * FROM ${TABLE_WORKFLOW_SNAPSHOT} ${whereClause}`,
+      args,
     });
 
-
-
-
-    try {
-      parsedSnapshot = JSON.parse(row.snapshot as string) as WorkflowRunState;
-    } catch (e) {
-      // If parsing fails, return the raw snapshot string
-      console.warn(`Failed to parse snapshot for workflow ${row.workflow_name}: ${e}`);
-    }
-    }
+    if (!result.rows?.[0]) {
+      return null;
+    }
 
-
-
-
-
-
-
-    };
+    return this.parseWorkflowRun(result.rows[0]);
+  }
+
+  private async hasColumn(table: string, column: string): Promise<boolean> {
+    const result = await this.client.execute({
+      sql: `PRAGMA table_info(${table})`,
     });
+    return (await result.rows)?.some((row: any) => row.name === column);
+  }
 
-
-
+  private parseWorkflowRun(row: Record<string, any>): WorkflowRun {
+    let parsedSnapshot: WorkflowRunState | string = row.snapshot as string;
+    if (typeof parsedSnapshot === 'string') {
+      try {
+        parsedSnapshot = JSON.parse(row.snapshot as string) as WorkflowRunState;
+      } catch (e) {
+        // If parsing fails, return the raw snapshot string
+        console.warn(`Failed to parse snapshot for workflow ${row.workflow_name}: ${e}`);
+      }
+    }
+    return {
+      workflowName: row.workflow_name as string,
+      runId: row.run_id as string,
+      snapshot: parsedSnapshot,
+      resourceId: row.resourceId as string,
+      createdAt: new Date(row.created_at as string),
+      updatedAt: new Date(row.updated_at as string),
+    };
   }
 }
 
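
The new getWorkflowRuns builds its SQL from an accumulated list of conditions and bound arguments. A small sketch of that query-building step in isolation, with made-up argument values; TABLE_WORKFLOW_SNAPSHOT is the table-name constant imported from @mastra/core/storage, as in the source above.

import { TABLE_WORKFLOW_SNAPSHOT } from '@mastra/core/storage';

// Illustrative values; in the store these come from the caller's filters.
const workflowName = 'myWorkflow';
const resourceId = 'user-123';
const limit = 10;
const offset = 0;

const conditions: string[] = [];
const args: (string | number)[] = [];

conditions.push('workflow_name = ?');
args.push(workflowName);
conditions.push('resourceId = ?');
args.push(resourceId);

// Same shape as the store's query: optional WHERE clause, newest runs first,
// LIMIT/OFFSET appended only when both pagination values are provided.
const whereClause = conditions.length > 0 ? `WHERE ${conditions.join(' AND ')}` : '';
const sql = `SELECT * FROM ${TABLE_WORKFLOW_SNAPSHOT} ${whereClause} ORDER BY createdAt DESC LIMIT ? OFFSET ?`;

console.log(sql, [...args, limit, offset]);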