@donkeylabs/server 0.6.3 → 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/package.json +1 -1
- package/src/core/audit.ts +370 -0
- package/src/core/index.ts +37 -0
- package/src/core/job-adapter-kysely.ts +292 -0
- package/src/core/migrations/audit/001_create_audit_log_table.ts +56 -0
- package/src/core/migrations/jobs/001_create_jobs_table.ts +65 -0
- package/src/core/migrations/processes/001_create_processes_table.ts +55 -0
- package/src/core/migrations/workflows/001_create_workflow_instances_table.ts +55 -0
- package/src/core/process-adapter-kysely.ts +260 -0
- package/src/core/websocket.ts +352 -0
- package/src/core/workflow-adapter-kysely.ts +255 -0
- package/src/core.ts +98 -1
- package/src/harness.ts +31 -4
- package/src/server.ts +55 -1
|
@@ -0,0 +1,292 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Kysely Job Adapter
|
|
3
|
+
*
|
|
4
|
+
* Implements the JobAdapter interface using Kysely for the shared app database.
|
|
5
|
+
* This replaces the standalone SqliteJobAdapter that used a separate .donkeylabs/jobs.db file.
|
|
6
|
+
*/
|
|
7
|
+
|
|
8
|
+
import type { Kysely } from "kysely";
|
|
9
|
+
import type { Job, JobAdapter, JobStatus } from "./jobs";
|
|
10
|
+
import type { ExternalJobProcessState } from "./external-jobs";
|
|
11
|
+
|
|
12
|
+
/** Tuning options for KyselyJobAdapter's background cleanup of finished jobs. */
export interface KyselyJobAdapterConfig {
  /** Auto-cleanup completed jobs older than N days (default: 7, 0 to disable) */
  cleanupDays?: number;
  /** Cleanup interval in ms (default: 3600000 = 1 hour) */
  cleanupInterval?: number;
}
|
|
18
|
+
|
|
19
|
+
// Table type for Kysely
|
|
20
|
+
// Row shape of the __donkeylabs_jobs__ table (snake_case columns).
// Conventions, as established by KyselyJobAdapter.create()/rowToJob():
// dates are ISO-8601 strings, `data`/`result` are JSON strings, and
// `external` is a 0/1 integer flag.
interface JobsTable {
  id: string; // "job_<timestamp>_<random>" primary key
  name: string; // registered job name
  data: string; // JSON-serialized job payload
  status: string; // JobStatus value (e.g. "pending", "scheduled", "running")
  created_at: string; // ISO-8601
  run_at: string | null; // ISO-8601; set only for scheduled jobs
  started_at: string | null; // ISO-8601
  completed_at: string | null; // ISO-8601
  result: string | null; // JSON-serialized result
  error: string | null;
  attempts: number;
  max_attempts: number;
  external: number; // 0/1 boolean flag
  pid: number | null; // external-job process id
  socket_path: string | null; // external-job IPC socket
  tcp_port: number | null; // external-job TCP port
  last_heartbeat: string | null; // ISO-8601
  process_state: string | null; // ExternalJobProcessState value
}
|
|
40
|
+
|
|
41
|
+
// Minimal schema view for this adapter: the shared app database is cast down
// to this type so only the jobs table is visible to queries here.
interface Database {
  __donkeylabs_jobs__: JobsTable;
}
|
|
44
|
+
|
|
45
|
+
export class KyselyJobAdapter implements JobAdapter {
|
|
46
|
+
private db: Kysely<Database>;
|
|
47
|
+
private cleanupTimer?: ReturnType<typeof setInterval>;
|
|
48
|
+
private cleanupDays: number;
|
|
49
|
+
|
|
50
|
+
constructor(db: Kysely<any>, config: KyselyJobAdapterConfig = {}) {
|
|
51
|
+
this.db = db as Kysely<Database>;
|
|
52
|
+
this.cleanupDays = config.cleanupDays ?? 7;
|
|
53
|
+
|
|
54
|
+
// Start cleanup timer
|
|
55
|
+
if (this.cleanupDays > 0) {
|
|
56
|
+
const interval = config.cleanupInterval ?? 3600000; // 1 hour
|
|
57
|
+
this.cleanupTimer = setInterval(() => this.cleanup(), interval);
|
|
58
|
+
// Run cleanup on startup
|
|
59
|
+
this.cleanup();
|
|
60
|
+
}
|
|
61
|
+
}
|
|
62
|
+
|
|
63
|
+
async create(job: Omit<Job, "id">): Promise<Job> {
|
|
64
|
+
const id = `job_${Date.now()}_${Math.random().toString(36).slice(2, 9)}`;
|
|
65
|
+
|
|
66
|
+
await this.db
|
|
67
|
+
.insertInto("__donkeylabs_jobs__")
|
|
68
|
+
.values({
|
|
69
|
+
id,
|
|
70
|
+
name: job.name,
|
|
71
|
+
data: JSON.stringify(job.data),
|
|
72
|
+
status: job.status,
|
|
73
|
+
created_at: job.createdAt.toISOString(),
|
|
74
|
+
run_at: job.runAt?.toISOString() ?? null,
|
|
75
|
+
started_at: null,
|
|
76
|
+
completed_at: null,
|
|
77
|
+
result: null,
|
|
78
|
+
error: null,
|
|
79
|
+
attempts: job.attempts,
|
|
80
|
+
max_attempts: job.maxAttempts,
|
|
81
|
+
external: job.external ? 1 : 0,
|
|
82
|
+
pid: null,
|
|
83
|
+
socket_path: null,
|
|
84
|
+
tcp_port: null,
|
|
85
|
+
last_heartbeat: null,
|
|
86
|
+
process_state: job.processState ?? null,
|
|
87
|
+
})
|
|
88
|
+
.execute();
|
|
89
|
+
|
|
90
|
+
return { ...job, id };
|
|
91
|
+
}
|
|
92
|
+
|
|
93
|
+
async get(jobId: string): Promise<Job | null> {
|
|
94
|
+
const row = await this.db
|
|
95
|
+
.selectFrom("__donkeylabs_jobs__")
|
|
96
|
+
.selectAll()
|
|
97
|
+
.where("id", "=", jobId)
|
|
98
|
+
.executeTakeFirst();
|
|
99
|
+
|
|
100
|
+
if (!row) return null;
|
|
101
|
+
return this.rowToJob(row);
|
|
102
|
+
}
|
|
103
|
+
|
|
104
|
+
async update(jobId: string, updates: Partial<Job>): Promise<void> {
|
|
105
|
+
const updateData: Partial<JobsTable> = {};
|
|
106
|
+
|
|
107
|
+
if (updates.status !== undefined) {
|
|
108
|
+
updateData.status = updates.status;
|
|
109
|
+
}
|
|
110
|
+
if (updates.startedAt !== undefined) {
|
|
111
|
+
updateData.started_at = updates.startedAt?.toISOString() ?? null;
|
|
112
|
+
}
|
|
113
|
+
if (updates.completedAt !== undefined) {
|
|
114
|
+
updateData.completed_at = updates.completedAt?.toISOString() ?? null;
|
|
115
|
+
}
|
|
116
|
+
if (updates.result !== undefined) {
|
|
117
|
+
updateData.result = JSON.stringify(updates.result);
|
|
118
|
+
}
|
|
119
|
+
if (updates.error !== undefined) {
|
|
120
|
+
updateData.error = updates.error;
|
|
121
|
+
}
|
|
122
|
+
if (updates.attempts !== undefined) {
|
|
123
|
+
updateData.attempts = updates.attempts;
|
|
124
|
+
}
|
|
125
|
+
// External job fields
|
|
126
|
+
if (updates.pid !== undefined) {
|
|
127
|
+
updateData.pid = updates.pid;
|
|
128
|
+
}
|
|
129
|
+
if (updates.socketPath !== undefined) {
|
|
130
|
+
updateData.socket_path = updates.socketPath;
|
|
131
|
+
}
|
|
132
|
+
if (updates.tcpPort !== undefined) {
|
|
133
|
+
updateData.tcp_port = updates.tcpPort;
|
|
134
|
+
}
|
|
135
|
+
if (updates.lastHeartbeat !== undefined) {
|
|
136
|
+
updateData.last_heartbeat = updates.lastHeartbeat?.toISOString() ?? null;
|
|
137
|
+
}
|
|
138
|
+
if (updates.processState !== undefined) {
|
|
139
|
+
updateData.process_state = updates.processState;
|
|
140
|
+
}
|
|
141
|
+
|
|
142
|
+
if (Object.keys(updateData).length === 0) return;
|
|
143
|
+
|
|
144
|
+
await this.db
|
|
145
|
+
.updateTable("__donkeylabs_jobs__")
|
|
146
|
+
.set(updateData)
|
|
147
|
+
.where("id", "=", jobId)
|
|
148
|
+
.execute();
|
|
149
|
+
}
|
|
150
|
+
|
|
151
|
+
async delete(jobId: string): Promise<boolean> {
|
|
152
|
+
// Check if exists first since BunSqliteDialect doesn't report numDeletedRows properly
|
|
153
|
+
const exists = await this.db
|
|
154
|
+
.selectFrom("__donkeylabs_jobs__")
|
|
155
|
+
.select("id")
|
|
156
|
+
.where("id", "=", jobId)
|
|
157
|
+
.executeTakeFirst();
|
|
158
|
+
|
|
159
|
+
if (!exists) return false;
|
|
160
|
+
|
|
161
|
+
await this.db
|
|
162
|
+
.deleteFrom("__donkeylabs_jobs__")
|
|
163
|
+
.where("id", "=", jobId)
|
|
164
|
+
.execute();
|
|
165
|
+
|
|
166
|
+
return true;
|
|
167
|
+
}
|
|
168
|
+
|
|
169
|
+
async getPending(limit: number = 100): Promise<Job[]> {
|
|
170
|
+
const rows = await this.db
|
|
171
|
+
.selectFrom("__donkeylabs_jobs__")
|
|
172
|
+
.selectAll()
|
|
173
|
+
.where("status", "=", "pending")
|
|
174
|
+
.orderBy("created_at", "asc")
|
|
175
|
+
.limit(limit)
|
|
176
|
+
.execute();
|
|
177
|
+
|
|
178
|
+
return rows.map((r) => this.rowToJob(r));
|
|
179
|
+
}
|
|
180
|
+
|
|
181
|
+
async getScheduledReady(now: Date): Promise<Job[]> {
|
|
182
|
+
const rows = await this.db
|
|
183
|
+
.selectFrom("__donkeylabs_jobs__")
|
|
184
|
+
.selectAll()
|
|
185
|
+
.where("status", "=", "scheduled")
|
|
186
|
+
.where("run_at", "<=", now.toISOString())
|
|
187
|
+
.orderBy("run_at", "asc")
|
|
188
|
+
.execute();
|
|
189
|
+
|
|
190
|
+
return rows.map((r) => this.rowToJob(r));
|
|
191
|
+
}
|
|
192
|
+
|
|
193
|
+
async getByName(name: string, status?: JobStatus): Promise<Job[]> {
|
|
194
|
+
let query = this.db
|
|
195
|
+
.selectFrom("__donkeylabs_jobs__")
|
|
196
|
+
.selectAll()
|
|
197
|
+
.where("name", "=", name);
|
|
198
|
+
|
|
199
|
+
if (status) {
|
|
200
|
+
query = query.where("status", "=", status);
|
|
201
|
+
}
|
|
202
|
+
|
|
203
|
+
const rows = await query.orderBy("created_at", "desc").execute();
|
|
204
|
+
return rows.map((r) => this.rowToJob(r));
|
|
205
|
+
}
|
|
206
|
+
|
|
207
|
+
async getRunningExternal(): Promise<Job[]> {
|
|
208
|
+
const rows = await this.db
|
|
209
|
+
.selectFrom("__donkeylabs_jobs__")
|
|
210
|
+
.selectAll()
|
|
211
|
+
.where("external", "=", 1)
|
|
212
|
+
.where("status", "=", "running")
|
|
213
|
+
.execute();
|
|
214
|
+
|
|
215
|
+
return rows.map((r) => this.rowToJob(r));
|
|
216
|
+
}
|
|
217
|
+
|
|
218
|
+
async getOrphanedExternal(): Promise<Job[]> {
|
|
219
|
+
const rows = await this.db
|
|
220
|
+
.selectFrom("__donkeylabs_jobs__")
|
|
221
|
+
.selectAll()
|
|
222
|
+
.where("external", "=", 1)
|
|
223
|
+
.where("status", "=", "running")
|
|
224
|
+
.where((eb) =>
|
|
225
|
+
eb.or([
|
|
226
|
+
eb("process_state", "=", "running"),
|
|
227
|
+
eb("process_state", "=", "orphaned"),
|
|
228
|
+
eb("process_state", "=", "spawning"),
|
|
229
|
+
])
|
|
230
|
+
)
|
|
231
|
+
.execute();
|
|
232
|
+
|
|
233
|
+
return rows.map((r) => this.rowToJob(r));
|
|
234
|
+
}
|
|
235
|
+
|
|
236
|
+
private rowToJob(row: JobsTable): Job {
|
|
237
|
+
return {
|
|
238
|
+
id: row.id,
|
|
239
|
+
name: row.name,
|
|
240
|
+
data: JSON.parse(row.data),
|
|
241
|
+
status: row.status as JobStatus,
|
|
242
|
+
createdAt: new Date(row.created_at),
|
|
243
|
+
runAt: row.run_at ? new Date(row.run_at) : undefined,
|
|
244
|
+
startedAt: row.started_at ? new Date(row.started_at) : undefined,
|
|
245
|
+
completedAt: row.completed_at ? new Date(row.completed_at) : undefined,
|
|
246
|
+
result: row.result ? JSON.parse(row.result) : undefined,
|
|
247
|
+
error: row.error ?? undefined,
|
|
248
|
+
attempts: row.attempts,
|
|
249
|
+
maxAttempts: row.max_attempts,
|
|
250
|
+
// External job fields
|
|
251
|
+
external: row.external === 1 ? true : undefined,
|
|
252
|
+
pid: row.pid ?? undefined,
|
|
253
|
+
socketPath: row.socket_path ?? undefined,
|
|
254
|
+
tcpPort: row.tcp_port ?? undefined,
|
|
255
|
+
lastHeartbeat: row.last_heartbeat ? new Date(row.last_heartbeat) : undefined,
|
|
256
|
+
processState: row.process_state as ExternalJobProcessState | undefined,
|
|
257
|
+
};
|
|
258
|
+
}
|
|
259
|
+
|
|
260
|
+
/** Clean up old completed/failed jobs */
|
|
261
|
+
private async cleanup(): Promise<void> {
|
|
262
|
+
if (this.cleanupDays <= 0) return;
|
|
263
|
+
|
|
264
|
+
try {
|
|
265
|
+
const cutoff = new Date();
|
|
266
|
+
cutoff.setDate(cutoff.getDate() - this.cleanupDays);
|
|
267
|
+
|
|
268
|
+
const result = await this.db
|
|
269
|
+
.deleteFrom("__donkeylabs_jobs__")
|
|
270
|
+
.where((eb) =>
|
|
271
|
+
eb.or([eb("status", "=", "completed"), eb("status", "=", "failed")])
|
|
272
|
+
)
|
|
273
|
+
.where("completed_at", "<", cutoff.toISOString())
|
|
274
|
+
.execute();
|
|
275
|
+
|
|
276
|
+
const numDeleted = Number(result[0]?.numDeletedRows ?? 0);
|
|
277
|
+
if (numDeleted > 0) {
|
|
278
|
+
console.log(`[Jobs] Cleaned up ${numDeleted} old jobs`);
|
|
279
|
+
}
|
|
280
|
+
} catch (err) {
|
|
281
|
+
console.error("[Jobs] Cleanup error:", err);
|
|
282
|
+
}
|
|
283
|
+
}
|
|
284
|
+
|
|
285
|
+
/** Stop the adapter and cleanup timer */
|
|
286
|
+
stop(): void {
|
|
287
|
+
if (this.cleanupTimer) {
|
|
288
|
+
clearInterval(this.cleanupTimer);
|
|
289
|
+
this.cleanupTimer = undefined;
|
|
290
|
+
}
|
|
291
|
+
}
|
|
292
|
+
}
|
|
@@ -0,0 +1,56 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Core Migration: Audit Log Table
|
|
3
|
+
*
|
|
4
|
+
* Creates the __donkeylabs_audit__ table for audit logging in the shared database.
|
|
5
|
+
*/
|
|
6
|
+
|
|
7
|
+
import type { Kysely } from "kysely";
|
|
8
|
+
|
|
9
|
+
export async function up(db: Kysely<any>): Promise<void> {
|
|
10
|
+
await db.schema
|
|
11
|
+
.createTable("__donkeylabs_audit__")
|
|
12
|
+
.ifNotExists()
|
|
13
|
+
.addColumn("id", "text", (col) => col.primaryKey())
|
|
14
|
+
.addColumn("timestamp", "text", (col) => col.notNull())
|
|
15
|
+
.addColumn("action", "text", (col) => col.notNull())
|
|
16
|
+
.addColumn("actor", "text", (col) => col.notNull())
|
|
17
|
+
.addColumn("resource", "text", (col) => col.notNull())
|
|
18
|
+
.addColumn("resource_id", "text")
|
|
19
|
+
.addColumn("metadata", "text")
|
|
20
|
+
.addColumn("ip", "text")
|
|
21
|
+
.addColumn("request_id", "text")
|
|
22
|
+
.execute();
|
|
23
|
+
|
|
24
|
+
// Create indexes for efficient queries
|
|
25
|
+
await db.schema
|
|
26
|
+
.createIndex("idx_donkeylabs_audit_actor")
|
|
27
|
+
.ifNotExists()
|
|
28
|
+
.on("__donkeylabs_audit__")
|
|
29
|
+
.column("actor")
|
|
30
|
+
.execute();
|
|
31
|
+
|
|
32
|
+
await db.schema
|
|
33
|
+
.createIndex("idx_donkeylabs_audit_timestamp")
|
|
34
|
+
.ifNotExists()
|
|
35
|
+
.on("__donkeylabs_audit__")
|
|
36
|
+
.column("timestamp")
|
|
37
|
+
.execute();
|
|
38
|
+
|
|
39
|
+
await db.schema
|
|
40
|
+
.createIndex("idx_donkeylabs_audit_resource")
|
|
41
|
+
.ifNotExists()
|
|
42
|
+
.on("__donkeylabs_audit__")
|
|
43
|
+
.columns(["resource", "resource_id"])
|
|
44
|
+
.execute();
|
|
45
|
+
|
|
46
|
+
await db.schema
|
|
47
|
+
.createIndex("idx_donkeylabs_audit_action")
|
|
48
|
+
.ifNotExists()
|
|
49
|
+
.on("__donkeylabs_audit__")
|
|
50
|
+
.column("action")
|
|
51
|
+
.execute();
|
|
52
|
+
}
|
|
53
|
+
|
|
54
|
+
export async function down(db: Kysely<any>): Promise<void> {
|
|
55
|
+
await db.schema.dropTable("__donkeylabs_audit__").ifExists().execute();
|
|
56
|
+
}
|
|
@@ -0,0 +1,65 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Core Migration: Jobs Table
|
|
3
|
+
*
|
|
4
|
+
* Creates the __donkeylabs_jobs__ table for job persistence in the shared database.
|
|
5
|
+
*/
|
|
6
|
+
|
|
7
|
+
import type { Kysely } from "kysely";
|
|
8
|
+
|
|
9
|
+
export async function up(db: Kysely<any>): Promise<void> {
|
|
10
|
+
await db.schema
|
|
11
|
+
.createTable("__donkeylabs_jobs__")
|
|
12
|
+
.ifNotExists()
|
|
13
|
+
.addColumn("id", "text", (col) => col.primaryKey())
|
|
14
|
+
.addColumn("name", "text", (col) => col.notNull())
|
|
15
|
+
.addColumn("data", "text", (col) => col.notNull())
|
|
16
|
+
.addColumn("status", "text", (col) => col.notNull().defaultTo("pending"))
|
|
17
|
+
.addColumn("created_at", "text", (col) => col.notNull())
|
|
18
|
+
.addColumn("run_at", "text")
|
|
19
|
+
.addColumn("started_at", "text")
|
|
20
|
+
.addColumn("completed_at", "text")
|
|
21
|
+
.addColumn("result", "text")
|
|
22
|
+
.addColumn("error", "text")
|
|
23
|
+
.addColumn("attempts", "integer", (col) => col.notNull().defaultTo(0))
|
|
24
|
+
.addColumn("max_attempts", "integer", (col) => col.notNull().defaultTo(3))
|
|
25
|
+
.addColumn("external", "integer", (col) => col.defaultTo(0))
|
|
26
|
+
.addColumn("pid", "integer")
|
|
27
|
+
.addColumn("socket_path", "text")
|
|
28
|
+
.addColumn("tcp_port", "integer")
|
|
29
|
+
.addColumn("last_heartbeat", "text")
|
|
30
|
+
.addColumn("process_state", "text")
|
|
31
|
+
.execute();
|
|
32
|
+
|
|
33
|
+
// Create indexes for efficient queries
|
|
34
|
+
await db.schema
|
|
35
|
+
.createIndex("idx_donkeylabs_jobs_status")
|
|
36
|
+
.ifNotExists()
|
|
37
|
+
.on("__donkeylabs_jobs__")
|
|
38
|
+
.column("status")
|
|
39
|
+
.execute();
|
|
40
|
+
|
|
41
|
+
await db.schema
|
|
42
|
+
.createIndex("idx_donkeylabs_jobs_name")
|
|
43
|
+
.ifNotExists()
|
|
44
|
+
.on("__donkeylabs_jobs__")
|
|
45
|
+
.column("name")
|
|
46
|
+
.execute();
|
|
47
|
+
|
|
48
|
+
await db.schema
|
|
49
|
+
.createIndex("idx_donkeylabs_jobs_external")
|
|
50
|
+
.ifNotExists()
|
|
51
|
+
.on("__donkeylabs_jobs__")
|
|
52
|
+
.columns(["external", "status"])
|
|
53
|
+
.execute();
|
|
54
|
+
|
|
55
|
+
await db.schema
|
|
56
|
+
.createIndex("idx_donkeylabs_jobs_scheduled")
|
|
57
|
+
.ifNotExists()
|
|
58
|
+
.on("__donkeylabs_jobs__")
|
|
59
|
+
.columns(["status", "run_at"])
|
|
60
|
+
.execute();
|
|
61
|
+
}
|
|
62
|
+
|
|
63
|
+
export async function down(db: Kysely<any>): Promise<void> {
|
|
64
|
+
await db.schema.dropTable("__donkeylabs_jobs__").ifExists().execute();
|
|
65
|
+
}
|
|
@@ -0,0 +1,55 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Core Migration: Processes Table
|
|
3
|
+
*
|
|
4
|
+
* Creates the __donkeylabs_processes__ table for process persistence in the shared database.
|
|
5
|
+
*/
|
|
6
|
+
|
|
7
|
+
import type { Kysely } from "kysely";
|
|
8
|
+
|
|
9
|
+
export async function up(db: Kysely<any>): Promise<void> {
|
|
10
|
+
await db.schema
|
|
11
|
+
.createTable("__donkeylabs_processes__")
|
|
12
|
+
.ifNotExists()
|
|
13
|
+
.addColumn("id", "text", (col) => col.primaryKey())
|
|
14
|
+
.addColumn("name", "text", (col) => col.notNull())
|
|
15
|
+
.addColumn("pid", "integer")
|
|
16
|
+
.addColumn("socket_path", "text")
|
|
17
|
+
.addColumn("tcp_port", "integer")
|
|
18
|
+
.addColumn("status", "text", (col) => col.notNull().defaultTo("stopped"))
|
|
19
|
+
.addColumn("config", "text", (col) => col.notNull())
|
|
20
|
+
.addColumn("metadata", "text")
|
|
21
|
+
.addColumn("created_at", "text", (col) => col.notNull())
|
|
22
|
+
.addColumn("started_at", "text")
|
|
23
|
+
.addColumn("stopped_at", "text")
|
|
24
|
+
.addColumn("last_heartbeat", "text")
|
|
25
|
+
.addColumn("restart_count", "integer", (col) => col.defaultTo(0))
|
|
26
|
+
.addColumn("consecutive_failures", "integer", (col) => col.defaultTo(0))
|
|
27
|
+
.addColumn("error", "text")
|
|
28
|
+
.execute();
|
|
29
|
+
|
|
30
|
+
// Create indexes for efficient queries
|
|
31
|
+
await db.schema
|
|
32
|
+
.createIndex("idx_donkeylabs_processes_status")
|
|
33
|
+
.ifNotExists()
|
|
34
|
+
.on("__donkeylabs_processes__")
|
|
35
|
+
.column("status")
|
|
36
|
+
.execute();
|
|
37
|
+
|
|
38
|
+
await db.schema
|
|
39
|
+
.createIndex("idx_donkeylabs_processes_name")
|
|
40
|
+
.ifNotExists()
|
|
41
|
+
.on("__donkeylabs_processes__")
|
|
42
|
+
.column("name")
|
|
43
|
+
.execute();
|
|
44
|
+
|
|
45
|
+
await db.schema
|
|
46
|
+
.createIndex("idx_donkeylabs_processes_name_status")
|
|
47
|
+
.ifNotExists()
|
|
48
|
+
.on("__donkeylabs_processes__")
|
|
49
|
+
.columns(["name", "status"])
|
|
50
|
+
.execute();
|
|
51
|
+
}
|
|
52
|
+
|
|
53
|
+
export async function down(db: Kysely<any>): Promise<void> {
|
|
54
|
+
await db.schema.dropTable("__donkeylabs_processes__").ifExists().execute();
|
|
55
|
+
}
|
|
@@ -0,0 +1,55 @@
|
|
|
1
|
+
/**
|
|
2
|
+
* Core Migration: Workflow Instances Table
|
|
3
|
+
*
|
|
4
|
+
* Creates the __donkeylabs_workflow_instances__ table for workflow persistence in the shared database.
|
|
5
|
+
* This is a critical migration as workflows previously had NO persistence (in-memory only).
|
|
6
|
+
*/
|
|
7
|
+
|
|
8
|
+
import type { Kysely } from "kysely";
|
|
9
|
+
|
|
10
|
+
export async function up(db: Kysely<any>): Promise<void> {
|
|
11
|
+
await db.schema
|
|
12
|
+
.createTable("__donkeylabs_workflow_instances__")
|
|
13
|
+
.ifNotExists()
|
|
14
|
+
.addColumn("id", "text", (col) => col.primaryKey())
|
|
15
|
+
.addColumn("workflow_name", "text", (col) => col.notNull())
|
|
16
|
+
.addColumn("status", "text", (col) => col.notNull().defaultTo("pending"))
|
|
17
|
+
.addColumn("current_step", "text")
|
|
18
|
+
.addColumn("input", "text", (col) => col.notNull())
|
|
19
|
+
.addColumn("output", "text")
|
|
20
|
+
.addColumn("error", "text")
|
|
21
|
+
.addColumn("step_results", "text", (col) => col.notNull().defaultTo("{}"))
|
|
22
|
+
.addColumn("branch_instances", "text")
|
|
23
|
+
.addColumn("created_at", "text", (col) => col.notNull())
|
|
24
|
+
.addColumn("started_at", "text")
|
|
25
|
+
.addColumn("completed_at", "text")
|
|
26
|
+
.addColumn("parent_id", "text")
|
|
27
|
+
.addColumn("branch_name", "text")
|
|
28
|
+
.execute();
|
|
29
|
+
|
|
30
|
+
// Create indexes for efficient queries
|
|
31
|
+
await db.schema
|
|
32
|
+
.createIndex("idx_donkeylabs_workflows_name")
|
|
33
|
+
.ifNotExists()
|
|
34
|
+
.on("__donkeylabs_workflow_instances__")
|
|
35
|
+
.column("workflow_name")
|
|
36
|
+
.execute();
|
|
37
|
+
|
|
38
|
+
await db.schema
|
|
39
|
+
.createIndex("idx_donkeylabs_workflows_status")
|
|
40
|
+
.ifNotExists()
|
|
41
|
+
.on("__donkeylabs_workflow_instances__")
|
|
42
|
+
.column("status")
|
|
43
|
+
.execute();
|
|
44
|
+
|
|
45
|
+
await db.schema
|
|
46
|
+
.createIndex("idx_donkeylabs_workflows_parent")
|
|
47
|
+
.ifNotExists()
|
|
48
|
+
.on("__donkeylabs_workflow_instances__")
|
|
49
|
+
.column("parent_id")
|
|
50
|
+
.execute();
|
|
51
|
+
}
|
|
52
|
+
|
|
53
|
+
export async function down(db: Kysely<any>): Promise<void> {
|
|
54
|
+
await db.schema.dropTable("__donkeylabs_workflow_instances__").ifExists().execute();
|
|
55
|
+
}
|