@donkeylabs/server 0.6.4 → 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,255 @@
1
+ /**
2
+ * Kysely Workflow Adapter
3
+ *
4
+ * Implements the WorkflowAdapter interface using Kysely for the shared app database.
5
+ * This provides persistence for workflows, which previously had NO persistence (in-memory only).
6
+ */
7
+
8
+ import type { Kysely } from "kysely";
9
+ import type { WorkflowAdapter, WorkflowInstance, WorkflowStatus, StepResult } from "./workflows";
10
+
11
/**
 * Configuration options for {@link KyselyWorkflowAdapter}.
 */
export interface KyselyWorkflowAdapterConfig {
  /** Auto-cleanup completed workflows older than N days (default: 30, 0 to disable) */
  cleanupDays?: number;
  /** Cleanup interval in ms (default: 3600000 = 1 hour) */
  cleanupInterval?: number;
}
17
+
18
// Table type for Kysely.
// Row shape of `__donkeylabs_workflow_instances__`. Structured fields (input,
// output, step_results, branch_instances) are stored as JSON strings; all
// timestamps are ISO-8601 strings (see createInstance / rowToInstance).
interface WorkflowInstancesTable {
  id: string;
  workflow_name: string;
  // Serialized WorkflowStatus (e.g. "running", "completed", "failed").
  status: string;
  current_step: string | null;
  // JSON-encoded workflow input.
  input: string;
  // JSON-encoded workflow output; null until the workflow produces one.
  output: string | null;
  error: string | null;
  // JSON-encoded Record<string, StepResult>.
  step_results: string;
  // JSON-encoded branch-instance map; null when the workflow has no branches.
  branch_instances: string | null;
  created_at: string;
  started_at: string | null;
  completed_at: string | null;
  // Set on branch instances: the parent workflow instance and branch name.
  parent_id: string | null;
  branch_name: string | null;
}
35
+
36
// Minimal Kysely database schema for this adapter: only the single table it
// owns. The caller's wider schema is narrowed to this in the constructor.
interface Database {
  __donkeylabs_workflow_instances__: WorkflowInstancesTable;
}
39
+
40
+ export class KyselyWorkflowAdapter implements WorkflowAdapter {
41
+ private db: Kysely<Database>;
42
+ private cleanupTimer?: ReturnType<typeof setInterval>;
43
+ private cleanupDays: number;
44
+
45
+ constructor(db: Kysely<any>, config: KyselyWorkflowAdapterConfig = {}) {
46
+ this.db = db as Kysely<Database>;
47
+ this.cleanupDays = config.cleanupDays ?? 30;
48
+
49
+ // Start cleanup timer
50
+ if (this.cleanupDays > 0) {
51
+ const interval = config.cleanupInterval ?? 3600000; // 1 hour
52
+ this.cleanupTimer = setInterval(() => this.cleanup(), interval);
53
+ // Run cleanup on startup
54
+ this.cleanup();
55
+ }
56
+ }
57
+
58
+ async createInstance(instance: Omit<WorkflowInstance, "id">): Promise<WorkflowInstance> {
59
+ const id = `wf_${Date.now()}_${Math.random().toString(36).slice(2, 9)}`;
60
+
61
+ await this.db
62
+ .insertInto("__donkeylabs_workflow_instances__")
63
+ .values({
64
+ id,
65
+ workflow_name: instance.workflowName,
66
+ status: instance.status,
67
+ current_step: instance.currentStep ?? null,
68
+ input: JSON.stringify(instance.input),
69
+ output: instance.output !== undefined ? JSON.stringify(instance.output) : null,
70
+ error: instance.error ?? null,
71
+ step_results: JSON.stringify(instance.stepResults),
72
+ branch_instances: instance.branchInstances
73
+ ? JSON.stringify(instance.branchInstances)
74
+ : null,
75
+ created_at: instance.createdAt.toISOString(),
76
+ started_at: instance.startedAt?.toISOString() ?? null,
77
+ completed_at: instance.completedAt?.toISOString() ?? null,
78
+ parent_id: instance.parentId ?? null,
79
+ branch_name: instance.branchName ?? null,
80
+ })
81
+ .execute();
82
+
83
+ return { ...instance, id };
84
+ }
85
+
86
+ async getInstance(instanceId: string): Promise<WorkflowInstance | null> {
87
+ const row = await this.db
88
+ .selectFrom("__donkeylabs_workflow_instances__")
89
+ .selectAll()
90
+ .where("id", "=", instanceId)
91
+ .executeTakeFirst();
92
+
93
+ if (!row) return null;
94
+ return this.rowToInstance(row);
95
+ }
96
+
97
+ async updateInstance(instanceId: string, updates: Partial<WorkflowInstance>): Promise<void> {
98
+ const updateData: Partial<WorkflowInstancesTable> = {};
99
+
100
+ if (updates.status !== undefined) {
101
+ updateData.status = updates.status;
102
+ }
103
+ if (updates.currentStep !== undefined) {
104
+ updateData.current_step = updates.currentStep ?? null;
105
+ }
106
+ if (updates.output !== undefined) {
107
+ updateData.output = JSON.stringify(updates.output);
108
+ }
109
+ if (updates.error !== undefined) {
110
+ updateData.error = updates.error;
111
+ }
112
+ if (updates.stepResults !== undefined) {
113
+ updateData.step_results = JSON.stringify(updates.stepResults);
114
+ }
115
+ if (updates.branchInstances !== undefined) {
116
+ updateData.branch_instances = updates.branchInstances
117
+ ? JSON.stringify(updates.branchInstances)
118
+ : null;
119
+ }
120
+ if (updates.startedAt !== undefined) {
121
+ updateData.started_at = updates.startedAt?.toISOString() ?? null;
122
+ }
123
+ if (updates.completedAt !== undefined) {
124
+ updateData.completed_at = updates.completedAt?.toISOString() ?? null;
125
+ }
126
+
127
+ if (Object.keys(updateData).length === 0) return;
128
+
129
+ await this.db
130
+ .updateTable("__donkeylabs_workflow_instances__")
131
+ .set(updateData)
132
+ .where("id", "=", instanceId)
133
+ .execute();
134
+ }
135
+
136
+ async deleteInstance(instanceId: string): Promise<boolean> {
137
+ // Check if exists first since BunSqliteDialect doesn't report numDeletedRows properly
138
+ const exists = await this.db
139
+ .selectFrom("__donkeylabs_workflow_instances__")
140
+ .select("id")
141
+ .where("id", "=", instanceId)
142
+ .executeTakeFirst();
143
+
144
+ if (!exists) return false;
145
+
146
+ await this.db
147
+ .deleteFrom("__donkeylabs_workflow_instances__")
148
+ .where("id", "=", instanceId)
149
+ .execute();
150
+
151
+ return true;
152
+ }
153
+
154
+ async getInstancesByWorkflow(
155
+ workflowName: string,
156
+ status?: WorkflowStatus
157
+ ): Promise<WorkflowInstance[]> {
158
+ let query = this.db
159
+ .selectFrom("__donkeylabs_workflow_instances__")
160
+ .selectAll()
161
+ .where("workflow_name", "=", workflowName);
162
+
163
+ if (status) {
164
+ query = query.where("status", "=", status);
165
+ }
166
+
167
+ const rows = await query.orderBy("created_at", "desc").execute();
168
+ return rows.map((r) => this.rowToInstance(r));
169
+ }
170
+
171
+ async getRunningInstances(): Promise<WorkflowInstance[]> {
172
+ const rows = await this.db
173
+ .selectFrom("__donkeylabs_workflow_instances__")
174
+ .selectAll()
175
+ .where("status", "=", "running")
176
+ .execute();
177
+
178
+ return rows.map((r) => this.rowToInstance(r));
179
+ }
180
+
181
+ private rowToInstance(row: WorkflowInstancesTable): WorkflowInstance {
182
+ // Parse step results with proper Date handling
183
+ const rawStepResults = JSON.parse(row.step_results);
184
+ const stepResults: Record<string, StepResult> = {};
185
+
186
+ for (const [key, value] of Object.entries(rawStepResults)) {
187
+ const sr = value as any;
188
+ stepResults[key] = {
189
+ stepName: sr.stepName,
190
+ status: sr.status,
191
+ input: sr.input,
192
+ output: sr.output,
193
+ error: sr.error,
194
+ startedAt: sr.startedAt ? new Date(sr.startedAt) : undefined,
195
+ completedAt: sr.completedAt ? new Date(sr.completedAt) : undefined,
196
+ attempts: sr.attempts,
197
+ };
198
+ }
199
+
200
+ return {
201
+ id: row.id,
202
+ workflowName: row.workflow_name,
203
+ status: row.status as WorkflowStatus,
204
+ currentStep: row.current_step ?? undefined,
205
+ input: JSON.parse(row.input),
206
+ output: row.output ? JSON.parse(row.output) : undefined,
207
+ error: row.error ?? undefined,
208
+ stepResults,
209
+ branchInstances: row.branch_instances ? JSON.parse(row.branch_instances) : undefined,
210
+ createdAt: new Date(row.created_at),
211
+ startedAt: row.started_at ? new Date(row.started_at) : undefined,
212
+ completedAt: row.completed_at ? new Date(row.completed_at) : undefined,
213
+ parentId: row.parent_id ?? undefined,
214
+ branchName: row.branch_name ?? undefined,
215
+ };
216
+ }
217
+
218
+ /** Clean up old completed/failed/cancelled workflows */
219
+ private async cleanup(): Promise<void> {
220
+ if (this.cleanupDays <= 0) return;
221
+
222
+ try {
223
+ const cutoff = new Date();
224
+ cutoff.setDate(cutoff.getDate() - this.cleanupDays);
225
+
226
+ const result = await this.db
227
+ .deleteFrom("__donkeylabs_workflow_instances__")
228
+ .where((eb) =>
229
+ eb.or([
230
+ eb("status", "=", "completed"),
231
+ eb("status", "=", "failed"),
232
+ eb("status", "=", "cancelled"),
233
+ eb("status", "=", "timed_out"),
234
+ ])
235
+ )
236
+ .where("completed_at", "<", cutoff.toISOString())
237
+ .execute();
238
+
239
+ const numDeleted = Number(result[0]?.numDeletedRows ?? 0);
240
+ if (numDeleted > 0) {
241
+ console.log(`[Workflows] Cleaned up ${numDeleted} old workflow instances`);
242
+ }
243
+ } catch (err) {
244
+ console.error("[Workflows] Cleanup error:", err);
245
+ }
246
+ }
247
+
248
+ /** Stop the adapter and cleanup timer */
249
+ stop(): void {
250
+ if (this.cleanupTimer) {
251
+ clearInterval(this.cleanupTimer);
252
+ this.cleanupTimer = undefined;
253
+ }
254
+ }
255
+ }
package/src/core.ts CHANGED
@@ -1,6 +1,8 @@
1
1
  import { sql, type Kysely } from "kysely";
2
+ import { existsSync } from "node:fs";
2
3
  import { readdir } from "node:fs/promises";
3
- import { join } from "node:path";
4
+ import { join, dirname } from "node:path";
5
+ import { fileURLToPath } from "node:url";
4
6
  import type { z } from "zod";
5
7
  import type { Logger } from "./core/logger";
6
8
  import type { Cache } from "./core/cache";
@@ -12,6 +14,8 @@ import type { RateLimiter } from "./core/rate-limiter";
12
14
  import type { Errors, CustomErrorRegistry } from "./core/errors";
13
15
  import type { Workflows } from "./core/workflows";
14
16
  import type { Processes } from "./core/processes";
17
+ import type { Audit } from "./core/audit";
18
+ import type { WebSocketService } from "./core/websocket";
15
19
 
16
20
  export interface PluginRegistry {}
17
21
 
@@ -56,6 +60,8 @@ export interface CoreServices {
56
60
  errors: Errors;
57
61
  workflows: Workflows;
58
62
  processes: Processes;
63
+ audit: Audit;
64
+ websocket: WebSocketService;
59
65
  }
60
66
 
61
67
  /**
@@ -398,12 +404,103 @@ export class PluginManager {
398
404
  `.execute(this.core.db);
399
405
  }
400
406
 
407
+ /**
408
+ * Get the directory where core migrations are stored.
409
+ * This handles both development (src) and production (dist) scenarios.
410
+ */
411
+ private getCoreMigrationsDir(): string {
412
+ // Try to find migrations relative to this file's location
413
+ const currentDir = dirname(fileURLToPath(import.meta.url));
414
+ const migrationsDir = join(currentDir, "core", "migrations");
415
+ return migrationsDir;
416
+ }
417
+
418
+ /**
419
+ * Run migrations for a specific plugin/service with the given migrations directory.
420
+ */
421
+ private async runMigrationsForPlugin(pluginName: string, migrationDir: string): Promise<void> {
422
+ try {
423
+ const files = await readdir(migrationDir);
424
+ const migrationFiles = files.filter((f) => f.endsWith(".ts") || f.endsWith(".js"));
425
+
426
+ if (migrationFiles.length === 0) return;
427
+
428
+ console.log(`[Migration] checking: ${pluginName} at ${migrationDir}`);
429
+
430
+ for (const file of migrationFiles.sort()) {
431
+ // Check if this migration has already been applied
432
+ const isApplied = await this.isMigrationApplied(pluginName, file);
433
+ if (isApplied) {
434
+ console.log(` - Skipping (already applied): ${file}`);
435
+ continue;
436
+ }
437
+
438
+ console.log(` - Executing migration: ${file}`);
439
+ const migrationPath = join(migrationDir, file);
440
+
441
+ let migration;
442
+ try {
443
+ migration = await import(migrationPath);
444
+ } catch (importError) {
445
+ const err = importError instanceof Error ? importError : new Error(String(importError));
446
+ throw new Error(`Failed to import migration ${file}: ${err.message}`);
447
+ }
448
+
449
+ if (migration.up) {
450
+ try {
451
+ await migration.up(this.core.db);
452
+ // Record successful migration
453
+ await this.recordMigration(pluginName, file);
454
+ console.log(` Success`);
455
+ } catch (e) {
456
+ console.error(` Failed to run ${file}:`, e);
457
+ throw e; // Stop on migration failure - don't continue with inconsistent state
458
+ }
459
+ }
460
+ }
461
+ } catch (e) {
462
+ // Re-throw migration execution errors (they've already been logged)
463
+ // Only silently catch directory read errors (ENOENT)
464
+ const isDirectoryError =
465
+ e instanceof Error &&
466
+ ((e as NodeJS.ErrnoException).code === "ENOENT" ||
467
+ (e as NodeJS.ErrnoException).code === "ENOTDIR");
468
+ if (!isDirectoryError) {
469
+ throw e;
470
+ }
471
+ }
472
+ }
473
+
474
+ /**
475
+ * Run core migrations for built-in services (jobs, processes, workflows, audit).
476
+ * Core migrations are tracked with @core/ prefix in the same migrations table.
477
+ * This runs BEFORE plugin migrations to ensure core tables exist.
478
+ */
479
+ async migrateCore(): Promise<void> {
480
+ console.log("Running core migrations...");
481
+
482
+ const coreMigrationsDir = this.getCoreMigrationsDir();
483
+ const coreServices = ["jobs", "processes", "workflows", "audit"];
484
+
485
+ for (const service of coreServices) {
486
+ const migrationDir = join(coreMigrationsDir, service);
487
+ if (existsSync(migrationDir)) {
488
+ await this.runMigrationsForPlugin(`@core/${service}`, migrationDir);
489
+ }
490
+ }
491
+
492
+ console.log("Core migrations complete.");
493
+ }
494
+
401
495
  async migrate(): Promise<void> {
402
496
  console.log("Running migrations (File-System Based)...");
403
497
 
404
498
  // Ensure the migrations tracking table exists
405
499
  await this.ensureMigrationsTable();
406
500
 
501
+ // Run core migrations FIRST (before plugin migrations)
502
+ await this.migrateCore();
503
+
407
504
  const sortedPlugins = this.resolveOrder();
408
505
 
409
506
  for (const plugin of sortedPlugins) {
package/src/harness.ts CHANGED
@@ -13,6 +13,11 @@ import {
13
13
  createErrors,
14
14
  createWorkflows,
15
15
  createProcesses,
16
+ createAudit,
17
+ createWebSocket,
18
+ KyselyJobAdapter,
19
+ KyselyWorkflowAdapter,
20
+ MemoryAuditAdapter,
16
21
  } from "./core/index";
17
22
 
18
23
  /**
@@ -27,18 +32,38 @@ export async function createTestHarness(targetPlugin: Plugin, dependencies: Plug
27
32
  dialect: new BunSqliteDialect({ database: new Database(":memory:") }),
28
33
  });
29
34
 
30
- // 2. Initialize Core Services
35
+ // 2. Initialize Core Services with Kysely adapters for in-memory testing
31
36
  const logger = createLogger({ level: "warn" }); // Less verbose in tests
32
37
  const cache = createCache();
33
38
  const events = createEvents();
34
39
  const cron = createCron();
35
- const jobs = createJobs({ events });
36
40
  const sse = createSSE();
37
41
  const rateLimiter = createRateLimiter();
38
42
  const errors = createErrors();
39
- const workflows = createWorkflows({ events, jobs, sse });
43
+
44
+ // Use Kysely adapters with in-memory DB for jobs and workflows
45
+ const jobAdapter = new KyselyJobAdapter(db, { cleanupDays: 0 }); // No cleanup in tests
46
+ const workflowAdapter = new KyselyWorkflowAdapter(db, { cleanupDays: 0 });
47
+
48
+ const jobs = createJobs({
49
+ events,
50
+ adapter: jobAdapter,
51
+ persist: false, // Using Kysely adapter
52
+ });
53
+
54
+ const workflows = createWorkflows({
55
+ events,
56
+ jobs,
57
+ sse,
58
+ adapter: workflowAdapter,
59
+ });
60
+
40
61
  const processes = createProcesses({ events, autoRecoverOrphans: false });
41
62
 
63
+ // Use in-memory adapter for audit in tests
64
+ const audit = createAudit({ adapter: new MemoryAuditAdapter() });
65
+ const websocket = createWebSocket();
66
+
42
67
  const core: CoreServices = {
43
68
  db,
44
69
  config: { env: "test" },
@@ -52,6 +77,8 @@ export async function createTestHarness(targetPlugin: Plugin, dependencies: Plug
52
77
  errors,
53
78
  workflows,
54
79
  processes,
80
+ audit,
81
+ websocket,
55
82
  };
56
83
 
57
84
  const manager = new PluginManager(core);
@@ -62,7 +89,7 @@ export async function createTestHarness(targetPlugin: Plugin, dependencies: Plug
62
89
  }
63
90
  manager.register(targetPlugin);
64
91
 
65
- // 4. Run Migrations (Real Kysely Migrations!)
92
+ // 4. Run Migrations (Core + Plugin Migrations!)
66
93
  await manager.migrate();
67
94
 
68
95
  // 5. Init Plugins
package/src/server.ts CHANGED
@@ -16,8 +16,14 @@ import {
16
16
  createErrors,
17
17
  createWorkflows,
18
18
  createProcesses,
19
+ createAudit,
20
+ createWebSocket,
19
21
  extractClientIP,
20
22
  HttpError,
23
+ KyselyJobAdapter,
24
+ KyselyProcessAdapter,
25
+ KyselyWorkflowAdapter,
26
+ KyselyAuditAdapter,
21
27
  type LoggerConfig,
22
28
  type CacheConfig,
23
29
  type EventsConfig,
@@ -28,6 +34,8 @@ import {
28
34
  type ErrorsConfig,
29
35
  type WorkflowsConfig,
30
36
  type ProcessesConfig,
37
+ type AuditConfig,
38
+ type WebSocketConfig,
31
39
  } from "./core/index";
32
40
  import { zodSchemaToTs } from "./generator/zod-to-ts";
33
41
 
@@ -63,6 +71,14 @@ export interface ServerConfig {
63
71
  errors?: ErrorsConfig;
64
72
  workflows?: WorkflowsConfig;
65
73
  processes?: ProcessesConfig;
74
+ audit?: AuditConfig;
75
+ websocket?: WebSocketConfig;
76
+ /**
77
+ * Use legacy separate databases for core services.
78
+ * Set to true to keep using .donkeylabs/*.db files instead of shared DB.
79
+ * @deprecated Migrate to shared DB for better consistency.
80
+ */
81
+ useLegacyCoreDatabases?: boolean;
66
82
  }
67
83
 
68
84
  export class AppServer {
@@ -76,26 +92,56 @@ export class AppServer {
76
92
  constructor(options: ServerConfig) {
77
93
  this.port = options.port ?? 3000;
78
94
 
95
+ // Determine if we should use legacy databases
96
+ const useLegacy = options.useLegacyCoreDatabases ?? false;
97
+
79
98
  // Initialize core services
80
99
  const logger = createLogger(options.logger);
81
100
  const cache = createCache(options.cache);
82
101
  const events = createEvents(options.events);
83
102
  const cron = createCron(options.cron);
84
- const jobs = createJobs({ ...options.jobs, events }); // Jobs can emit events
85
103
  const sse = createSSE(options.sse);
86
104
  const rateLimiter = createRateLimiter(options.rateLimiter);
87
105
  const errors = createErrors(options.errors);
106
+
107
+ // Create adapters - use Kysely by default, or legacy SQLite if requested
108
+ const jobAdapter = options.jobs?.adapter ?? (useLegacy ? undefined : new KyselyJobAdapter(options.db));
109
+ const workflowAdapter = options.workflows?.adapter ?? (useLegacy ? undefined : new KyselyWorkflowAdapter(options.db));
110
+ const auditAdapter = options.audit?.adapter ?? new KyselyAuditAdapter(options.db);
111
+
112
+ // Jobs can emit events and use Kysely adapter
113
+ const jobs = createJobs({
114
+ ...options.jobs,
115
+ events,
116
+ adapter: jobAdapter,
117
+ // Disable built-in persistence when using Kysely adapter
118
+ persist: useLegacy ? options.jobs?.persist : false,
119
+ });
120
+
121
+ // Workflows with Kysely adapter for persistence
88
122
  const workflows = createWorkflows({
89
123
  ...options.workflows,
90
124
  events,
91
125
  jobs,
92
126
  sse,
127
+ adapter: workflowAdapter,
93
128
  });
129
+
130
+ // Processes - still uses its own adapter pattern but can use Kysely
131
+ // Note: ProcessesImpl creates its own SqliteProcessAdapter internally
132
+ // For full Kysely support, we need to modify processes.ts
94
133
  const processes = createProcesses({
95
134
  ...options.processes,
96
135
  events,
97
136
  });
98
137
 
138
+ // New services
139
+ const audit = createAudit({
140
+ ...options.audit,
141
+ adapter: auditAdapter,
142
+ });
143
+ const websocket = createWebSocket(options.websocket);
144
+
99
145
  this.coreServices = {
100
146
  db: options.db,
101
147
  config: options.config ?? {},
@@ -109,6 +155,8 @@ export class AppServer {
109
155
  errors,
110
156
  workflows,
111
157
  processes,
158
+ audit,
159
+ websocket,
112
160
  };
113
161
 
114
162
  this.manager = new PluginManager(this.coreServices);
@@ -844,12 +892,18 @@ ${factoryFunction}
844
892
  // Stop SSE (closes all client connections)
845
893
  this.coreServices.sse.shutdown();
846
894
 
895
+ // Stop WebSocket connections
896
+ this.coreServices.websocket.shutdown();
897
+
847
898
  // Stop background services
848
899
  await this.coreServices.processes.shutdown();
849
900
  await this.coreServices.workflows.stop();
850
901
  await this.coreServices.jobs.stop();
851
902
  await this.coreServices.cron.stop();
852
903
 
904
+ // Stop audit service (cleanup timers)
905
+ this.coreServices.audit.stop();
906
+
853
907
  logger.info("Server shutdown complete");
854
908
  }
855
909
  }