@donkeylabs/server 0.5.0 → 0.5.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,287 @@
1
+ /**
2
+ * Built-in SQLite Job Adapter
3
+ *
4
+ * Provides automatic persistence for jobs, enabling server restart resilience
5
+ * for external jobs without requiring user configuration.
6
+ */
7
+
8
import { Database } from "bun:sqlite";
import { mkdirSync } from "node:fs";
import { mkdir } from "node:fs/promises";
import { dirname } from "node:path";
import type { ExternalJobProcessState } from "./external-jobs";
import type { Job, JobAdapter, JobStatus } from "./jobs";
13
+
14
/**
 * Configuration for {@link SqliteJobAdapter}.
 *
 * All fields are optional; defaults are chosen so the adapter works with
 * zero configuration.
 */
export interface SqliteJobAdapterConfig {
  /** Path to SQLite database file (default: `.donkeylabs/jobs.db`). */
  path?: string;
  /**
   * Auto-cleanup completed/failed jobs older than N days.
   * Default: 7. Set to 0 to disable cleanup entirely.
   */
  cleanupDays?: number;
  /** Cleanup interval in milliseconds (default: 3600000 = 1 hour). */
  cleanupInterval?: number;
}
22
+
23
+ export class SqliteJobAdapter implements JobAdapter {
24
+ private db: Database;
25
+ private initialized = false;
26
+ private cleanupTimer?: ReturnType<typeof setInterval>;
27
+ private cleanupDays: number;
28
+
29
+ constructor(config: SqliteJobAdapterConfig = {}) {
30
+ const dbPath = config.path ?? ".donkeylabs/jobs.db";
31
+ this.cleanupDays = config.cleanupDays ?? 7;
32
+
33
+ // Ensure directory exists
34
+ this.ensureDir(dbPath);
35
+
36
+ this.db = new Database(dbPath);
37
+ this.init();
38
+
39
+ // Start cleanup timer
40
+ if (this.cleanupDays > 0) {
41
+ const interval = config.cleanupInterval ?? 3600000; // 1 hour
42
+ this.cleanupTimer = setInterval(() => this.cleanup(), interval);
43
+ // Run cleanup on startup
44
+ this.cleanup();
45
+ }
46
+ }
47
+
48
+ private ensureDir(dbPath: string): void {
49
+ const dir = dirname(dbPath);
50
+ if (dir && dir !== ".") {
51
+ // Sync mkdir for constructor
52
+ try {
53
+ Bun.spawnSync(["mkdir", "-p", dir]);
54
+ } catch {
55
+ // Directory may already exist
56
+ }
57
+ }
58
+ }
59
+
60
+ private init(): void {
61
+ if (this.initialized) return;
62
+
63
+ this.db.run(`
64
+ CREATE TABLE IF NOT EXISTS jobs (
65
+ id TEXT PRIMARY KEY,
66
+ name TEXT NOT NULL,
67
+ data TEXT NOT NULL,
68
+ status TEXT NOT NULL DEFAULT 'pending',
69
+ created_at TEXT NOT NULL,
70
+ run_at TEXT,
71
+ started_at TEXT,
72
+ completed_at TEXT,
73
+ result TEXT,
74
+ error TEXT,
75
+ attempts INTEGER NOT NULL DEFAULT 0,
76
+ max_attempts INTEGER NOT NULL DEFAULT 3,
77
+ -- External job fields
78
+ external INTEGER DEFAULT 0,
79
+ pid INTEGER,
80
+ socket_path TEXT,
81
+ tcp_port INTEGER,
82
+ last_heartbeat TEXT,
83
+ process_state TEXT
84
+ )
85
+ `);
86
+
87
+ // Indexes for efficient queries
88
+ this.db.run(`CREATE INDEX IF NOT EXISTS idx_jobs_status ON jobs(status)`);
89
+ this.db.run(`CREATE INDEX IF NOT EXISTS idx_jobs_name ON jobs(name)`);
90
+ this.db.run(`CREATE INDEX IF NOT EXISTS idx_jobs_external ON jobs(external, status)`);
91
+ this.db.run(`CREATE INDEX IF NOT EXISTS idx_jobs_scheduled ON jobs(status, run_at)`);
92
+
93
+ this.initialized = true;
94
+ }
95
+
96
+ async create(job: Omit<Job, "id">): Promise<Job> {
97
+ const id = `job_${Date.now()}_${Math.random().toString(36).slice(2, 9)}`;
98
+
99
+ this.db.run(
100
+ `INSERT INTO jobs (
101
+ id, name, data, status, created_at, run_at, attempts, max_attempts,
102
+ external, process_state
103
+ ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`,
104
+ [
105
+ id,
106
+ job.name,
107
+ JSON.stringify(job.data),
108
+ job.status,
109
+ job.createdAt.toISOString(),
110
+ job.runAt?.toISOString() ?? null,
111
+ job.attempts,
112
+ job.maxAttempts,
113
+ job.external ? 1 : 0,
114
+ job.processState ?? null,
115
+ ]
116
+ );
117
+
118
+ return { ...job, id };
119
+ }
120
+
121
+ async get(jobId: string): Promise<Job | null> {
122
+ const row = this.db.query(`SELECT * FROM jobs WHERE id = ?`).get(jobId) as any;
123
+ if (!row) return null;
124
+ return this.rowToJob(row);
125
+ }
126
+
127
+ async update(jobId: string, updates: Partial<Job>): Promise<void> {
128
+ const sets: string[] = [];
129
+ const values: any[] = [];
130
+
131
+ if (updates.status !== undefined) {
132
+ sets.push("status = ?");
133
+ values.push(updates.status);
134
+ }
135
+ if (updates.startedAt !== undefined) {
136
+ sets.push("started_at = ?");
137
+ values.push(updates.startedAt?.toISOString() ?? null);
138
+ }
139
+ if (updates.completedAt !== undefined) {
140
+ sets.push("completed_at = ?");
141
+ values.push(updates.completedAt?.toISOString() ?? null);
142
+ }
143
+ if (updates.result !== undefined) {
144
+ sets.push("result = ?");
145
+ values.push(JSON.stringify(updates.result));
146
+ }
147
+ if (updates.error !== undefined) {
148
+ sets.push("error = ?");
149
+ values.push(updates.error);
150
+ }
151
+ if (updates.attempts !== undefined) {
152
+ sets.push("attempts = ?");
153
+ values.push(updates.attempts);
154
+ }
155
+ // External job fields
156
+ if (updates.pid !== undefined) {
157
+ sets.push("pid = ?");
158
+ values.push(updates.pid);
159
+ }
160
+ if (updates.socketPath !== undefined) {
161
+ sets.push("socket_path = ?");
162
+ values.push(updates.socketPath);
163
+ }
164
+ if (updates.tcpPort !== undefined) {
165
+ sets.push("tcp_port = ?");
166
+ values.push(updates.tcpPort);
167
+ }
168
+ if (updates.lastHeartbeat !== undefined) {
169
+ sets.push("last_heartbeat = ?");
170
+ values.push(updates.lastHeartbeat?.toISOString() ?? null);
171
+ }
172
+ if (updates.processState !== undefined) {
173
+ sets.push("process_state = ?");
174
+ values.push(updates.processState);
175
+ }
176
+
177
+ if (sets.length === 0) return;
178
+
179
+ values.push(jobId);
180
+ this.db.run(`UPDATE jobs SET ${sets.join(", ")} WHERE id = ?`, values);
181
+ }
182
+
183
+ async delete(jobId: string): Promise<boolean> {
184
+ const result = this.db.run(`DELETE FROM jobs WHERE id = ?`, [jobId]);
185
+ return result.changes > 0;
186
+ }
187
+
188
+ async getPending(limit: number = 100): Promise<Job[]> {
189
+ const rows = this.db
190
+ .query(`SELECT * FROM jobs WHERE status = 'pending' ORDER BY created_at LIMIT ?`)
191
+ .all(limit) as any[];
192
+ return rows.map((r) => this.rowToJob(r));
193
+ }
194
+
195
+ async getScheduledReady(now: Date): Promise<Job[]> {
196
+ const rows = this.db
197
+ .query(`SELECT * FROM jobs WHERE status = 'scheduled' AND run_at <= ? ORDER BY run_at`)
198
+ .all(now.toISOString()) as any[];
199
+ return rows.map((r) => this.rowToJob(r));
200
+ }
201
+
202
+ async getByName(name: string, status?: JobStatus): Promise<Job[]> {
203
+ let query = `SELECT * FROM jobs WHERE name = ?`;
204
+ const params: any[] = [name];
205
+
206
+ if (status) {
207
+ query += ` AND status = ?`;
208
+ params.push(status);
209
+ }
210
+
211
+ query += ` ORDER BY created_at DESC`;
212
+
213
+ const rows = this.db.query(query).all(...params) as any[];
214
+ return rows.map((r) => this.rowToJob(r));
215
+ }
216
+
217
+ async getRunningExternal(): Promise<Job[]> {
218
+ const rows = this.db
219
+ .query(`SELECT * FROM jobs WHERE external = 1 AND status = 'running'`)
220
+ .all() as any[];
221
+ return rows.map((r) => this.rowToJob(r));
222
+ }
223
+
224
+ async getOrphanedExternal(): Promise<Job[]> {
225
+ // Get external jobs that were running when server died
226
+ const rows = this.db
227
+ .query(
228
+ `SELECT * FROM jobs WHERE external = 1 AND status = 'running'
229
+ AND (process_state = 'running' OR process_state = 'orphaned' OR process_state = 'spawning')`
230
+ )
231
+ .all() as any[];
232
+ return rows.map((r) => this.rowToJob(r));
233
+ }
234
+
235
+ private rowToJob(row: any): Job {
236
+ return {
237
+ id: row.id,
238
+ name: row.name,
239
+ data: JSON.parse(row.data),
240
+ status: row.status as JobStatus,
241
+ createdAt: new Date(row.created_at),
242
+ runAt: row.run_at ? new Date(row.run_at) : undefined,
243
+ startedAt: row.started_at ? new Date(row.started_at) : undefined,
244
+ completedAt: row.completed_at ? new Date(row.completed_at) : undefined,
245
+ result: row.result ? JSON.parse(row.result) : undefined,
246
+ error: row.error ?? undefined,
247
+ attempts: row.attempts,
248
+ maxAttempts: row.max_attempts,
249
+ // External job fields
250
+ external: row.external === 1 ? true : undefined,
251
+ pid: row.pid ?? undefined,
252
+ socketPath: row.socket_path ?? undefined,
253
+ tcpPort: row.tcp_port ?? undefined,
254
+ lastHeartbeat: row.last_heartbeat ? new Date(row.last_heartbeat) : undefined,
255
+ processState: row.process_state as ExternalJobProcessState | undefined,
256
+ };
257
+ }
258
+
259
+ /** Clean up old completed/failed jobs */
260
+ private cleanup(): void {
261
+ if (this.cleanupDays <= 0) return;
262
+
263
+ try {
264
+ const cutoff = new Date();
265
+ cutoff.setDate(cutoff.getDate() - this.cleanupDays);
266
+
267
+ const result = this.db.run(
268
+ `DELETE FROM jobs WHERE (status = 'completed' OR status = 'failed') AND completed_at < ?`,
269
+ [cutoff.toISOString()]
270
+ );
271
+
272
+ if (result.changes > 0) {
273
+ console.log(`[Jobs] Cleaned up ${result.changes} old jobs`);
274
+ }
275
+ } catch (err) {
276
+ console.error("[Jobs] Cleanup error:", err);
277
+ }
278
+ }
279
+
280
+ /** Stop the adapter and cleanup timer */
281
+ stop(): void {
282
+ if (this.cleanupTimer) {
283
+ clearInterval(this.cleanupTimer);
284
+ this.cleanupTimer = undefined;
285
+ }
286
+ }
287
+ }
package/src/core/jobs.ts CHANGED
@@ -25,6 +25,7 @@ import {
25
25
  createExternalJobSocketServer,
26
26
  type ExternalJobSocketServer,
27
27
  } from "./external-job-socket";
28
+ import { SqliteJobAdapter } from "./job-adapter-sqlite";
28
29
 
29
30
  export type JobStatus = "pending" | "running" | "completed" | "failed" | "scheduled";
30
31
 
@@ -82,6 +83,13 @@ export interface JobsConfig {
82
83
  maxAttempts?: number; // Default retry attempts, default 3
83
84
  /** External jobs configuration */
84
85
  external?: ExternalJobsConfig;
86
+ /**
87
+ * Use SQLite for persistence (default: true when external jobs are used)
88
+ * Set to false to use MemoryJobAdapter (not recommended for production)
89
+ */
90
+ persist?: boolean;
91
+ /** SQLite database path (default: .donkeylabs/jobs.db) */
92
+ dbPath?: string;
85
93
  }
86
94
 
87
95
  export interface Jobs {
@@ -188,6 +196,7 @@ export class MemoryJobAdapter implements JobAdapter {
188
196
 
189
197
  class JobsImpl implements Jobs {
190
198
  private adapter: JobAdapter;
199
+ private sqliteAdapter?: SqliteJobAdapter;
191
200
  private events?: Events;
192
201
  private handlers = new Map<string, JobHandler>();
193
202
  private running = false;
@@ -197,6 +206,8 @@ class JobsImpl implements Jobs {
197
206
  private concurrency: number;
198
207
  private pollInterval: number;
199
208
  private defaultMaxAttempts: number;
209
+ private usePersistence: boolean;
210
+ private dbPath?: string;
200
211
 
201
212
  // External jobs support
202
213
  private externalConfigs = new Map<string, ExternalJobConfig>();
@@ -205,12 +216,23 @@ class JobsImpl implements Jobs {
205
216
  private externalProcesses = new Map<string, { pid: number; timeout?: ReturnType<typeof setTimeout> }>();
206
217
 
207
218
  constructor(config: JobsConfig = {}) {
208
- this.adapter = config.adapter ?? new MemoryJobAdapter();
209
219
  this.events = config.events;
210
220
  this.concurrency = config.concurrency ?? 5;
211
221
  this.pollInterval = config.pollInterval ?? 1000;
212
222
  this.defaultMaxAttempts = config.maxAttempts ?? 3;
213
223
  this.externalConfig = config.external ?? {};
224
+ this.usePersistence = config.persist ?? true; // Default to SQLite persistence
225
+ this.dbPath = config.dbPath;
226
+
227
+ // Use provided adapter, or create SQLite adapter if persistence enabled
228
+ if (config.adapter) {
229
+ this.adapter = config.adapter;
230
+ } else if (this.usePersistence) {
231
+ this.sqliteAdapter = new SqliteJobAdapter({ path: this.dbPath });
232
+ this.adapter = this.sqliteAdapter;
233
+ } else {
234
+ this.adapter = new MemoryJobAdapter();
235
+ }
214
236
  }
215
237
 
216
238
  register<T = any, R = any>(name: string, handler: JobHandler<T, R>): void {
@@ -362,6 +384,11 @@ class JobsImpl implements Jobs {
362
384
  this.socketServer = null;
363
385
  }
364
386
 
387
+ // Stop SQLite adapter cleanup timer
388
+ if (this.sqliteAdapter) {
389
+ this.sqliteAdapter.stop();
390
+ }
391
+
365
392
  // Wait for active in-process jobs to complete (with timeout)
366
393
  const maxWait = 30000; // 30 seconds
367
394
  const startTime = Date.now();
@@ -476,6 +503,9 @@ class JobsImpl implements Jobs {
476
503
  console.log(`[Jobs] Found orphaned job ${job.id} with PID ${job.pid}, attempting reconnect`);
477
504
  activeJobIds.add(job.id);
478
505
 
506
+ // Reserve the socket path/port to prevent new jobs from using it
507
+ this.socketServer?.reserve(job.id, job.socketPath, job.tcpPort);
508
+
479
509
  // Try to reconnect to the socket
480
510
  const reconnected = await this.socketServer?.reconnect(
481
511
  job.id,
@@ -496,7 +526,7 @@ class JobsImpl implements Jobs {
496
526
  });
497
527
  }
498
528
  } else {
499
- // Mark as orphaned, but keep tracking
529
+ // Mark as orphaned, but keep tracking (reservation remains)
500
530
  await this.adapter.update(job.id, { processState: "orphaned" });
501
531
 
502
532
  if (this.events) {
@@ -507,7 +537,7 @@ class JobsImpl implements Jobs {
507
537
  }
508
538
  }
509
539
  } else {
510
- // Process is dead, mark job as failed
540
+ // Process is dead, mark job as failed and release any reservations
511
541
  console.log(`[Jobs] Orphaned job ${job.id} process (PID ${job.pid}) is dead`);
512
542
  await this.adapter.update(job.id, {
513
543
  status: "failed",
@@ -515,6 +545,9 @@ class JobsImpl implements Jobs {
515
545
  completedAt: new Date(),
516
546
  });
517
547
 
548
+ // Release reservation since the job is done
549
+ this.socketServer?.release(job.id);
550
+
518
551
  if (this.events) {
519
552
  await this.events.emit("job.failed", {
520
553
  jobId: job.id,
package/src/core.ts CHANGED
@@ -1,4 +1,4 @@
1
- import type { Kysely } from "kysely";
1
+ import { sql, type Kysely } from "kysely";
2
2
  import { readdir } from "node:fs/promises";
3
3
  import { join } from "node:path";
4
4
  import type { z } from "zod";
@@ -355,8 +355,53 @@ export class PluginManager {
355
355
  this.plugins.set(plugin.name, plugin);
356
356
  }
357
357
 
358
+ /**
359
+ * Ensures the migrations tracking table exists.
360
+ * This table tracks which migrations have been applied for each plugin.
361
+ */
362
+ private async ensureMigrationsTable(): Promise<void> {
363
+ await this.core.db.schema
364
+ .createTable("__donkeylabs_migrations__")
365
+ .ifNotExists()
366
+ .addColumn("id", "integer", (col) => col.primaryKey().autoIncrement())
367
+ .addColumn("plugin_name", "text", (col) => col.notNull())
368
+ .addColumn("migration_name", "text", (col) => col.notNull())
369
+ .addColumn("executed_at", "text", (col) => col.defaultTo(sql`CURRENT_TIMESTAMP`))
370
+ .execute();
371
+
372
+ // Create unique index for plugin_name + migration_name (if not exists)
373
+ // Using raw SQL since Kysely doesn't have ifNotExists for indexes
374
+ await sql`CREATE UNIQUE INDEX IF NOT EXISTS idx_migrations_unique
375
+ ON __donkeylabs_migrations__(plugin_name, migration_name)`.execute(this.core.db);
376
+ }
377
+
378
+ /**
379
+ * Checks if a migration has already been applied for a specific plugin.
380
+ */
381
+ private async isMigrationApplied(pluginName: string, migrationName: string): Promise<boolean> {
382
+ const result = await sql<{ count: number }>`
383
+ SELECT COUNT(*) as count FROM __donkeylabs_migrations__
384
+ WHERE plugin_name = ${pluginName} AND migration_name = ${migrationName}
385
+ `.execute(this.core.db);
386
+ return (result.rows[0]?.count ?? 0) > 0;
387
+ }
388
+
389
+ /**
390
+ * Records that a migration has been applied for a specific plugin.
391
+ */
392
+ private async recordMigration(pluginName: string, migrationName: string): Promise<void> {
393
+ await sql`
394
+ INSERT INTO __donkeylabs_migrations__ (plugin_name, migration_name)
395
+ VALUES (${pluginName}, ${migrationName})
396
+ `.execute(this.core.db);
397
+ }
398
+
358
399
  async migrate(): Promise<void> {
359
400
  console.log("Running migrations (File-System Based)...");
401
+
402
+ // Ensure the migrations tracking table exists
403
+ await this.ensureMigrationsTable();
404
+
360
405
  const sortedPlugins = this.resolveOrder();
361
406
 
362
407
  for (const plugin of sortedPlugins) {
@@ -392,22 +437,46 @@ export class PluginManager {
392
437
  console.log(`[Migration] checking plugin: ${pluginName} at ${migrationDir}`);
393
438
 
394
439
  for (const file of migrationFiles.sort()) {
440
+ // Check if this migration has already been applied
441
+ const isApplied = await this.isMigrationApplied(pluginName, file);
442
+ if (isApplied) {
443
+ console.log(` - Skipping (already applied): ${file}`);
444
+ continue;
445
+ }
446
+
395
447
  console.log(` - Executing migration: ${file}`);
396
448
  const migrationPath = join(migrationDir, file);
397
- const migration = await import(migrationPath);
449
+
450
+ let migration;
451
+ try {
452
+ migration = await import(migrationPath);
453
+ } catch (importError) {
454
+ const err = importError instanceof Error ? importError : new Error(String(importError));
455
+ throw new Error(`Failed to import migration ${file}: ${err.message}`);
456
+ }
398
457
 
399
458
  if (migration.up) {
400
459
  try {
401
460
  await migration.up(this.core.db);
461
+ // Record successful migration
462
+ await this.recordMigration(pluginName, file);
402
463
  console.log(` Success`);
403
464
  } catch (e) {
404
465
  console.error(` Failed to run ${file}:`, e);
466
+ throw e; // Stop on migration failure - don't continue with inconsistent state
405
467
  }
406
468
  }
407
469
  }
408
470
  }
409
- } catch {
410
- // Migration directory doesn't exist, skip
471
+ } catch (e) {
472
+ // Re-throw migration execution errors (they've already been logged)
473
+ // Only silently catch directory read errors (ENOENT)
474
+ const isDirectoryError = e instanceof Error &&
475
+ ((e as NodeJS.ErrnoException).code === 'ENOENT' ||
476
+ (e as NodeJS.ErrnoException).code === 'ENOTDIR');
477
+ if (!isDirectoryError) {
478
+ throw e;
479
+ }
411
480
  }
412
481
  }
413
482
  }
package/src/index.ts CHANGED
@@ -45,6 +45,7 @@ export {
45
45
  type InferHandlers,
46
46
  type InferMiddleware,
47
47
  type InferDependencies,
48
+ type EventSchemas,
48
49
  } from "./core";
49
50
 
50
51
  // Middleware
@@ -72,3 +73,14 @@ export function defineConfig(config: DonkeylabsConfig): DonkeylabsConfig {
72
73
 
73
74
  // Re-export HttpError for custom error creation
74
75
  export { HttpError } from "./core/errors";
76
+
77
+ // Workflows (step functions)
78
+ export {
79
+ workflow,
80
+ WorkflowBuilder,
81
+ type WorkflowDefinition,
82
+ type WorkflowInstance,
83
+ type WorkflowStatus,
84
+ type WorkflowContext,
85
+ type Workflows,
86
+ } from "./core/workflows";