@donkeylabs/server 2.0.21 → 2.0.22

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,398 @@
+/**
+ * Core Logs Service
+ *
+ * Persistent, filterable, event-driven logging.
+ * Writes log entries to a separate logs database with buffered writes.
+ * Emits events so users can build their own SSE routes or subscribers.
+ */
+
+import type { Events } from "./events";
+import type { LogLevel } from "./logger";
+
+// ============================================
+// Types
+// ============================================
+
+export type LogSource = "system" | "cron" | "job" | "workflow" | "plugin" | "route";
+
+export interface PersistentLogEntry {
+  id: string;
+  timestamp: Date;
+  level: LogLevel;
+  message: string;
+  source: LogSource;
+  sourceId?: string;
+  tags?: string[];
+  data?: Record<string, any>;
+  context?: Record<string, any>;
+}
+
+export interface LogsQueryFilters {
+  /** Filter by source type */
+  source?: LogSource;
+  /** Filter by source identifier */
+  sourceId?: string;
+  /** Filter by minimum log level */
+  level?: LogLevel;
+  /** Filter by tags (entries must contain all specified tags) */
+  tags?: string[];
+  /** Search message text (LIKE on message) */
+  search?: string;
+  /** Filter by date range (start) */
+  startDate?: Date;
+  /** Filter by date range (end) */
+  endDate?: Date;
+  /** Maximum number of results (default: 100) */
+  limit?: number;
+  /** Offset for pagination */
+  offset?: number;
+}
+
+export interface LogsRetentionConfig {
+  /** Default retention in days (default: 14) */
+  defaultDays?: number;
+  /** Per-source retention overrides in days */
+  bySource?: Partial<Record<LogSource, number>>;
+  /** Cleanup interval in ms (default: 86400000 = 24h) */
+  cleanupInterval?: number;
+}
+
+export interface LogsConfig {
+  /** Custom adapter (defaults to KyselyLogsAdapter) */
+  adapter?: LogsAdapter;
+  /** Events service for emitting log events */
+  events?: Events;
+  /** Retention configuration */
+  retention?: LogsRetentionConfig;
+  /** Minimum level for persistence (default: "info") */
+  minLevel?: LogLevel;
+  /** Buffer flush interval in ms (default: 50) */
+  flushInterval?: number;
+  /** Maximum buffer size before forced flush (default: 100) */
+  maxBufferSize?: number;
+  /** Database path (default: ".donkeylabs/logs.db") */
+  dbPath?: string;
+}
+
+// ============================================
+// Adapter Interface
+// ============================================
+
+export interface LogsAdapter {
+  /** Write a batch of log entries */
+  writeBatch(entries: PersistentLogEntry[]): Promise<void>;
+  /** Write a single log entry */
+  write(entry: PersistentLogEntry): Promise<void>;
+  /** Query log entries with filters */
+  query(filters: LogsQueryFilters): Promise<PersistentLogEntry[]>;
+  /** Get log entries by source */
+  getBySource(source: LogSource, sourceId?: string, limit?: number): Promise<PersistentLogEntry[]>;
+  /** Count log entries matching filters */
+  count(filters: LogsQueryFilters): Promise<number>;
+  /** Delete entries older than a given date, optionally for a specific source */
+  deleteOlderThan(date: Date, source?: LogSource): Promise<number>;
+  /** Stop the adapter (cleanup resources) */
+  stop(): void;
+}
+
+// ============================================
+// Service Interface
+// ============================================
+
+export interface Logs {
+  /** Write a log entry (synchronous, enqueues to buffer) */
+  write(entry: Omit<PersistentLogEntry, "id" | "timestamp">): void;
+  /** Query log entries with filters */
+  query(filters: LogsQueryFilters): Promise<PersistentLogEntry[]>;
+  /** Get log entries by source */
+  getBySource(source: LogSource, sourceId?: string, limit?: number): Promise<PersistentLogEntry[]>;
+  /** Count log entries matching filters */
+  count(filters: LogsQueryFilters): Promise<number>;
+  /** Flush the write buffer */
+  flush(): Promise<void>;
+  /** Stop the logs service (flush + cleanup) */
+  stop(): void;
+}
+
+// ============================================
+// Log Level Ordering
+// ============================================
+
+const LOG_LEVEL_VALUES: Record<LogLevel, number> = {
+  debug: 0,
+  info: 1,
+  warn: 2,
+  error: 3,
+};
+
+// ============================================
+// In-Memory Adapter (for testing)
+// ============================================
+
+export class MemoryLogsAdapter implements LogsAdapter {
+  private entries: PersistentLogEntry[] = [];
+
+  async writeBatch(entries: PersistentLogEntry[]): Promise<void> {
+    this.entries.push(...entries);
+  }
+
+  async write(entry: PersistentLogEntry): Promise<void> {
+    this.entries.push(entry);
+  }
+
+  async query(filters: LogsQueryFilters): Promise<PersistentLogEntry[]> {
+    let results = [...this.entries];
+
+    if (filters.source) {
+      results = results.filter((e) => e.source === filters.source);
+    }
+    if (filters.sourceId) {
+      results = results.filter((e) => e.sourceId === filters.sourceId);
+    }
+    if (filters.level) {
+      const minLevel = LOG_LEVEL_VALUES[filters.level];
+      results = results.filter((e) => LOG_LEVEL_VALUES[e.level] >= minLevel);
+    }
+    if (filters.tags && filters.tags.length > 0) {
+      results = results.filter(
+        (e) => e.tags && filters.tags!.every((t) => e.tags!.includes(t))
+      );
+    }
+    if (filters.search) {
+      const searchLower = filters.search.toLowerCase();
+      results = results.filter((e) =>
+        e.message.toLowerCase().includes(searchLower)
+      );
+    }
+    if (filters.startDate) {
+      results = results.filter((e) => e.timestamp >= filters.startDate!);
+    }
+    if (filters.endDate) {
+      results = results.filter((e) => e.timestamp <= filters.endDate!);
+    }
+
+    // Sort by timestamp descending (newest first)
+    results.sort((a, b) => b.timestamp.getTime() - a.timestamp.getTime());
+
+    const offset = filters.offset ?? 0;
+    const limit = filters.limit ?? 100;
+    return results.slice(offset, offset + limit);
+  }
+
+  async getBySource(
+    source: LogSource,
+    sourceId?: string,
+    limit: number = 100
+  ): Promise<PersistentLogEntry[]> {
+    return this.query({ source, sourceId, limit });
+  }
+
+  async count(filters: LogsQueryFilters): Promise<number> {
+    // Use a large explicit limit so query()'s default limit of 100 cannot cap the count
+    const results = await this.query({ ...filters, limit: Number.MAX_SAFE_INTEGER, offset: 0 });
+    return results.length;
+  }
+
+  async deleteOlderThan(date: Date, source?: LogSource): Promise<number> {
+    const before = this.entries.length;
+    this.entries = this.entries.filter((e) => {
+      if (source && e.source !== source) return true;
+      return e.timestamp >= date;
+    });
+    return before - this.entries.length;
+  }
+
+  stop(): void {
+    // No cleanup needed
+  }
+}
+
+// ============================================
+// Service Implementation
+// ============================================
+
+const MAX_BUFFER_OVERFLOW = 10_000;
+
+class LogsImpl implements Logs {
+  private adapter: LogsAdapter;
+  private events?: Events;
+  private buffer: PersistentLogEntry[] = [];
+  private flushTimer: ReturnType<typeof setInterval> | null = null;
+  private cleanupTimer: ReturnType<typeof setInterval> | null = null;
+  private maxBufferSize: number;
+  private minLevel: number;
+  private retention: LogsRetentionConfig;
+  private stopped = false;
+  private flushing = false;
+
+  constructor(config: LogsConfig = {}) {
+    this.adapter = config.adapter ?? new MemoryLogsAdapter();
+    this.events = config.events;
+    this.maxBufferSize = config.maxBufferSize ?? 100;
+    this.minLevel = LOG_LEVEL_VALUES[config.minLevel ?? "info"];
+    this.retention = config.retention ?? {};
+
+    // Start flush timer
+    const flushInterval = config.flushInterval ?? 50;
+    this.flushTimer = setInterval(() => {
+      this.flush().catch((err) => {
+        // Use console.error directly to avoid circular logging
+        console.error("[Logs] Flush error:", err);
+      });
+    }, flushInterval);
+
+    // Start retention cleanup timer
+    const cleanupInterval = this.retention.cleanupInterval ?? 86400000; // 24h
+    this.cleanupTimer = setInterval(() => {
+      this.runCleanup().catch((err) => {
+        console.error("[Logs] Cleanup error:", err);
+      });
+    }, cleanupInterval);
+  }
+
+  write(entry: Omit<PersistentLogEntry, "id" | "timestamp">): void {
+    if (this.stopped) return;
+
+    // Check minimum level
+    if (LOG_LEVEL_VALUES[entry.level] < this.minLevel) return;
+
+    const fullEntry: PersistentLogEntry = {
+      ...entry,
+      id: `log_${Date.now()}_${Math.random().toString(36).slice(2, 9)}`,
+      timestamp: new Date(),
+    };
+
+    this.buffer.push(fullEntry);
+
+    // Check buffer overflow
+    if (this.buffer.length > MAX_BUFFER_OVERFLOW) {
+      console.warn(
+        `[Logs] Buffer overflow (${this.buffer.length} entries), dropping oldest entries`
+      );
+      this.buffer = this.buffer.slice(-this.maxBufferSize);
+    }
+
+    // Flush if buffer is full
+    if (this.buffer.length >= this.maxBufferSize) {
+      this.flush().catch((err) => {
+        console.error("[Logs] Flush error:", err);
+      });
+    }
+  }
+
+  async query(filters: LogsQueryFilters): Promise<PersistentLogEntry[]> {
+    return this.adapter.query(filters);
+  }
+
+  async getBySource(
+    source: LogSource,
+    sourceId?: string,
+    limit?: number
+  ): Promise<PersistentLogEntry[]> {
+    return this.adapter.getBySource(source, sourceId, limit);
+  }
+
+  async count(filters: LogsQueryFilters): Promise<number> {
+    return this.adapter.count(filters);
+  }
+
+  async flush(): Promise<void> {
+    if (this.buffer.length === 0 || this.flushing) return;
+
+    this.flushing = true;
+    const entries = this.buffer.splice(0);
+
+    try {
+      await this.adapter.writeBatch(entries);
+
+      // Emit events for each entry after successful write
+      if (this.events) {
+        for (const entry of entries) {
+          try {
+            // All logs
+            await this.events.emit("log.created", entry);
+            // By source type: "log.cron", "log.workflow"
+            await this.events.emit(`log.${entry.source}`, entry);
+            // Specific: "log.cron.cleanup-task", "log.workflow.wf_123"
+            if (entry.sourceId) {
+              await this.events.emit(
+                `log.${entry.source}.${entry.sourceId}`,
+                entry
+              );
+            }
+          } catch (err) {
+            // Don't let event emission errors break the flush
+            console.error("[Logs] Event emission error:", err);
+          }
+        }
+      }
+    } catch (err) {
+      // Put entries back so the next flush retries them;
+      // log via console directly to avoid circular logging
+      this.buffer.unshift(...entries);
+      console.error("[Logs] Write batch failed, will retry:", err);
+    } finally {
+      this.flushing = false;
+    }
+  }
+
+  stop(): void {
+    this.stopped = true;
+
+    if (this.flushTimer) {
+      clearInterval(this.flushTimer);
+      this.flushTimer = null;
+    }
+
+    if (this.cleanupTimer) {
+      clearInterval(this.cleanupTimer);
+      this.cleanupTimer = null;
+    }
+
+    // Best-effort final flush so buffered entries are not dropped,
+    // then stop the adapter once the write settles (matches "flush + cleanup")
+    this.flush()
+      .catch((err) => console.error("[Logs] Final flush error:", err))
+      .finally(() => this.adapter.stop());
+  }
+
+  private async runCleanup(): Promise<void> {
+    const defaultDays = this.retention.defaultDays ?? 14;
+    const bySource = this.retention.bySource ?? {};
+
+    // Get all source types to clean
+    const sources: LogSource[] = [
+      "system",
+      "cron",
+      "job",
+      "workflow",
+      "plugin",
+      "route",
+    ];
+
+    for (const source of sources) {
+      const days = bySource[source] ?? defaultDays;
+      if (days <= 0) continue;
+
+      const cutoff = new Date();
+      cutoff.setDate(cutoff.getDate() - days);
+
+      try {
+        const deleted = await this.adapter.deleteOlderThan(cutoff, source);
+        if (deleted > 0) {
+          console.log(
+            `[Logs] Cleaned up ${deleted} old ${source} log entries (>${days} days)`
+          );
+        }
+      } catch (err: any) {
+        // Logs table not created yet: nothing to clean for any source, so abort this run
+        if (err?.message?.includes("no such table")) return;
+        console.error(`[Logs] Cleanup error for ${source}:`, err);
+      }
+    }
+  }
+}
+
+// ============================================
+// Factory Function
+// ============================================
+
+export function createLogs(config?: LogsConfig): Logs {
+  return new LogsImpl(config);
+}
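
Taken together, the service buffers synchronous write() calls and drains them to the adapter on a timer. A rough usage sketch follows; it assumes createLogs and MemoryLogsAdapter are re-exported from the package root (the test harness imports them from "./core/index"), and the entry fields are illustrative:

// Usage sketch, not from the package docs; the import path is an assumption.
import { createLogs, MemoryLogsAdapter } from "@donkeylabs/server";

const logs = createLogs({
  adapter: new MemoryLogsAdapter(), // default; the Kysely adapter persists to .donkeylabs/logs.db
  minLevel: "info",                 // "debug" entries are dropped before buffering
  flushInterval: 50,                // buffered entries reach the adapter every 50ms
});

// write() is synchronous: it only appends to the in-memory buffer
logs.write({
  level: "warn",
  message: "payment retry scheduled",
  source: "job",
  sourceId: "payments",
  tags: ["billing"],
});

await logs.flush(); // force the buffered write instead of waiting for the timer

const recent = await logs.query({ source: "job", level: "warn", limit: 20 });
console.log(recent[0]?.message); // "payment retry scheduled"

logs.stop();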
@@ -44,10 +44,10 @@ async function main(): Promise<void> {
   const proxyConnection = new WorkflowProxyConnection(socket);
 
   // Create database connection + adapter (subprocess owns its own persistence)
+  const sqlite = new Database(dbPath);
+  sqlite.run("PRAGMA busy_timeout = 5000");
   const db = new Kysely<any>({
-    dialect: new BunSqliteDialect({
-      database: new Database(dbPath),
-    }),
+    dialect: new BunSqliteDialect({ database: sqlite }),
   });
   const adapter = new KyselyWorkflowAdapter(db, { cleanupDays: 0 });
 
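The new busy_timeout pragma matters because the subprocess and the main server can both hold connections to the same SQLite file. Without it, a write that collides with another connection's lock fails immediately with SQLITE_BUSY; with it, the connection retries for up to five seconds. A minimal sketch of the difference, with a hypothetical file name:

// Sketch: two connections to one database file (bun:sqlite).
import { Database } from "bun:sqlite";

const a = new Database("shared.db");
const b = new Database("shared.db");
b.run("PRAGMA busy_timeout = 5000"); // wait up to 5s for locks to clear

a.run("CREATE TABLE IF NOT EXISTS t (v INTEGER)");
a.run("BEGIN IMMEDIATE"); // connection a takes the write lock
// While a holds the lock, a write on b waits up to 5s instead of throwing
// SQLITE_BUSY immediately (commented out here to keep the sketch linear):
// b.run("INSERT INTO t (v) VALUES (1)");
a.run("COMMIT");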
@@ -76,10 +76,19 @@ async function main(): Promise<void> {
   const plugins = createPluginsProxy(proxyConnection);
   const coreServices = createCoreServicesProxy(proxyConnection);
 
+  // Wrap coreServices proxy so that `db` resolves locally instead of via IPC.
+  // Spreading a Proxy with no ownKeys trap loses all proxied properties.
+  const coreWithDb = new Proxy(coreServices, {
+    get(target, prop, receiver) {
+      if (prop === "db") return db;
+      return Reflect.get(target, prop, receiver);
+    },
+  });
+
   // Create state machine with IPC event bridge
   const sm = new WorkflowStateMachine({
     adapter,
-    core: { ...coreServices, db } as any,
+    core: coreWithDb as any,
     plugins,
     events: createIpcEventBridge(socket, instanceId),
     pollInterval: 1000,
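
The ownKeys remark in the comment is the crux of this fix: object spread copies own enumerable properties, and a get-only Proxy over an empty target reports none, so the old spread-plus-db pattern produced an object containing only db. A self-contained sketch of the failure mode and the wrapper pattern:

// Sketch: spreading a get-only Proxy copies nothing, because spread asks
// for own enumerable keys (the ownKeys trap), which this Proxy lacks.
const services: Record<string, string> = new Proxy({}, {
  get: (_target, prop) => `service:${String(prop)}`,
});

console.log(services.events); // "service:events" (the get trap answers)
console.log({ ...services }); // {} (no own keys, so nothing is copied)

// Wrapping instead of spreading keeps every lookup lazy and adds one override:
const withDb = new Proxy(services, {
  get(target, prop, receiver) {
    if (prop === "db") return "local-db";
    return Reflect.get(target, prop, receiver);
  },
});
console.log(withDb.db);     // "local-db"
console.log(withDb.events); // "service:events"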
@@ -412,6 +412,62 @@ describe("WorkflowDefinition", () => {
     expect(isolatedWf.isolated).toBe(true);
     expect(inlineWf.isolated).toBe(false);
   });
+
+  it("should auto-detect sourceModule as a valid file:// URL after build()", () => {
+    const wf = workflow("auto-detect")
+      .task("s", { handler: async () => 1 })
+      .build();
+
+    expect(wf.sourceModule).toBeDefined();
+    expect(wf.sourceModule).toMatch(/^file:\/\//);
+    // Should point to this test file
+    expect(wf.sourceModule).toContain("workflows.test.ts");
+  });
+});
+
+describe("register() with auto-detected sourceModule", () => {
+  let workflows: ReturnType<typeof createWorkflows>;
+  let adapter: MemoryWorkflowAdapter;
+
+  beforeEach(() => {
+    adapter = new MemoryWorkflowAdapter();
+    workflows = createWorkflows({ adapter });
+  });
+
+  afterEach(async () => {
+    await workflows.stop();
+  });
+
+  it("should not warn when registering isolated workflow with auto-detected sourceModule", () => {
+    const wf = workflow("auto-isolated")
+      .task("s", { handler: async () => 1 })
+      .build();
+
+    // sourceModule should be set by build()
+    expect(wf.sourceModule).toBeDefined();
+
+    const warnings: string[] = [];
+    const origWarn = console.warn;
+    console.warn = (...args: any[]) => warnings.push(args.join(" "));
+    try {
+      workflows.register(wf);
+    } finally {
+      console.warn = origWarn;
+    }
+
+    expect(warnings.filter((w) => w.includes("no modulePath"))).toHaveLength(0);
+  });
+
+  it("should prefer explicit modulePath over auto-detected sourceModule", () => {
+    const wf = workflow("explicit-path")
+      .task("s", { handler: async () => 1 })
+      .build();
+
+    // Register with explicit modulePath
+    expect(() => {
+      workflows.register(wf, { modulePath: "file:///explicit/path.ts" });
+    }).not.toThrow();
+  });
 });
 
 describe("Choice steps (inline)", () => {
@@ -12,9 +12,10 @@ import type { Events } from "./events";
 import type { Jobs } from "./jobs";
 import type { SSE } from "./sse";
 import type { z } from "zod";
+import { sql } from "kysely";
 import type { CoreServices } from "../core";
-import { dirname, join } from "node:path";
-import { fileURLToPath } from "node:url";
+import { dirname, join, resolve } from "node:path";
+import { fileURLToPath, pathToFileURL } from "node:url";
 import {
   createWorkflowSocketServer,
   type WorkflowSocketServer,
@@ -24,6 +25,31 @@ import {
 import { isProcessAlive } from "./external-jobs";
 import { WorkflowStateMachine, type StateMachineEvents } from "./workflow-state-machine";
 
+// ============================================
+// Auto-detect caller module for isolated workflows
+// ============================================
+
+const WORKFLOWS_FILE = resolve(fileURLToPath(import.meta.url));
+
+/**
+ * Walk the call stack to find the file that invoked build().
+ * Returns a file:// URL string or undefined if detection fails.
+ */
+function captureCallerUrl(): string | undefined {
+  const stack = new Error().stack ?? "";
+  for (const line of stack.split("\n").slice(1)) {
+    const match = line.match(/at\s+(?:.*?\s+\(?)?([^\s():]+):\d+:\d+/);
+    if (match) {
+      let filePath = match[1];
+      if (filePath.startsWith("file://")) filePath = fileURLToPath(filePath);
+      if (filePath.startsWith("native")) continue;
+      filePath = resolve(filePath);
+      if (filePath !== WORKFLOWS_FILE) return pathToFileURL(filePath).href;
+    }
+  }
+  return undefined;
+}
+
 // Type helper for Zod schema inference
 type ZodSchema = z.ZodTypeAny;
 type InferZodOutput<T extends ZodSchema> = z.infer<T>;
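
For context on the regex, here is the kind of stack frame it parses and what it extracts; the path is hypothetical, and frame formats vary by runtime, which is why detection can fail and register() keeps an explicit modulePath escape hatch:

// Sketch: what the frame regex pulls out of a typical stack line.
const FRAME_RE = /at\s+(?:.*?\s+\(?)?([^\s():]+):\d+:\d+/;

const frame = "    at <anonymous> (/Users/me/app/src/workflows/report.ts:12:4)";
console.log(frame.match(FRAME_RE)?.[1]);
// => "/Users/me/app/src/workflows/report.ts"
// captureCallerUrl() resolves this path, skips frames inside workflows.ts
// itself, and returns the first external hit as pathToFileURL(...).href:
// => "file:///Users/me/app/src/workflows/report.ts"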
@@ -144,6 +170,8 @@ export interface WorkflowDefinition {
    * Set to false for lightweight workflows that benefit from inline execution.
    */
   isolated?: boolean;
+  /** Auto-detected module URL where this workflow was built. Used as fallback for isolated execution. */
+  sourceModule?: string;
 }
 
 // ============================================
@@ -576,6 +604,7 @@ export class WorkflowBuilder {
       timeout: this._timeout,
       defaultRetry: this._defaultRetry,
       isolated: this._isolated,
+      sourceModule: captureCallerUrl(),
     };
   }
 }
@@ -617,10 +646,15 @@ export interface WorkflowsConfig {
 export interface WorkflowRegisterOptions {
   /**
    * Module path for isolated workflows.
-   * Required when workflow.isolated !== false and running in isolated mode.
-   * Use `import.meta.url` to get the current module's path.
+   * Auto-detected from the call site of `build()` in most cases.
+   * Only needed if the workflow definition is re-exported from a different
+   * module than the one that calls `build()`.
    *
    * @example
+   * // Usually not needed — auto-detected:
+   * workflows.register(myWorkflow);
+   *
+   * // Override when re-exporting from another module:
    * workflows.register(myWorkflow, { modulePath: import.meta.url });
    */
   modulePath?: string;
@@ -649,6 +683,8 @@ export interface Workflows {
   stop(): Promise<void>;
   /** Set core services (called after initialization to resolve circular dependency) */
   setCore(core: CoreServices): void;
+  /** Resolve dbPath from the database instance (call after setCore, before resume) */
+  resolveDbPath(): Promise<void>;
   /** Set plugin services (called after plugins are initialized) */
   setPlugins(plugins: Record<string, any>): void;
   /** Update metadata for a workflow instance (used by isolated workflows) */
@@ -728,19 +764,21 @@ class WorkflowsImpl implements Workflows {
 
   setCore(core: CoreServices): void {
     this.core = core;
-    // Extract DB path if using Kysely adapter (for isolated workflows)
-    if (!this.dbPath && (core.db as any)?.getExecutor) {
-      // Try to get the database path from the Kysely instance
-      // This is a bit hacky but necessary for isolated workflows
-      try {
-        const executor = (core.db as any).getExecutor();
-        const adapter = executor?.adapter;
-        if (adapter?.db?.filename) {
-          this.dbPath = adapter.db.filename;
-        }
-      } catch {
-        // Ignore - dbPath might be set manually
+  }
+
+  async resolveDbPath(): Promise<void> {
+    if (this.dbPath) return;
+    if (!this.core?.db) return;
+
+    // Use PRAGMA database_list to get the file path — works with any SQLite dialect
+    try {
+      const result = await sql<{ name: string; file: string }>`PRAGMA database_list`.execute(this.core.db);
+      const main = result.rows.find((r) => r.name === "main");
+      if (main?.file && main.file !== "" && main.file !== ":memory:") {
+        this.dbPath = main.file;
       }
+    } catch {
+      // Not a SQLite database or PRAGMA not supported — dbPath stays unset
     }
   }
 
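PRAGMA database_list returns one row per attached database; the "main" row carries the backing file's absolute path, or an empty string for in-memory databases, which is why both empty and ":memory:" values are filtered out above. A quick sketch of the output with bun:sqlite and a hypothetical path:

import { Database } from "bun:sqlite";

const onDisk = new Database("/tmp/app.db");
console.log(onDisk.query("PRAGMA database_list").all());
// => [ { seq: 0, name: "main", file: "/tmp/app.db" } ]

const inMemory = new Database(":memory:");
console.log(inMemory.query("PRAGMA database_list").all());
// => [ { seq: 0, name: "main", file: "" } ]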
@@ -761,14 +799,15 @@ class WorkflowsImpl implements Workflows {
       throw new Error(`Workflow "${definition.name}" is already registered`);
     }
 
-    // Store module path for isolated workflows
-    if (options?.modulePath) {
-      this.workflowModulePaths.set(definition.name, options.modulePath);
+    // Resolve module path: explicit option > auto-detected sourceModule
+    const modulePath = options?.modulePath ?? definition.sourceModule;
+    if (modulePath) {
+      this.workflowModulePaths.set(definition.name, modulePath);
     } else if (definition.isolated !== false) {
-      // Warn if isolated workflow has no module path
+      // Warn only if neither explicit nor auto-detected path is available
       console.warn(
-        `[Workflows] Workflow "${definition.name}" is isolated but no modulePath provided. ` +
-        `Use: workflows.register(myWorkflow, { modulePath: import.meta.url })`
+        `[Workflows] Workflow "${definition.name}" is isolated but no modulePath could be detected. ` +
+        `Pass { modulePath: import.meta.url } to register().`
       );
     }
 
@@ -823,6 +862,11 @@ class WorkflowsImpl implements Workflows {
       console.warn(
         `[Workflows] Workflow "${workflowName}" falling back to inline execution (no modulePath)`
       );
+    } else if (isIsolated && modulePath && !this.dbPath) {
+      console.warn(
+        `[Workflows] Workflow "${workflowName}" falling back to inline execution (dbPath could not be auto-detected). ` +
+        `Set workflows.dbPath in your server config to enable isolated execution.`
+      );
     }
     this.startInlineWorkflow(instance.id, definition);
   }
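
When auto-detection fails, the warning points at an explicit setting. A hypothetical sketch of what that looks like; the exact ServerConfig shape is assumed, not shown in this diff:

// Assumption: workflows.dbPath accepts the SQLite file the server already uses.
const serverConfig = {
  workflows: {
    dbPath: ".donkeylabs/app.db",
  },
};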
package/src/core.ts CHANGED
@@ -16,6 +16,7 @@ import type { Processes } from "./core/processes";
 import type { Audit } from "./core/audit";
 import type { WebSocketService } from "./core/websocket";
 import type { Storage } from "./core/storage";
+import type { Logs } from "./core/logs";
 
 export interface PluginRegistry {}
 
@@ -109,6 +110,7 @@ export interface CoreServices {
   audit: Audit;
   websocket: WebSocketService;
   storage: Storage;
+  logs: Logs;
 }
 
 /**
package/src/harness.ts CHANGED
@@ -16,9 +16,11 @@ import {
   createAudit,
   createWebSocket,
   createStorage,
+  createLogs,
   KyselyJobAdapter,
   KyselyWorkflowAdapter,
   MemoryAuditAdapter,
+  MemoryLogsAdapter,
 } from "./core/index";
 import { AppServer, type ServerConfig } from "./server";
 import type { IRouter, RouteDefinition } from "./router";
@@ -68,6 +70,7 @@ export async function createTestHarness(targetPlugin: Plugin, dependencies: Plug
   const audit = createAudit({ adapter: new MemoryAuditAdapter() });
   const websocket = createWebSocket();
   const storage = createStorage(); // Uses memory adapter by default
+  const logs = createLogs({ adapter: new MemoryLogsAdapter(), events });
 
   const core: CoreServices = {
     db,
@@ -85,6 +88,7 @@ export async function createTestHarness(targetPlugin: Plugin, dependencies: Plug
     audit,
     websocket,
     storage,
+    logs,
   };
 
   const manager = new PluginManager(core);