@donkeylabs/server 2.0.21 → 2.0.23

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,398 @@
1
+ /**
2
+ * Core Logs Service
3
+ *
4
+ * Persistent, filterable, event-driven logging.
5
+ * Writes log entries to a separate logs database with buffered writes.
6
+ * Emits events so users can build their own SSE routes or subscribers.
7
+ */
8
+
9
+ import type { Events } from "./events";
10
+ import type { LogLevel } from "./logger";
11
+
12
+ // ============================================
13
+ // Types
14
+ // ============================================
15
+
16
/** Origin category of a log entry; used for filtering and per-source retention. */
export type LogSource = "system" | "cron" | "job" | "workflow" | "plugin" | "route";
17
+
18
/** A single log entry as persisted by a LogsAdapter. */
export interface PersistentLogEntry {
  /** Unique entry id (generated by the service at write time). */
  id: string;
  /** Time the entry was enqueued (assigned by the service, not the caller). */
  timestamp: Date;
  /** Severity level. */
  level: LogLevel;
  /** Human-readable log message. */
  message: string;
  /** Origin category (system, cron, job, workflow, plugin, route). */
  source: LogSource;
  /** Specific origin identifier, e.g. a cron task name or workflow id. */
  sourceId?: string;
  /** Free-form tags used by tag filtering. */
  tags?: string[];
  /** Structured payload attached to the entry. */
  data?: Record<string, any>;
  /** Ambient context — NOTE(review): shape is not enforced anywhere visible here. */
  context?: Record<string, any>;
}
29
+
30
/**
 * Filters accepted by query()/count(). All fields are optional and are
 * AND-combined: an entry must satisfy every filter that is set.
 */
export interface LogsQueryFilters {
  /** Filter by source type */
  source?: LogSource;
  /** Filter by source identifier */
  sourceId?: string;
  /** Filter by minimum log level (entries at this level or above match) */
  level?: LogLevel;
  /** Filter by tags (entries must contain all specified tags) */
  tags?: string[];
  /** Search message text (LIKE on message) */
  search?: string;
  /** Filter by date range (start, inclusive) */
  startDate?: Date;
  /** Filter by date range (end, inclusive) */
  endDate?: Date;
  /** Maximum number of results (default: 100) */
  limit?: number;
  /** Offset for pagination */
  offset?: number;
}
50
+
51
/** Retention policy: entries older than the configured window are deleted periodically. */
export interface LogsRetentionConfig {
  /** Default retention in days (default: 14). A value <= 0 disables cleanup for a source. */
  defaultDays?: number;
  /** Per-source retention overrides in days */
  bySource?: Partial<Record<LogSource, number>>;
  /** Cleanup interval in ms (default: 86400000 = 24h) */
  cleanupInterval?: number;
}
59
+
60
/** Configuration accepted by createLogs(). */
export interface LogsConfig {
  /** Custom adapter (defaults to KyselyLogsAdapter) */
  adapter?: LogsAdapter;
  /** Events service for emitting log events; omit to disable event emission */
  events?: Events;
  /** Retention configuration */
  retention?: LogsRetentionConfig;
  /** Minimum level for persistence (default: "info"); lower-level entries are discarded */
  minLevel?: LogLevel;
  /** Buffer flush interval in ms (default: 50) */
  flushInterval?: number;
  /** Maximum buffer size before forced flush (default: 100) */
  maxBufferSize?: number;
  /** Database path (default: ".donkeylabs/logs.db") */
  dbPath?: string;
}
76
+
77
+ // ============================================
78
+ // Adapter Interface
79
+ // ============================================
80
+
81
/** Storage backend contract for persisted log entries. */
export interface LogsAdapter {
  /** Write a batch of log entries */
  writeBatch(entries: PersistentLogEntry[]): Promise<void>;
  /** Write a single log entry */
  write(entry: PersistentLogEntry): Promise<void>;
  /** Query log entries with filters */
  query(filters: LogsQueryFilters): Promise<PersistentLogEntry[]>;
  /** Get log entries by source */
  getBySource(source: LogSource, sourceId?: string, limit?: number): Promise<PersistentLogEntry[]>;
  /** Count log entries matching filters */
  count(filters: LogsQueryFilters): Promise<number>;
  /** Delete entries older than a given date, optionally for a specific source; returns the number of deleted entries */
  deleteOlderThan(date: Date, source?: LogSource): Promise<number>;
  /** Stop the adapter (cleanup resources) */
  stop(): void;
}
97
+
98
+ // ============================================
99
+ // Service Interface
100
+ // ============================================
101
+
102
/**
 * Public logs service. Writes are synchronous and buffered; reads are
 * delegated to the configured adapter.
 */
export interface Logs {
  /** Write a log entry (synchronous, enqueues to buffer; id/timestamp assigned internally) */
  write(entry: Omit<PersistentLogEntry, "id" | "timestamp">): void;
  /** Query log entries with filters */
  query(filters: LogsQueryFilters): Promise<PersistentLogEntry[]>;
  /** Get log entries by source */
  getBySource(source: LogSource, sourceId?: string, limit?: number): Promise<PersistentLogEntry[]>;
  /** Count log entries matching filters */
  count(filters: LogsQueryFilters): Promise<number>;
  /** Flush the write buffer */
  flush(): Promise<void>;
  /** Stop the logs service (flush + cleanup) */
  stop(): void;
}
116
+
117
+ // ============================================
118
+ // Log Level Ordering
119
+ // ============================================
120
+
121
/** Numeric ordering of log levels; used for "minimum level" comparisons. */
const LOG_LEVEL_VALUES: Record<LogLevel, number> = {
  debug: 0,
  info: 1,
  warn: 2,
  error: 3,
};
127
+
128
+ // ============================================
129
+ // In-Memory Adapter (for testing)
130
+ // ============================================
131
+
132
+ export class MemoryLogsAdapter implements LogsAdapter {
133
+ private entries: PersistentLogEntry[] = [];
134
+
135
+ async writeBatch(entries: PersistentLogEntry[]): Promise<void> {
136
+ this.entries.push(...entries);
137
+ }
138
+
139
+ async write(entry: PersistentLogEntry): Promise<void> {
140
+ this.entries.push(entry);
141
+ }
142
+
143
+ async query(filters: LogsQueryFilters): Promise<PersistentLogEntry[]> {
144
+ let results = [...this.entries];
145
+
146
+ if (filters.source) {
147
+ results = results.filter((e) => e.source === filters.source);
148
+ }
149
+ if (filters.sourceId) {
150
+ results = results.filter((e) => e.sourceId === filters.sourceId);
151
+ }
152
+ if (filters.level) {
153
+ const minLevel = LOG_LEVEL_VALUES[filters.level];
154
+ results = results.filter((e) => LOG_LEVEL_VALUES[e.level] >= minLevel);
155
+ }
156
+ if (filters.tags && filters.tags.length > 0) {
157
+ results = results.filter(
158
+ (e) => e.tags && filters.tags!.every((t) => e.tags!.includes(t))
159
+ );
160
+ }
161
+ if (filters.search) {
162
+ const searchLower = filters.search.toLowerCase();
163
+ results = results.filter((e) =>
164
+ e.message.toLowerCase().includes(searchLower)
165
+ );
166
+ }
167
+ if (filters.startDate) {
168
+ results = results.filter((e) => e.timestamp >= filters.startDate!);
169
+ }
170
+ if (filters.endDate) {
171
+ results = results.filter((e) => e.timestamp <= filters.endDate!);
172
+ }
173
+
174
+ // Sort by timestamp descending (newest first)
175
+ results.sort((a, b) => b.timestamp.getTime() - a.timestamp.getTime());
176
+
177
+ const offset = filters.offset ?? 0;
178
+ const limit = filters.limit ?? 100;
179
+ return results.slice(offset, offset + limit);
180
+ }
181
+
182
+ async getBySource(
183
+ source: LogSource,
184
+ sourceId?: string,
185
+ limit: number = 100
186
+ ): Promise<PersistentLogEntry[]> {
187
+ return this.query({ source, sourceId, limit });
188
+ }
189
+
190
+ async count(filters: LogsQueryFilters): Promise<number> {
191
+ const results = await this.query({ ...filters, limit: undefined, offset: undefined });
192
+ return results.length;
193
+ }
194
+
195
+ async deleteOlderThan(date: Date, source?: LogSource): Promise<number> {
196
+ const before = this.entries.length;
197
+ this.entries = this.entries.filter((e) => {
198
+ if (source && e.source !== source) return true;
199
+ return e.timestamp >= date;
200
+ });
201
+ return before - this.entries.length;
202
+ }
203
+
204
+ stop(): void {
205
+ // No cleanup needed
206
+ }
207
+ }
208
+
209
+ // ============================================
210
+ // Service Implementation
211
+ // ============================================
212
+
213
+ const MAX_BUFFER_OVERFLOW = 10_000;
214
+
215
+ class LogsImpl implements Logs {
216
+ private adapter: LogsAdapter;
217
+ private events?: Events;
218
+ private buffer: PersistentLogEntry[] = [];
219
+ private flushTimer: ReturnType<typeof setInterval> | null = null;
220
+ private cleanupTimer: ReturnType<typeof setInterval> | null = null;
221
+ private maxBufferSize: number;
222
+ private minLevel: number;
223
+ private retention: LogsRetentionConfig;
224
+ private stopped = false;
225
+ private flushing = false;
226
+
227
+ constructor(config: LogsConfig = {}) {
228
+ this.adapter = config.adapter ?? new MemoryLogsAdapter();
229
+ this.events = config.events;
230
+ this.maxBufferSize = config.maxBufferSize ?? 100;
231
+ this.minLevel = LOG_LEVEL_VALUES[config.minLevel ?? "info"];
232
+ this.retention = config.retention ?? {};
233
+
234
+ // Start flush timer
235
+ const flushInterval = config.flushInterval ?? 50;
236
+ this.flushTimer = setInterval(() => {
237
+ this.flush().catch((err) => {
238
+ // Use console.log to avoid circular logging
239
+ console.error("[Logs] Flush error:", err);
240
+ });
241
+ }, flushInterval);
242
+
243
+ // Start retention cleanup timer
244
+ const cleanupInterval = this.retention.cleanupInterval ?? 86400000; // 24h
245
+ this.cleanupTimer = setInterval(() => {
246
+ this.runCleanup().catch((err) => {
247
+ console.error("[Logs] Cleanup error:", err);
248
+ });
249
+ }, cleanupInterval);
250
+ }
251
+
252
+ write(entry: Omit<PersistentLogEntry, "id" | "timestamp">): void {
253
+ if (this.stopped) return;
254
+
255
+ // Check minimum level
256
+ if (LOG_LEVEL_VALUES[entry.level] < this.minLevel) return;
257
+
258
+ const fullEntry: PersistentLogEntry = {
259
+ ...entry,
260
+ id: `log_${Date.now()}_${Math.random().toString(36).slice(2, 9)}`,
261
+ timestamp: new Date(),
262
+ };
263
+
264
+ this.buffer.push(fullEntry);
265
+
266
+ // Check buffer overflow
267
+ if (this.buffer.length > MAX_BUFFER_OVERFLOW) {
268
+ console.warn(
269
+ `[Logs] Buffer overflow (${this.buffer.length} entries), dropping oldest entries`
270
+ );
271
+ this.buffer = this.buffer.slice(-this.maxBufferSize);
272
+ }
273
+
274
+ // Flush if buffer is full
275
+ if (this.buffer.length >= this.maxBufferSize) {
276
+ this.flush().catch((err) => {
277
+ console.error("[Logs] Flush error:", err);
278
+ });
279
+ }
280
+ }
281
+
282
+ async query(filters: LogsQueryFilters): Promise<PersistentLogEntry[]> {
283
+ return this.adapter.query(filters);
284
+ }
285
+
286
+ async getBySource(
287
+ source: LogSource,
288
+ sourceId?: string,
289
+ limit?: number
290
+ ): Promise<PersistentLogEntry[]> {
291
+ return this.adapter.getBySource(source, sourceId, limit);
292
+ }
293
+
294
+ async count(filters: LogsQueryFilters): Promise<number> {
295
+ return this.adapter.count(filters);
296
+ }
297
+
298
+ async flush(): Promise<void> {
299
+ if (this.buffer.length === 0 || this.flushing) return;
300
+
301
+ this.flushing = true;
302
+ const entries = this.buffer.splice(0);
303
+
304
+ try {
305
+ await this.adapter.writeBatch(entries);
306
+
307
+ // Emit events for each entry after successful write
308
+ if (this.events) {
309
+ for (const entry of entries) {
310
+ try {
311
+ // All logs
312
+ await this.events.emit("log.created", entry);
313
+ // By source type: "log.cron", "log.workflow"
314
+ await this.events.emit(`log.${entry.source}`, entry);
315
+ // Specific: "log.cron.cleanup-task", "log.workflow.wf_123"
316
+ if (entry.sourceId) {
317
+ await this.events.emit(
318
+ `log.${entry.source}.${entry.sourceId}`,
319
+ entry
320
+ );
321
+ }
322
+ } catch (err) {
323
+ // Don't let event emission errors break the flush
324
+ console.error("[Logs] Event emission error:", err);
325
+ }
326
+ }
327
+ }
328
+ } catch (err) {
329
+ // Put entries back if write failed (they'll be retried next flush)
330
+ this.buffer.unshift(...entries);
331
+ // Silently catch - will retry next flush
332
+ // Use console to avoid circular logging
333
+ console.error("[Logs] Write batch failed, will retry:", err);
334
+ } finally {
335
+ this.flushing = false;
336
+ }
337
+ }
338
+
339
+ stop(): void {
340
+ this.stopped = true;
341
+
342
+ if (this.flushTimer) {
343
+ clearInterval(this.flushTimer);
344
+ this.flushTimer = null;
345
+ }
346
+
347
+ if (this.cleanupTimer) {
348
+ clearInterval(this.cleanupTimer);
349
+ this.cleanupTimer = null;
350
+ }
351
+
352
+ this.adapter.stop();
353
+ }
354
+
355
+ private async runCleanup(): Promise<void> {
356
+ const defaultDays = this.retention.defaultDays ?? 14;
357
+ const bySource = this.retention.bySource ?? {};
358
+
359
+ // Get all source types to clean
360
+ const sources: LogSource[] = [
361
+ "system",
362
+ "cron",
363
+ "job",
364
+ "workflow",
365
+ "plugin",
366
+ "route",
367
+ ];
368
+
369
+ for (const source of sources) {
370
+ const days = bySource[source] ?? defaultDays;
371
+ if (days <= 0) continue;
372
+
373
+ const cutoff = new Date();
374
+ cutoff.setDate(cutoff.getDate() - days);
375
+
376
+ try {
377
+ const deleted = await this.adapter.deleteOlderThan(cutoff, source);
378
+ if (deleted > 0) {
379
+ console.log(
380
+ `[Logs] Cleaned up ${deleted} old ${source} log entries (>${days} days)`
381
+ );
382
+ }
383
+ } catch (err: any) {
384
+ // Silently ignore table-not-found errors
385
+ if (err?.message?.includes("no such table")) return;
386
+ console.error(`[Logs] Cleanup error for ${source}:`, err);
387
+ }
388
+ }
389
+ }
390
+ }
391
+
392
+ // ============================================
393
+ // Factory Function
394
+ // ============================================
395
+
396
+ export function createLogs(config?: LogsConfig): Logs {
397
+ return new LogsImpl(config);
398
+ }
@@ -0,0 +1,241 @@
1
+ import { Kysely } from "kysely";
2
+ import { BunSqliteDialect } from "kysely-bun-sqlite";
3
+ import Database from "bun:sqlite";
4
+ import {
5
+ createLogger,
6
+ createCache,
7
+ createEvents,
8
+ createCron,
9
+ createJobs,
10
+ createSSE,
11
+ createRateLimiter,
12
+ createErrors,
13
+ createWorkflows,
14
+ createProcesses,
15
+ createAudit,
16
+ createWebSocket,
17
+ createStorage,
18
+ createLogs,
19
+ KyselyJobAdapter,
20
+ KyselyWorkflowAdapter,
21
+ MemoryAuditAdapter,
22
+ MemoryLogsAdapter,
23
+ } from "./index";
24
+ import { PluginManager, type CoreServices, type ConfiguredPlugin } from "../core";
25
+
26
/** Plugin information serialized for a subprocess to load its plugins. */
export interface SubprocessPluginMetadata {
  /** Plugin names to load, processed in array order. */
  names: string[];
  /** Plugin name -> importable module path. */
  modulePaths: Record<string, string>;
  /** Plugin name -> serializable config passed to the plugin factory. */
  configs: Record<string, any>;
}
31
+
32
/** Options accepted by bootstrapSubprocess(). */
export interface SubprocessBootstrapOptions {
  /** Path to the SQLite database file to open. */
  dbPath: string;
  /** Value installed as CoreServices.config (defaults to {}). */
  coreConfig?: Record<string, any>;
  /** Plugins to load, as serialized by the parent process. */
  pluginMetadata: SubprocessPluginMetadata;
  /** Which background services to start after plugin init (all off by default). */
  startServices?: {
    cron?: boolean;
    jobs?: boolean;
    workflows?: boolean;
    processes?: boolean;
  };
}
43
+
44
/** Everything bootstrapSubprocess() hands back to the caller. */
export interface SubprocessBootstrapResult {
  /** Fully wired core services. */
  core: CoreServices;
  /** Plugin manager with all configured plugins registered and initialized. */
  manager: PluginManager;
  /** Kysely handle over the opened SQLite database. */
  db: Kysely<any>;
  /** Workflow persistence adapter created during bootstrap. */
  workflowAdapter: KyselyWorkflowAdapter;
  /** Stops services and stoppable adapters, then closes the database. */
  cleanup: () => Promise<void>;
}
51
+
52
/**
 * Bootstrap a complete CoreServices graph inside a subprocess.
 *
 * Opens the SQLite database at options.dbPath, constructs every core service,
 * loads and initializes the configured plugins, then optionally starts the
 * background services (cron, jobs, workflows, processes) the caller opted
 * into.
 *
 * @returns The wired core services, the plugin manager, the Kysely handle,
 *   the workflow adapter, and an async cleanup function.
 * @throws If a plugin module path is missing or a plugin cannot be resolved
 *   from its module (see loadConfiguredPlugins).
 */
export async function bootstrapSubprocess(
  options: SubprocessBootstrapOptions
): Promise<SubprocessBootstrapResult> {
  const sqlite = new Database(options.dbPath);
  // Wait up to 5s for locks instead of failing immediately — presumably the
  // database file is shared with another process (TODO confirm).
  sqlite.run("PRAGMA busy_timeout = 5000");

  const db = new Kysely<any>({
    dialect: new BunSqliteDialect({ database: sqlite }),
  });

  const cache = createCache();
  const events = createEvents();
  const sse = createSSE();
  const rateLimiter = createRateLimiter();
  const errors = createErrors();

  // Logs stay in memory in the subprocess (no persistent logs adapter here).
  const logs = createLogs({ adapter: new MemoryLogsAdapter(), events });
  const logger = createLogger();

  const cron = createCron({ logger });

  // NOTE(review): cleanupDays: 0 and persist/autoRecover flags below appear
  // to disable background housekeeping in the subprocess — verify against the
  // adapter/service docs.
  const jobAdapter = new KyselyJobAdapter(db, { cleanupDays: 0 });
  const workflowAdapter = new KyselyWorkflowAdapter(db, { cleanupDays: 0 });
  const auditAdapter = new MemoryAuditAdapter();

  const jobs = createJobs({
    events,
    logger,
    adapter: jobAdapter,
    persist: false,
  });

  const workflows = createWorkflows({
    events,
    jobs,
    sse,
    adapter: workflowAdapter,
  });

  const processes = createProcesses({ events, autoRecoverOrphans: false });
  const audit = createAudit({ adapter: auditAdapter });
  const websocket = createWebSocket();
  const storage = createStorage();

  const core: CoreServices = {
    db,
    config: options.coreConfig ?? {},
    logger,
    cache,
    events,
    cron,
    jobs,
    sse,
    rateLimiter,
    errors,
    workflows,
    processes,
    audit,
    websocket,
    storage,
    logs,
  };

  // Hand workflows the core reference before any plugin code runs.
  workflows.setCore(core);

  const manager = new PluginManager(core);
  const plugins = await loadConfiguredPlugins(options.pluginMetadata);

  for (const plugin of plugins) {
    manager.register(plugin);
  }

  // Initialize plugins first, then expose their services to workflows.
  await manager.init();
  workflows.setPlugins(manager.getServices());

  // Start only the background services the caller opted into.
  if (options.startServices?.cron) {
    core.cron.start();
  }
  if (options.startServices?.jobs) {
    core.jobs.start();
  }
  if (options.startServices?.workflows) {
    await core.workflows.resolveDbPath();
    await core.workflows.resume();
  }
  if (options.startServices?.processes) {
    core.processes.start();
  }

  // Stop every started service, then release the database handles.
  const cleanup = async () => {
    await core.cron.stop();
    await core.jobs.stop();
    await core.workflows.stop();
    await core.processes.shutdown();

    // stop() is feature-detected: it may not exist on these services' types.
    if (typeof (logs as any).stop === "function") {
      (logs as any).stop();
    }

    if (typeof (audit as any).stop === "function") {
      (audit as any).stop();
    }

    await db.destroy();
    sqlite.close();
  };

  return { core, manager, db, workflowAdapter, cleanup };
}
161
+
162
+ async function loadConfiguredPlugins(
163
+ metadata: SubprocessPluginMetadata
164
+ ): Promise<ConfiguredPlugin[]> {
165
+ const plugins: ConfiguredPlugin[] = [];
166
+
167
+ for (const name of metadata.names) {
168
+ const modulePath = metadata.modulePaths[name];
169
+ if (!modulePath) {
170
+ throw new Error(`Missing module path for plugin "${name}"`);
171
+ }
172
+
173
+ const module = await import(modulePath);
174
+ const config = metadata.configs?.[name];
175
+ const plugin = findPluginDefinition(module, name, config);
176
+
177
+ if (!plugin) {
178
+ throw new Error(
179
+ `Plugin "${name}" not found in module ${modulePath}. ` +
180
+ `Ensure the plugin is exported and its config is serializable.`
181
+ );
182
+ }
183
+
184
+ plugins.push(plugin);
185
+ }
186
+
187
+ return plugins;
188
+ }
189
+
190
+ function findPluginDefinition(
191
+ mod: any,
192
+ pluginName: string,
193
+ boundConfig?: any
194
+ ): ConfiguredPlugin | null {
195
+ for (const key of Object.keys(mod)) {
196
+ const exported = mod[key];
197
+ const direct = resolvePluginDefinition(exported, pluginName, boundConfig);
198
+ if (direct) return direct;
199
+ }
200
+
201
+ if (mod.default) {
202
+ const direct = resolvePluginDefinition(mod.default, pluginName, boundConfig);
203
+ if (direct) return direct;
204
+ }
205
+
206
+ return null;
207
+ }
208
+
209
+ function resolvePluginDefinition(
210
+ exported: any,
211
+ pluginName: string,
212
+ boundConfig?: any
213
+ ): ConfiguredPlugin | null {
214
+ if (!exported) return null;
215
+
216
+ if (
217
+ typeof exported === "object" &&
218
+ exported.name === pluginName &&
219
+ typeof exported.service === "function"
220
+ ) {
221
+ return exported as ConfiguredPlugin;
222
+ }
223
+
224
+ if (typeof exported === "function" && boundConfig !== undefined) {
225
+ try {
226
+ const result = exported(boundConfig);
227
+ if (
228
+ result &&
229
+ typeof result === "object" &&
230
+ result.name === pluginName &&
231
+ typeof result.service === "function"
232
+ ) {
233
+ return result as ConfiguredPlugin;
234
+ }
235
+ } catch {
236
+ return null;
237
+ }
238
+ }
239
+
240
+ return null;
241
+ }