@donkeylabs/server 2.0.21 → 2.0.23

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,287 @@
+ /**
+  * Kysely Logs Adapter
+  *
+  * Manages its own SQLite database at .donkeylabs/logs.db.
+  * Self-creates the table and indexes (no migration needed).
+  */
+
+ import { Kysely, SqliteDialect, sql } from "kysely";
+ import type {
+   LogsAdapter,
+   PersistentLogEntry,
+   LogsQueryFilters,
+   LogSource,
+ } from "./logs";
+ import type { LogLevel } from "./logger";
+
+ // ============================================
+ // Database Types
+ // ============================================
+
+ interface LogsTable {
+   id: string;
+   timestamp: string;
+   level: string;
+   message: string;
+   source: string;
+   source_id: string | null;
+   tags: string | null;
+   data: string | null;
+   context: string | null;
+ }
+
+ interface Database {
+   __donkeylabs_logs__: LogsTable;
+ }
+
+ // Log level ordering for queries
+ const LOG_LEVEL_VALUES: Record<LogLevel, number> = {
+   debug: 0,
+   info: 1,
+   warn: 2,
+   error: 3,
+ };
+
+ // ============================================
+ // Adapter Configuration
+ // ============================================
+
+ export interface KyselyLogsAdapterConfig {
+   /** Database file path (default: ".donkeylabs/logs.db") */
+   dbPath?: string;
+ }
+
+ // ============================================
+ // Adapter Implementation
+ // ============================================
+
+ export class KyselyLogsAdapter implements LogsAdapter {
+   private db: Kysely<Database>;
+   private tableReady = false;
+   private ensureTablePromise: Promise<void> | null = null;
+
+   constructor(config: KyselyLogsAdapterConfig = {}) {
+     const dbPath = config.dbPath ?? ".donkeylabs/logs.db";
+
+     // Ensure the parent directory exists
+     try {
+       const { mkdirSync } = require("node:fs");
+       const { dirname } = require("node:path");
+       mkdirSync(dirname(dbPath), { recursive: true });
+     } catch {
+       // Ignore - directory may already exist
+     }
+
+     // Open the SQLite database via bun:sqlite (loaded with require at
+     // construction time) and wrap it in this adapter's own Kysely instance.
+     const BunSqlite = require("bun:sqlite");
+     const sqliteDb = new BunSqlite.default(dbPath);
+
+     // Enable WAL mode for better concurrent read/write performance
+     sqliteDb.exec("PRAGMA journal_mode = WAL");
+     sqliteDb.exec("PRAGMA synchronous = NORMAL");
+
+     this.db = new Kysely<Database>({
+       dialect: new SqliteDialect({
+         database: sqliteDb,
+       }),
+     });
+   }
+
+   private async ensureTable(): Promise<void> {
+     if (this.tableReady) return;
+
+     // Share one promise so concurrent callers don't race to create the
+     // table; clear it on failure so a later call can retry.
+     if (!this.ensureTablePromise) {
+       this.ensureTablePromise = this.createTable().catch((err) => {
+         this.ensureTablePromise = null;
+         throw err;
+       });
+     }
+
+     await this.ensureTablePromise;
+   }
+
+   private async createTable(): Promise<void> {
+     try {
+       await this.db.schema
+         .createTable("__donkeylabs_logs__")
+         .ifNotExists()
+         .addColumn("id", "text", (col) => col.primaryKey())
+         .addColumn("timestamp", "text", (col) => col.notNull())
+         .addColumn("level", "text", (col) => col.notNull())
+         .addColumn("message", "text", (col) => col.notNull())
+         .addColumn("source", "text", (col) => col.notNull())
+         .addColumn("source_id", "text")
+         .addColumn("tags", "text")
+         .addColumn("data", "text")
+         .addColumn("context", "text")
+         .execute();
+
+       // Create indexes for common queries
+       await sql`CREATE INDEX IF NOT EXISTS idx_logs_timestamp ON __donkeylabs_logs__(timestamp)`.execute(this.db);
+       await sql`CREATE INDEX IF NOT EXISTS idx_logs_source ON __donkeylabs_logs__(source, source_id)`.execute(this.db);
+       await sql`CREATE INDEX IF NOT EXISTS idx_logs_level ON __donkeylabs_logs__(level)`.execute(this.db);
+       await sql`CREATE INDEX IF NOT EXISTS idx_logs_source_timestamp ON __donkeylabs_logs__(source, timestamp)`.execute(this.db);
+
+       this.tableReady = true;
+     } catch (err) {
+       console.error("[LogsAdapter] Failed to create table:", err);
+       throw err;
+     }
+   }
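// For reference, the builder above compiles to DDL equivalent to the
// following (reconstructed by hand from the schema calls, not emitted
// by the package):
//
//   CREATE TABLE IF NOT EXISTS __donkeylabs_logs__ (
//     id TEXT PRIMARY KEY, timestamp TEXT NOT NULL, level TEXT NOT NULL,
//     message TEXT NOT NULL, source TEXT NOT NULL, source_id TEXT,
//     tags TEXT, data TEXT, context TEXT
//   );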
+
+   async writeBatch(entries: PersistentLogEntry[]): Promise<void> {
+     if (entries.length === 0) return;
+
+     await this.ensureTable();
+
+     // Insert rows one at a time for SQLite compatibility; note that each
+     // statement runs in its own implicit transaction.
+     for (const entry of entries) {
+       await this.db
+         .insertInto("__donkeylabs_logs__")
+         .values({
+           id: entry.id,
+           timestamp: entry.timestamp.toISOString(),
+           level: entry.level,
+           message: entry.message,
+           source: entry.source,
+           source_id: entry.sourceId ?? null,
+           tags: entry.tags ? JSON.stringify(entry.tags) : null,
+           data: entry.data ? JSON.stringify(entry.data) : null,
+           context: entry.context ? JSON.stringify(entry.context) : null,
+         })
+         .execute();
+     }
+   }
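// A sketch of an alternative, not in this release: group the rows into one
// explicit transaction with a single multi-row insert via Kysely's
// transaction API. Fewer round trips, but a failure rolls back the whole
// batch. The method name is hypothetical.
async writeBatchTransactional(entries: PersistentLogEntry[]): Promise<void> {
  if (entries.length === 0) return;
  await this.ensureTable();
  await this.db.transaction().execute(async (trx) => {
    await trx
      .insertInto("__donkeylabs_logs__")
      .values(
        entries.map((entry) => ({
          id: entry.id,
          timestamp: entry.timestamp.toISOString(),
          level: entry.level,
          message: entry.message,
          source: entry.source,
          source_id: entry.sourceId ?? null,
          tags: entry.tags ? JSON.stringify(entry.tags) : null,
          data: entry.data ? JSON.stringify(entry.data) : null,
          context: entry.context ? JSON.stringify(entry.context) : null,
        }))
      )
      .execute();
  });
}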
+
+   async write(entry: PersistentLogEntry): Promise<void> {
+     await this.writeBatch([entry]);
+   }
+
+   async query(filters: LogsQueryFilters): Promise<PersistentLogEntry[]> {
+     await this.ensureTable();
+
+     let query = this.db.selectFrom("__donkeylabs_logs__").selectAll();
+
+     if (filters.source) {
+       query = query.where("source", "=", filters.source);
+     }
+     if (filters.sourceId) {
+       query = query.where("source_id", "=", filters.sourceId);
+     }
+     if (filters.level) {
+       // Filter by minimum level - include the specified level and above
+       const minLevel = LOG_LEVEL_VALUES[filters.level];
+       const validLevels = (Object.keys(LOG_LEVEL_VALUES) as LogLevel[]).filter(
+         (l) => LOG_LEVEL_VALUES[l] >= minLevel
+       );
+       query = query.where("level", "in", validLevels);
+     }
+     if (filters.search) {
+       query = query.where("message", "like", `%${filters.search}%`);
+     }
+     if (filters.startDate) {
+       query = query.where("timestamp", ">=", filters.startDate.toISOString());
+     }
+     if (filters.endDate) {
+       query = query.where("timestamp", "<=", filters.endDate.toISOString());
+     }
+
+     const limit = filters.limit ?? 100;
+     const offset = filters.offset ?? 0;
+
+     const rows = await query
+       .orderBy("timestamp", "desc")
+       .limit(limit)
+       .offset(offset)
+       .execute();
+
+     let results = rows.map((r) => this.rowToEntry(r));
+
+     // Tag filtering is done in-memory (tags are stored as a JSON string),
+     // so it applies after limit/offset and a page may hold fewer rows
+     // than `limit` when tags are given.
+     if (filters.tags && filters.tags.length > 0) {
+       results = results.filter(
+         (e) => e.tags && filters.tags!.every((t) => e.tags!.includes(t))
+       );
+     }
+
+     return results;
+   }
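// Illustrative call, not part of the diff: the 50 most recent warn-or-above
// entries for one cron job from the past 24 hours. `adapter` and the
// "nightly-cleanup" source ID are assumed for the example.
const entries = await adapter.query({
  source: "cron",
  sourceId: "nightly-cleanup",
  level: "warn", // minimum level: matches warn and error
  startDate: new Date(Date.now() - 24 * 60 * 60 * 1000),
  limit: 50,
});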
+
+   async getBySource(
+     source: LogSource,
+     sourceId?: string,
+     limit: number = 100
+   ): Promise<PersistentLogEntry[]> {
+     return this.query({ source, sourceId, limit });
+   }
+
+   async count(filters: LogsQueryFilters): Promise<number> {
+     await this.ensureTable();
+
+     let query = this.db
+       .selectFrom("__donkeylabs_logs__")
+       .select(sql<number>`count(*)`.as("count"));
+
+     if (filters.source) {
+       query = query.where("source", "=", filters.source);
+     }
+     if (filters.sourceId) {
+       query = query.where("source_id", "=", filters.sourceId);
+     }
+     if (filters.level) {
+       const minLevel = LOG_LEVEL_VALUES[filters.level];
+       const validLevels = (Object.keys(LOG_LEVEL_VALUES) as LogLevel[]).filter(
+         (l) => LOG_LEVEL_VALUES[l] >= minLevel
+       );
+       query = query.where("level", "in", validLevels);
+     }
+     if (filters.search) {
+       query = query.where("message", "like", `%${filters.search}%`);
+     }
+     if (filters.startDate) {
+       query = query.where("timestamp", ">=", filters.startDate.toISOString());
+     }
+     if (filters.endDate) {
+       query = query.where("timestamp", "<=", filters.endDate.toISOString());
+     }
+
+     // Note: the in-memory tags filter used by query() is not applied here,
+     // so a tags-filtered query can return fewer rows than this count.
+     const result = await query.executeTakeFirst();
+     return Number(result?.count ?? 0);
+   }
+
+   async deleteOlderThan(date: Date, source?: LogSource): Promise<number> {
+     await this.ensureTable();
+
+     let query = this.db
+       .deleteFrom("__donkeylabs_logs__")
+       .where("timestamp", "<", date.toISOString());
+
+     if (source) {
+       query = query.where("source", "=", source);
+     }
+
+     const result = await query.execute();
+     return Number(result[0]?.numDeletedRows ?? 0);
+   }
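// Illustrative retention sweep, not part of the diff: prune system logs
// older than 30 days (the window is an arbitrary example value).
const cutoff = new Date(Date.now() - 30 * 24 * 60 * 60 * 1000);
const pruned = await adapter.deleteOlderThan(cutoff, "system");
console.log(`[LogsAdapter] pruned ${pruned} rows`);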
265
+
266
+ stop(): void {
267
+ try {
268
+ this.db.destroy();
269
+ } catch {
270
+ // Ignore errors during cleanup
271
+ }
272
+ }
273
+
274
+ private rowToEntry(row: LogsTable): PersistentLogEntry {
275
+ return {
276
+ id: row.id,
277
+ timestamp: new Date(row.timestamp),
278
+ level: row.level as LogLevel,
279
+ message: row.message,
280
+ source: row.source as LogSource,
281
+ sourceId: row.source_id ?? undefined,
282
+ tags: row.tags ? JSON.parse(row.tags) : undefined,
283
+ data: row.data ? JSON.parse(row.data) : undefined,
284
+ context: row.context ? JSON.parse(row.context) : undefined,
285
+ };
286
+ }
287
+ }
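End-to-end, the adapter needs no setup beyond construction; the table is created lazily on first use. A minimal sketch (the ID scheme and message are illustrative; the fields follow PersistentLogEntry):

const adapter = new KyselyLogsAdapter({ dbPath: ".donkeylabs/logs.db" });
await adapter.write({
  id: crypto.randomUUID(),
  timestamp: new Date(),
  level: "info",
  message: "server started",
  source: "system",
});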
@@ -0,0 +1,83 @@
+ /**
+  * Persistent Transport
+  *
+  * Bridges the Logger to the Logs service. Extracts source info from the
+  * entry context and routes it to the persistent logs service.
+  */
+
+ import type { LogEntry, LogTransport, LogLevel } from "./logger";
+ import type { Logs, LogSource } from "./logs";
+
+ // Log level ordering
+ const LOG_LEVEL_VALUES: Record<LogLevel, number> = {
+   debug: 0,
+   info: 1,
+   warn: 2,
+   error: 3,
+ };
+
+ // Known source tags that map to LogSource types
+ const SOURCE_TAG_MAP: Record<string, LogSource> = {
+   cron: "cron",
+   job: "job",
+   workflow: "workflow",
+   plugin: "plugin",
+   system: "system",
+   route: "route",
+ };
+
+ export interface PersistentTransportConfig {
+   /** Minimum level to persist (default: "info") */
+   minLevel?: LogLevel;
+ }
+
+ export class PersistentTransport implements LogTransport {
+   private logs: Logs;
+   private minLevel: number;
+
+   constructor(logs: Logs, config: PersistentTransportConfig = {}) {
+     this.logs = logs;
+     this.minLevel = LOG_LEVEL_VALUES[config.minLevel ?? "info"];
+   }
+
+   log(entry: LogEntry): void {
+     // Check minimum level
+     if (LOG_LEVEL_VALUES[entry.level] < this.minLevel) return;
+
+     // Extract source from context (set by scoped loggers)
+     let source: LogSource = "system";
+     let sourceId: string | undefined;
+
+     if (entry.context?.logSource) {
+       const contextSource = entry.context.logSource as string;
+       source = SOURCE_TAG_MAP[contextSource] ?? "plugin";
+       sourceId = entry.context.logSourceId as string | undefined;
+     } else if (entry.tags && entry.tags.length > 0) {
+       // Infer source from tags if no explicit context
+       for (const tag of entry.tags) {
+         const tagLower = tag.toLowerCase();
+         if (SOURCE_TAG_MAP[tagLower]) {
+           source = SOURCE_TAG_MAP[tagLower]!;
+           break;
+         }
+       }
+     }
+
+     // Strip logSource/logSourceId from context before persisting
+     let context = entry.context;
+     if (
+       context &&
+       (context.logSource !== undefined || context.logSourceId !== undefined)
+     ) {
+       const { logSource: _ls, logSourceId: _lsid, ...rest } = context;
+       context = Object.keys(rest).length > 0 ? rest : undefined;
+     }
+
+     this.logs.write({
+       level: entry.level,
+       message: entry.message,
+       source,
+       sourceId,
+       tags: entry.tags,
+       data: entry.data,
+       context,
+     });
+   }
+ }
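Wiring the transport up completes the picture. A sketch; `logger.addTransport`, the `Logs` construction, and the `logger.warn` signature are assumptions about the surrounding framework rather than APIs confirmed by this diff:

const transport = new PersistentTransport(logs, { minLevel: "warn" });
logger.addTransport(transport);

// From here on, warn and error entries also flow to the logs service.
logger.warn("disk usage above 90%", { tags: ["system"] });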