devrager 0.0.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (43) hide show
  1. package/dist/cli.js +988 -0
  2. package/dist/cli.js.map +7 -0
  3. package/dist/lib/adapters/amp.d.ts +3 -0
  4. package/dist/lib/adapters/amp.d.ts.map +1 -0
  5. package/dist/lib/adapters/amp.js +74 -0
  6. package/dist/lib/adapters/amp.js.map +1 -0
  7. package/dist/lib/adapters/claude.d.ts +3 -0
  8. package/dist/lib/adapters/claude.d.ts.map +1 -0
  9. package/dist/lib/adapters/claude.js +137 -0
  10. package/dist/lib/adapters/claude.js.map +1 -0
  11. package/dist/lib/adapters/cline.d.ts +3 -0
  12. package/dist/lib/adapters/cline.d.ts.map +1 -0
  13. package/dist/lib/adapters/cline.js +122 -0
  14. package/dist/lib/adapters/cline.js.map +1 -0
  15. package/dist/lib/adapters/codex.d.ts +3 -0
  16. package/dist/lib/adapters/codex.d.ts.map +1 -0
  17. package/dist/lib/adapters/codex.js +99 -0
  18. package/dist/lib/adapters/codex.js.map +1 -0
  19. package/dist/lib/adapters/index.d.ts +17 -0
  20. package/dist/lib/adapters/index.d.ts.map +1 -0
  21. package/dist/lib/adapters/index.js +27 -0
  22. package/dist/lib/adapters/index.js.map +1 -0
  23. package/dist/lib/adapters/opencode.d.ts +3 -0
  24. package/dist/lib/adapters/opencode.d.ts.map +1 -0
  25. package/dist/lib/adapters/opencode.js +86 -0
  26. package/dist/lib/adapters/opencode.js.map +1 -0
  27. package/dist/lib/adapters/pi.d.ts +3 -0
  28. package/dist/lib/adapters/pi.d.ts.map +1 -0
  29. package/dist/lib/adapters/pi.js +112 -0
  30. package/dist/lib/adapters/pi.js.map +1 -0
  31. package/dist/lib/adapters/zed.d.ts +3 -0
  32. package/dist/lib/adapters/zed.d.ts.map +1 -0
  33. package/dist/lib/adapters/zed.js +156 -0
  34. package/dist/lib/adapters/zed.js.map +1 -0
  35. package/dist/lib/detector/index.d.ts +32 -0
  36. package/dist/lib/detector/index.d.ts.map +1 -0
  37. package/dist/lib/detector/index.js +224 -0
  38. package/dist/lib/detector/index.js.map +1 -0
  39. package/dist/lib/index.d.ts +3 -0
  40. package/dist/lib/index.d.ts.map +1 -0
  41. package/dist/lib/index.js +3 -0
  42. package/dist/lib/index.js.map +1 -0
  43. package/package.json +40 -0
package/dist/cli.js ADDED
@@ -0,0 +1,988 @@
1
+ #!/usr/bin/env node
2
+
3
+ // src/adapters/amp.ts
4
+ import { readdir, readFile } from "node:fs/promises";
5
+ import { homedir } from "node:os";
6
+ import { join } from "node:path";
7
// Resolve the directory where Amp persists its thread JSON files,
// honoring XDG_DATA_HOME with a ~/.local/share fallback.
function getAmpThreadsDir() {
  const dataHome = process.env["XDG_DATA_HOME"] ?? join(homedir(), ".local", "share");
  return join(dataHome, "amp", "threads");
}
14
// Adapter that yields user-authored messages from Amp thread files.
// Missing directory or unreadable/malformed files are skipped silently.
function ampAdapter() {
  return {
    name: "amp",
    async *messages(options) {
      const threadsDir = getAmpThreadsDir();
      let entries;
      try {
        entries = await readdir(threadsDir);
      } catch {
        // No threads directory: Amp not installed or never used.
        return;
      }
      for (const entry of entries) {
        if (!entry.endsWith(".json")) continue;
        const threadId = entry.replace(".json", "");
        try {
          const raw = await readFile(join(threadsDir, entry), "utf-8");
          const thread = JSON.parse(raw);
          if (!Array.isArray(thread.messages)) continue;
          for (const msg of thread.messages) {
            if (msg.role !== "user") continue;
            const text = extractText(msg.content);
            if (!text) continue;
            const timestamp = msg.timestamp ?? msg.createdAt ?? void 0;
            if (options?.since && timestamp && new Date(timestamp) < options.since) continue;
            yield { text, timestamp, session: threadId };
          }
        } catch {
          // Skip threads that fail to read or parse.
        }
      }
    }
  };
}
54
// Flatten Amp message content (a string or an array of parts with a
// string `text` field) into one space-joined string; null when empty.
function extractText(content) {
  if (typeof content === "string") return content;
  if (!Array.isArray(content)) return null;
  const texts = [];
  for (const part of content) {
    if (typeof part === "object" && part !== null && typeof part.text === "string") {
      texts.push(part.text);
    }
  }
  return texts.length > 0 ? texts.join(" ") : null;
}
64
+
65
+ // src/adapters/claude.ts
66
+ import { createReadStream } from "node:fs";
67
+ import { readdir as readdir2, stat } from "node:fs/promises";
68
+ import { createInterface } from "node:readline";
69
+ import { homedir as homedir2 } from "node:os";
70
+ import { join as join2 } from "node:path";
71
// Root directory where Claude Code stores per-project session transcripts.
var CLAUDE_DIR = join2(homedir2(), ".claude", "projects");
72
// Adapter that yields user messages from Claude Code session transcripts
// (~/.claude/projects/<project>/<session>.jsonl), including subagent logs
// found under <project>/<session-dir>/subagents/.
function claudeAdapter() {
  return {
    name: "claude",
    async *messages(options) {
      const projectsDir = CLAUDE_DIR;
      let projectDirs;
      try {
        projectDirs = await readdir2(projectsDir);
      } catch {
        // No Claude data directory -> nothing to scan.
        return;
      }
      for (const projectDir of projectDirs) {
        const projectPath = join2(projectsDir, projectDir);
        // FIX: guard per-project fs calls so one unreadable or racily
        // removed entry cannot abort the whole scan with an unhandled
        // rejection (matches the cline/pi adapters' .catch(() => null)).
        const projectStat = await stat(projectPath).catch(() => null);
        if (!projectStat?.isDirectory()) continue;
        const entries = await readdir2(projectPath).catch(() => null);
        if (!entries) continue;
        const jsonlFiles = entries.filter((f) => f.endsWith(".jsonl"));
        for (const file of jsonlFiles) {
          const filePath = join2(projectPath, file);
          const session = file.replace(".jsonl", "");
          yield* parseClaudeJsonl(filePath, {
            session,
            project: projectDir,
            since: options?.since
          });
        }
        // Extension-less entries are session directories that may contain
        // subagent transcripts.
        const subdirs = entries.filter((f) => !f.includes("."));
        for (const subdir of subdirs) {
          const subagentsDir = join2(projectPath, subdir, "subagents");
          try {
            const subFiles = await readdir2(subagentsDir);
            const subJsonl = subFiles.filter((f) => f.endsWith(".jsonl"));
            for (const file of subJsonl) {
              yield* parseClaudeJsonl(join2(subagentsDir, file), {
                session: `${subdir}/${file.replace(".jsonl", "")}`,
                project: projectDir,
                since: options?.since
              });
            }
          } catch {
            // Subagents directory is optional.
          }
        }
      }
    }
  };
}
118
// Stream one Claude JSONL transcript line-by-line, yielding user messages
// and applying the optional `context.since` cutoff. Malformed lines are
// skipped without aborting the stream.
async function* parseClaudeJsonl(filePath, context) {
  const reader = createInterface({
    input: createReadStream(filePath, { encoding: "utf-8" }),
    crlfDelay: Infinity
  });
  for await (const rawLine of reader) {
    if (!rawLine.trim()) continue;
    try {
      const record = JSON.parse(rawLine);
      const text = extractUserText(record);
      if (!text) continue;
      const timestamp = extractTimestamp(record);
      if (context.since && timestamp && new Date(timestamp) < context.since) continue;
      yield {
        text,
        timestamp: timestamp ?? void 0,
        session: context.session,
        project: context.project
      };
    } catch {
      // Skip lines that are not valid JSON records.
    }
  }
}
144
// Pull the user-authored text from a transcript entry. Claude logs use
// either { type: "user"|"human", message: {...} } or a bare
// { role: "user", content } record.
function extractUserText(entry) {
  const kind = entry["type"];
  if (kind === "user" || kind === "human") {
    const message = entry["message"];
    return message ? contentToString(message["content"]) : null;
  }
  if (entry["role"] === "user") {
    return contentToString(entry["content"]);
  }
  return null;
}
160
// Normalize Claude message content (string or array of content parts) into
// a single string; returns null when there is no usable text.
function contentToString(content) {
  if (typeof content === "string") return content;
  if (Array.isArray(content)) {
    const parts = content.filter(
      // FIX: also require a string `text` field (as extractText2/3/4 do);
      // a bare { type: "text" } part previously contributed `undefined`
      // to the join, producing "" or "undefined" in the output.
      (p) => typeof p === "object" && p !== null && p.type === "text" && typeof p.text === "string"
    ).map((p) => p.text);
    return parts.length > 0 ? parts.join(" ") : null;
  }
  return null;
}
170
// Return the entry's timestamp string, preferring `timestamp` over
// `createdAt`; null when neither field holds a string.
function extractTimestamp(entry) {
  for (const key of ["timestamp", "createdAt"]) {
    const value = entry[key];
    if (typeof value === "string") return value;
  }
  return null;
}
175
+
176
+ // src/adapters/cline.ts
177
+ import { readdir as readdir3, readFile as readFile2, stat as stat2 } from "node:fs/promises";
178
+ import { existsSync } from "node:fs";
179
+ import { homedir as homedir3 } from "node:os";
180
+ import { join as join3 } from "node:path";
181
// Collect every existing Cline/Roo "tasks" directory: VS Code-family
// global storage for the known extension ids, plus the standalone
// ~/.cline/data/tasks location.
function getClineTaskDirs() {
  const extensionIds = ["saoudrizwan.claude-dev", "rooveterinaryinc.roo-cline"];
  const found = [];
  for (const base of getVSCodeGlobalStoragePaths()) {
    for (const extId of extensionIds) {
      const candidate = join3(base, extId, "tasks");
      if (existsSync(candidate)) found.push(candidate);
    }
  }
  const standalone = join3(homedir3(), ".cline", "data", "tasks");
  if (existsSync(standalone)) found.push(standalone);
  return found;
}
195
// Per-platform globalStorage roots for VS Code, VS Code Insiders and
// Cursor (in that order). Non-darwin/non-linux falls back to %APPDATA%.
function getVSCodeGlobalStoragePaths() {
  const products = ["Code", "Code - Insiders", "Cursor"];
  let appRoot;
  if (process.platform === "darwin") {
    appRoot = join3(homedir3(), "Library", "Application Support");
  } else if (process.platform === "linux") {
    appRoot = process.env["XDG_CONFIG_HOME"] ?? join3(homedir3(), ".config");
  } else {
    appRoot = process.env["APPDATA"] ?? join3(homedir3(), "AppData", "Roaming");
  }
  return products.map((product) => join3(appRoot, product, "User", "globalStorage"));
}
220
// Adapter that yields user messages from Cline/Roo task history files
// (tasks/<taskId>/api_conversation_history.json).
function clineAdapter() {
  return {
    name: "cline",
    async *messages(options) {
      const taskDirs = getClineTaskDirs();
      for (const tasksDir of taskDirs) {
        let taskIds;
        try {
          taskIds = await readdir3(tasksDir);
        } catch {
          continue;
        }
        for (const taskId of taskIds) {
          const taskDir = join3(tasksDir, taskId);
          const taskStat = await stat2(taskDir).catch(() => null);
          if (!taskStat?.isDirectory()) continue;
          const historyFile = join3(taskDir, "api_conversation_history.json");
          try {
            const raw = await readFile2(historyFile, "utf-8");
            const messages = JSON.parse(raw);
            if (!Array.isArray(messages)) continue;
            for (const msg of messages) {
              if (msg.role !== "user") continue;
              const text = extractText2(msg.content);
              if (!text) continue;
              const timestamp = msg.ts ?? void 0;
              if (options?.since && timestamp) {
                const ts = new Date(timestamp);
                if (ts < options.since) continue;
              }
              yield {
                text,
                // FIX: the timestamp was computed (and used for --since
                // filtering) but never yielded. Emit it ISO-formatted like
                // the other adapters; Cline's `ts` is epoch milliseconds.
                timestamp: Number.isFinite(timestamp) ? new Date(timestamp).toISOString() : void 0,
                session: taskId
              };
            }
          } catch {
            // Missing or malformed history file: skip this task.
          }
        }
      }
    }
  };
}
262
// Convert Cline message content (string or array of typed text parts)
// into a single string; null when no text is present.
function extractText2(content) {
  if (typeof content === "string") return content;
  if (!Array.isArray(content)) return null;
  const pieces = content
    .filter((p) => typeof p === "object" && p !== null && p.type === "text" && typeof p.text === "string")
    .map((p) => p.text);
  return pieces.length > 0 ? pieces.join(" ") : null;
}
272
+
273
+ // src/adapters/codex.ts
274
+ import { createReadStream as createReadStream2 } from "node:fs";
275
+ import { readdir as readdir4, stat as stat3 } from "node:fs/promises";
276
+ import { createInterface as createInterface2 } from "node:readline";
277
+ import { homedir as homedir4 } from "node:os";
278
+ import { join as join4 } from "node:path";
279
// Root directory of Codex CLI session logs (nested in dated subfolders).
var CODEX_SESSIONS_DIR = join4(homedir4(), ".codex", "sessions");
280
// Adapter that yields user messages from Codex CLI session logs by
// recursively walking ~/.codex/sessions.
function codexAdapter() {
  return {
    name: "codex",
    async *messages(options) {
      // All work is delegated to the recursive directory walker.
      yield* walkCodexSessions(CODEX_SESSIONS_DIR, options);
    }
  };
}
288
// Recursively walk the Codex sessions tree, yielding user messages from
// every .jsonl transcript found.
async function* walkCodexSessions(dir, options) {
  let entries;
  try {
    entries = await readdir4(dir);
  } catch {
    // Directory missing or unreadable: nothing to yield.
    return;
  }
  for (const entry of entries) {
    const fullPath = join4(dir, entry);
    // FIX: guard stat so an entry removed mid-walk (or a permission error)
    // cannot abort the whole generator — matches the cline/pi adapters.
    const entryStat = await stat3(fullPath).catch(() => null);
    if (!entryStat) continue;
    if (entryStat.isDirectory()) {
      yield* walkCodexSessions(fullPath, options);
    } else if (entry.endsWith(".jsonl")) {
      const session = entry.replace(".jsonl", "");
      yield* parseCodexJsonl(fullPath, { session, since: options?.since });
    }
  }
}
306
// Stream a Codex JSONL log and yield user-authored prompts, skipping the
// synthetic <environment_context>/<permissions instructions> payloads the
// CLI injects.
async function* parseCodexJsonl(filePath, context) {
  const reader = createInterface2({
    input: createReadStream2(filePath, { encoding: "utf-8" }),
    crlfDelay: Infinity
  });
  for await (const line of reader) {
    if (!line.trim()) continue;
    try {
      const record = JSON.parse(line);
      if (record.type !== "response_item") continue;
      const payload = record.payload;
      if (payload?.role !== "user") continue;
      const text = extractText3(payload.content);
      if (!text) continue;
      if (text.startsWith("<environment_context>") || text.startsWith("<permissions instructions>")) continue;
      if (context.since && record.timestamp && new Date(record.timestamp) < context.since) continue;
      yield { text, timestamp: record.timestamp, session: context.session };
    } catch {
      // Ignore malformed lines and keep streaming.
    }
  }
}
335
// Join all `input_text` parts of a Codex content array; null when the
// content is not an array or carries no text.
function extractText3(content) {
  if (!Array.isArray(content)) return null;
  const texts = [];
  for (const part of content) {
    if (typeof part === "object" && part !== null && part.type === "input_text" && typeof part.text === "string") {
      texts.push(part.text);
    }
  }
  return texts.length > 0 ? texts.join(" ") : null;
}
342
+
343
+ // src/adapters/opencode.ts
344
+ import { existsSync as existsSync2 } from "node:fs";
345
+ import { homedir as homedir5 } from "node:os";
346
+ import { join as join5 } from "node:path";
347
// Locate the OpenCode SQLite database: XDG data dir first, then (on macOS)
// the Application Support directory. Returns null when no database exists.
function getOpencodeDatabasePath() {
  const dataHome = process.env["XDG_DATA_HOME"] ?? join5(homedir5(), ".local", "share");
  const candidates = [join5(dataHome, "opencode", "opencode.db")];
  if (process.platform === "darwin") {
    candidates.push(
      join5(homedir5(), "Library", "Application Support", "opencode", "opencode.db")
    );
  }
  for (const candidate of candidates) {
    if (existsSync2(candidate)) return candidate;
  }
  return null;
}
366
// Adapter that yields user messages from the OpenCode SQLite database.
// Requires the optional better-sqlite3 module; degrades to a warning and
// an empty stream when it is not installed.
function opencodeAdapter() {
  return {
    name: "opencode",
    async *messages(options) {
      const dbPath = getOpencodeDatabasePath();
      if (!dbPath) return;
      let db;
      try {
        const BetterSqlite3 = await import("better-sqlite3");
        const Ctor = BetterSqlite3.default ?? BetterSqlite3;
        db = new Ctor(dbPath, { readonly: true });
      } catch {
        // FIX: warning previously said "devrage:"; the tool is "devrager".
        console.warn(
          "devrager: better-sqlite3 not available, skipping OpenCode sessions"
        );
        return;
      }
      try {
        yield* queryUserMessages(db, options);
      } finally {
        // Always release the read-only handle, even if iteration stops early.
        db.close();
      }
    }
  };
}
391
// Yield user-authored text messages from an open OpenCode database handle,
// oldest first. `options.since` filters on the message creation time.
function* queryUserMessages(db, options) {
  let query = `
    SELECT
      m.session_id,
      m.time_created,
      json_extract(p.data, '$.text') as text
    FROM message m
    JOIN part p ON p.message_id = m.id
    WHERE json_extract(m.data, '$.role') = 'user'
      AND json_extract(p.data, '$.type') = 'text'
  `;
  const params = [];
  if (options?.since) {
    // FIX: bind the cutoff as a statement parameter instead of
    // interpolating it into the SQL string (better-sqlite3 idiom).
    query += ` AND m.time_created >= ?`;
    params.push(options.since.getTime());
  }
  query += ` ORDER BY m.time_created ASC`;
  const rows = db.prepare(query).all(...params);
  for (const row of rows) {
    // Skip rows with no usable text (NULL or whitespace-only).
    if (!row.text || !row.text.trim()) continue;
    yield {
      text: row.text,
      timestamp: new Date(row.time_created).toISOString(),
      session: row.session_id
    };
  }
}
417
+
418
+ // src/adapters/pi.ts
419
+ import { createReadStream as createReadStream3 } from "node:fs";
420
+ import { readdir as readdir5, stat as stat4 } from "node:fs/promises";
421
+ import { createInterface as createInterface3 } from "node:readline";
422
+ import { homedir as homedir6 } from "node:os";
423
+ import { join as join6 } from "node:path";
424
// Root of Pi agent session logs; one subdirectory per project.
var PI_SESSIONS_DIR = join6(homedir6(), ".pi", "agent", "sessions");
425
// Adapter that yields user messages from Pi agent session logs
// (~/.pi/agent/sessions/<project>/<session>.jsonl).
function piAdapter() {
  return {
    name: "pi",
    async *messages(options) {
      let projectDirs;
      try {
        projectDirs = await readdir5(PI_SESSIONS_DIR);
      } catch {
        return;
      }
      for (const dirName of projectDirs) {
        // This folder holds subagent outputs, not transcripts.
        if (dirName === "subagent-artifacts") continue;
        const projectDirPath = join6(PI_SESSIONS_DIR, dirName);
        const dirStat = await stat4(projectDirPath).catch(() => null);
        if (!dirStat?.isDirectory()) continue;
        let sessionFiles;
        try {
          sessionFiles = await readdir5(projectDirPath);
        } catch {
          continue;
        }
        for (const file of sessionFiles) {
          if (!file.endsWith(".jsonl")) continue;
          yield* parsePiJsonl(join6(projectDirPath, file), {
            session: file.replace(".jsonl", ""),
            project: dirName,
            since: options?.since
          });
        }
      }
    }
  };
}
460
// Stream one Pi session JSONL file and yield its user messages.
async function* parsePiJsonl(filePath, context) {
  const reader = createInterface3({
    input: createReadStream3(filePath, { encoding: "utf-8" }),
    crlfDelay: Infinity
  });
  for await (const line of reader) {
    if (!line.trim()) continue;
    try {
      const record = JSON.parse(line);
      if (record.type !== "message") continue;
      const msg = record.message;
      if (msg?.role !== "user") continue;
      const text = extractText4(msg.content);
      if (!text) continue;
      // Prefer the envelope timestamp; fall back to the message's own
      // numeric timestamp, normalized to ISO.
      const timestamp = record.timestamp ?? (msg.timestamp != null ? new Date(msg.timestamp).toISOString() : void 0);
      if (context.since && timestamp && new Date(timestamp) < context.since) continue;
      yield { text, timestamp, session: context.session, project: context.project };
    } catch {
      // Skip malformed lines.
    }
  }
}
489
// Flatten Pi message content into one string, or null when none exists.
function extractText4(content) {
  if (typeof content === "string") return content;
  if (!Array.isArray(content)) return null;
  const texts = content.flatMap(
    (p) => typeof p === "object" && p !== null && p.type === "text" && typeof p.text === "string" ? [p.text] : []
  );
  return texts.length > 0 ? texts.join(" ") : null;
}
499
+
500
+ // src/adapters/zed.ts
501
+ import { readdir as readdir6, readFile as readFile3 } from "node:fs/promises";
502
+ import { existsSync as existsSync3 } from "node:fs";
503
+ import { homedir as homedir7 } from "node:os";
504
+ import { join as join7 } from "node:path";
505
// Resolve Zed's conversations and db directories for the current
// platform (macOS Application Support, otherwise XDG data dir).
function getZedPaths() {
  const base = process.platform === "darwin"
    ? join7(homedir7(), "Library", "Application Support", "Zed")
    : join7(process.env["XDG_DATA_HOME"] ?? join7(homedir7(), ".local", "share"), "zed");
  return {
    conversations: join7(base, "conversations"),
    db: join7(base, "db")
  };
}
522
// Adapter that yields user messages from Zed: legacy JSON text threads
// first, then agent threads stored in SQLite databases.
function zedAdapter() {
  return {
    name: "zed",
    async *messages(options) {
      const { conversations, db } = getZedPaths();
      yield* parseTextThreads(conversations, options);
      yield* parseAgentThreads(db, options);
    }
  };
}
532
// Yield user messages from Zed's legacy JSON conversation files.
// NOTE(review): the options argument is accepted but unused — these
// records carry no timestamps to apply a `since` filter to.
async function* parseTextThreads(dir, _options) {
  if (!existsSync3(dir)) return;
  let files;
  try {
    files = await readdir6(dir);
  } catch {
    return;
  }
  for (const file of files) {
    if (!file.endsWith(".json")) continue;
    const session = file.replace(".json", "");
    try {
      const raw = await readFile3(join7(dir, file), "utf-8");
      const conversation = JSON.parse(raw);
      if (!Array.isArray(conversation.messages)) continue;
      for (const msg of conversation.messages) {
        if (msg.role !== "user") continue;
        if (typeof msg.content !== "string" || !msg.content) continue;
        yield { text: msg.content, session };
      }
    } catch {
      // Unreadable or malformed conversation file: skip it.
    }
  }
}
561
// Yield user messages from Zed's agent-thread SQLite databases.
// The schema is undocumented, so this probes each .db file heuristically:
// find a table whose name looks message-like, require a `role` column,
// and pick the most plausible content column. Databases that fail any
// probe are skipped. The options argument is currently unused.
async function* parseAgentThreads(dbDir, _options) {
  if (!existsSync3(dbDir)) return;
  let dbFiles;
  try {
    const entries = await readdir6(dbDir);
    dbFiles = entries.filter((f) => f.endsWith(".db"));
  } catch {
    return;
  }
  if (dbFiles.length === 0) return;
  let Database;
  try {
    // better-sqlite3 is an optional dependency; silently skip if absent.
    const mod = await import("better-sqlite3");
    Database = mod.default ?? mod;
  } catch {
    return;
  }
  for (const dbFile of dbFiles) {
    const dbPath = join7(dbDir, dbFile);
    let db;
    try {
      db = new Database(
        dbPath,
        { readonly: true }
      );
    } catch {
      // Locked or corrupt database: move on to the next file.
      continue;
    }
    try {
      const tables = db.prepare("SELECT name FROM sqlite_master WHERE type='table'").all();
      const tableNames = tables.map((t) => t.name);
      // Heuristic: exact known names first, then anything message-like.
      const msgTable = tableNames.find(
        (t) => t === "messages" || t === "thread_messages" || t.includes("message")
      );
      if (!msgTable) {
        // NOTE(review): db.close() here is followed by the finally-close
        // below — presumably a second close on better-sqlite3 is a no-op.
        db.close();
        continue;
      }
      const columns = db.prepare(`PRAGMA table_info("${msgTable}")`).all();
      const colNames = columns.map((c2) => c2.name);
      const hasRole = colNames.includes("role");
      if (!hasRole) {
        db.close();
        continue;
      }
      // Prefer `content`, then `body`, otherwise assume `text`.
      const contentCol = colNames.includes("content") ? "content" : colNames.includes("body") ? "body" : "text";
      let query = `SELECT "${contentCol}" as text FROM "${msgTable}" WHERE role = 'user'`;
      const rows = db.prepare(query).all();
      for (const row of rows) {
        if (!row.text?.trim()) continue;
        // No session/timestamp available from this heuristic query.
        yield { text: row.text };
      }
    } catch {
      // Any probing/query error: skip this database entirely.
    } finally {
      db.close();
    }
  }
}
619
+
620
+ // src/adapters/index.ts
621
// Registry of adapter factories keyed by the CLI agent name.
var ADAPTERS = {
  claude: claudeAdapter,
  codex: codexAdapter,
  opencode: opencodeAdapter,
  amp: ampAdapter,
  cline: clineAdapter,
  pi: piAdapter,
  zed: zedAdapter
};
630
// Instantiate a single adapter by name; throws (listing the valid names)
// when the name is not registered.
function createAdapter(name) {
  const factory = ADAPTERS[name];
  if (factory) return factory();
  throw new Error(
    `unknown adapter: ${name} (available: ${Object.keys(ADAPTERS).join(", ")})`
  );
}
639
// Instantiate every registered adapter, in registry order.
function allAdapters() {
  const instances = [];
  for (const factory of Object.values(ADAPTERS)) {
    instances.push(factory());
  }
  return instances;
}
642
+
643
+ // src/detector/index.ts
644
// Profanity vocabulary: each entry maps a surface form (canonical spelling,
// compound, or common typo) to a severity tier and a canonical group used
// for tallying. Matched case-insensitively on word boundaries by
// buildPattern/runPattern below.
var WORDLIST = [
  // === FUCK family (strong) ===
  // Canonical forms
  { word: "fuck", severity: "strong", group: "fuck" },
  { word: "fucking", severity: "strong", group: "fuck" },
  { word: "fucked", severity: "strong", group: "fuck" },
  { word: "fucker", severity: "strong", group: "fuck" },
  { word: "fuckin", severity: "strong", group: "fuck" },
  { word: "fucks", severity: "strong", group: "fuck" },
  // Compound words
  { word: "motherfucker", severity: "strong", group: "fuck" },
  { word: "motherfucking", severity: "strong", group: "fuck" },
  { word: "mothafucka", severity: "strong", group: "fuck" },
  { word: "fuckup", severity: "strong", group: "fuck" },
  { word: "fuckoff", severity: "strong", group: "fuck" },
  { word: "clusterfuck", severity: "strong", group: "fuck" },
  { word: "fuckwit", severity: "strong", group: "fuck" },
  { word: "fucktard", severity: "strong", group: "fuck" },
  { word: "fuckface", severity: "strong", group: "fuck" },
  { word: "fuckhead", severity: "strong", group: "fuck" },
  // Typos — transpositions
  { word: "fukc", severity: "strong", group: "fuck" },
  { word: "fukcing", severity: "strong", group: "fuck" },
  { word: "fukced", severity: "strong", group: "fuck" },
  { word: "fukcer", severity: "strong", group: "fuck" },
  { word: "fcuk", severity: "strong", group: "fuck" },
  { word: "fcuking", severity: "strong", group: "fuck" },
  { word: "fcuked", severity: "strong", group: "fuck" },
  { word: "fuk", severity: "strong", group: "fuck" },
  { word: "fuking", severity: "strong", group: "fuck" },
  { word: "fuked", severity: "strong", group: "fuck" },
  { word: "fuker", severity: "strong", group: "fuck" },
  { word: "fuxk", severity: "strong", group: "fuck" },
  { word: "fuxking", severity: "strong", group: "fuck" },
  // === SHIT family (strong) ===
  { word: "shit", severity: "strong", group: "shit" },
  { word: "shitty", severity: "strong", group: "shit" },
  { word: "shitting", severity: "strong", group: "shit" },
  { word: "shits", severity: "strong", group: "shit" },
  { word: "shitted", severity: "strong", group: "shit" },
  // Compound words
  { word: "bullshit", severity: "strong", group: "shit" },
  { word: "horseshit", severity: "strong", group: "shit" },
  { word: "dipshit", severity: "strong", group: "shit" },
  { word: "shitshow", severity: "strong", group: "shit" },
  { word: "shithead", severity: "strong", group: "shit" },
  { word: "shithole", severity: "strong", group: "shit" },
  { word: "shitface", severity: "strong", group: "shit" },
  { word: "shitfaced", severity: "strong", group: "shit" },
  { word: "shitstain", severity: "strong", group: "shit" },
  { word: "shitbag", severity: "strong", group: "shit" },
  // Typos
  { word: "hsit", severity: "strong", group: "shit" },
  { word: "siht", severity: "strong", group: "shit" },
  { word: "shti", severity: "strong", group: "shit" },
  { word: "sjit", severity: "strong", group: "shit" },
  { word: "shjt", severity: "strong", group: "shit" },
  { word: "bulshit", severity: "strong", group: "shit" },
  { word: "bullsht", severity: "strong", group: "shit" },
  // === ASS family (moderate) ===
  { word: "ass", severity: "moderate", group: "ass" },
  { word: "asses", severity: "moderate", group: "ass" },
  // Compound words (these are strong)
  { word: "asshole", severity: "strong", group: "ass" },
  { word: "assholes", severity: "strong", group: "ass" },
  { word: "jackass", severity: "strong", group: "ass" },
  { word: "dumbass", severity: "strong", group: "ass" },
  { word: "fatass", severity: "moderate", group: "ass" },
  { word: "asshat", severity: "strong", group: "ass" },
  { word: "asswipe", severity: "strong", group: "ass" },
  { word: "badass", severity: "mild", group: "ass" },
  // === DAMN family (moderate) ===
  { word: "damn", severity: "moderate", group: "damn" },
  { word: "damned", severity: "moderate", group: "damn" },
  { word: "damnit", severity: "moderate", group: "damn" },
  { word: "dammit", severity: "moderate", group: "damn" },
  { word: "goddamn", severity: "moderate", group: "damn" },
  { word: "goddamnit", severity: "moderate", group: "damn" },
  { word: "goddammit", severity: "moderate", group: "damn" },
  // === BITCH family (strong) ===
  { word: "bitch", severity: "strong", group: "bitch" },
  { word: "bitches", severity: "strong", group: "bitch" },
  { word: "bitching", severity: "strong", group: "bitch" },
  { word: "bitchy", severity: "strong", group: "bitch" },
  { word: "bitchass", severity: "strong", group: "bitch" },
  // === BASTARD (strong) ===
  { word: "bastard", severity: "strong", group: "bastard" },
  { word: "bastards", severity: "strong", group: "bastard" },
  // === PISS family (moderate) ===
  { word: "piss", severity: "moderate", group: "piss" },
  { word: "pissed", severity: "moderate", group: "piss" },
  { word: "pissing", severity: "moderate", group: "piss" },
  { word: "pissoff", severity: "moderate", group: "piss" },
  // === DICK (moderate) ===
  { word: "dick", severity: "moderate", group: "dick" },
  { word: "dickhead", severity: "strong", group: "dick" },
  // === CRAP (moderate) ===
  { word: "crap", severity: "moderate", group: "crap" },
  { word: "crappy", severity: "moderate", group: "crap" },
  { word: "crapping", severity: "moderate", group: "crap" },
  // === HELL (mild) ===
  { word: "hell", severity: "mild", group: "hell" },
  // === Abbreviations (mild) ===
  { word: "wtf", severity: "mild", group: "wtf" },
  { word: "stfu", severity: "mild", group: "stfu" },
  { word: "lmfao", severity: "mild", group: "lmfao" },
  { word: "lmao", severity: "mild", group: "lmao" },
  // === CUNT (strong) ===
  { word: "cunt", severity: "strong", group: "cunt" },
  { word: "cunts", severity: "strong", group: "cunt" }
];
755
// Collapse runs of a repeated character to a single occurrence
// (e.g. "fuuuck" -> "fuck") so stretched spellings still match.
function collapseRepeats(text) {
  const repeatedRun = /(.)\1+/g;
  return text.replace(repeatedRun, "$1");
}
758
// Build one case-insensitive, word-boundary alternation from the word
// list. Longest-first ordering makes compounds win over their prefixes.
function buildPattern(words) {
  const alternatives = words
    .map((entry) => entry.word)
    .sort((a, b) => b.length - a.length)
    .join("|");
  return new RegExp(`\\b(${alternatives})\\b`, "gi");
}
763
// Shared /g regex over the full wordlist (lastIndex is reset before use).
var DEFAULT_PATTERN = buildPattern(WORDLIST);
// Fast lookup from a matched lowercase word to its severity/group entry.
var WORD_MAP = new Map(WORDLIST.map((w) => [w.word.toLowerCase(), w]));
765
// Scan `text` for profanity. Runs the pattern over the lowercased text
// and, when repeated-letter collapsing changes it, over the collapsed
// form too (catching stretched spellings). Matches are deduped by index.
function detect(text) {
  const matches = [];
  const seen = new Set();
  const lowered = text.toLowerCase();
  runPattern(text, lowered, matches, seen);
  const collapsed = collapseRepeats(lowered);
  if (collapsed !== lowered) {
    runPattern(text, collapsed, matches, seen);
  }
  return { count: matches.length, matches };
}
775
// Run the shared profanity regex over `searchText`, appending matches at
// unseen indices to `matches` and recording those indices in `seen`.
function runPattern(_originalText, searchText, matches, seen) {
  // The shared /g regex is stateful; always rewind before scanning.
  DEFAULT_PATTERN.lastIndex = 0;
  for (let m = DEFAULT_PATTERN.exec(searchText); m !== null; m = DEFAULT_PATTERN.exec(searchText)) {
    if (seen.has(m.index)) continue;
    const word = m[0].toLowerCase();
    const entry = WORD_MAP.get(word);
    if (!entry) continue;
    seen.add(m.index);
    matches.push({
      word,
      index: m.index,
      severity: entry.severity,
      group: entry.group
    });
  }
}
792
+
793
+ // src/commands/scan.ts
794
// ANSI escape sequences for styled terminal output.
var c = {
  reset: "\x1B[0m",
  bold: "\x1B[1m",
  dim: "\x1B[2m",
  red: "\x1B[31m",
  green: "\x1B[32m",
  yellow: "\x1B[33m",
  blue: "\x1B[34m",
  magenta: "\x1B[35m",
  cyan: "\x1B[36m",
  white: "\x1B[37m",
  gray: "\x1B[90m"
};
807
// Humorous status lines shown while scanning; the spinner starts at a
// random one and advances per adapter.
var SPINNER_MESSAGES = [
  "Tallying the damage",
  "Reviewing your outbursts",
  "Judging your vocabulary",
  "Computing your shame",
  "Cataloging the profanity",
  "Measuring your frustration",
  "Assessing the verbal carnage",
  "Quantifying your displeasure",
  "Auditing your language",
  "Tabulating regrets"
];
819
// Minimal stdout spinner: a rotating snarky message with animated dots,
// redrawn in place every 300ms.
function createSpinner() {
  let messageIdx = 0;
  let dotCount = 0;
  let timer = null;
  const render = () => {
    dotCount = (dotCount + 1) % 4;
    const msg = SPINNER_MESSAGES[messageIdx % SPINNER_MESSAGES.length];
    const dots = ".".repeat(dotCount || 1);
    process.stdout.write(
      `\r ${c.dim}${msg}${dots}${c.reset} `
    );
  };
  return {
    // Begin animating, starting from a random message.
    start() {
      messageIdx = Math.floor(Math.random() * SPINNER_MESSAGES.length);
      timer = setInterval(render, 300);
    },
    // Advance to the next message (called once per adapter).
    update() {
      messageIdx++;
    },
    // Halt the animation and wipe the spinner line.
    stop() {
      if (timer) {
        clearInterval(timer);
        timer = null;
      }
      process.stdout.write("\r" + " ".repeat(60) + "\r");
    }
  };
}
847
/**
 * Parses `scan` subcommand flags.
 *
 * Recognized flags: --agent/-a <name>, --since/-s <date>, --help/-h.
 * Unknown arguments are silently ignored. Exits the process on --help (0)
 * or on an unparseable --since date (1).
 *
 * @param {string[]} args - raw CLI arguments after the command word.
 * @returns {{agent?: string, since?: Date}} parsed options.
 */
function parseArgs(args) {
  const options = {};
  let i = 0;
  while (i < args.length) {
    const flag = args[i];
    i += 1;
    switch (flag) {
      case "--agent":
      case "-a":
        // Consumes the following token as the agent name (may be undefined).
        options.agent = args[i];
        i += 1;
        break;
      case "--since":
      case "-s": {
        const raw = args[i];
        i += 1;
        if (raw) {
          options.since = new Date(raw);
          if (isNaN(options.since.getTime())) {
            console.error(`invalid date: ${raw}`);
            process.exit(1);
          }
        }
        break;
      }
      case "--help":
      case "-h":
        console.log(`devrager scan \u2014 scan sessions for profanity

Options:
  --agent, -a <name>   Scan only a specific agent (claude, codex, opencode, amp, cline, zed)
  --since, -s <date>   Only scan messages after this date (ISO 8601)
  --help, -h           Show this help`);
        process.exit(0);
        break;
    }
  }
  return options;
}
/**
 * `devrager scan` \u2014 walks session history from one or all agent adapters,
 * runs the profanity detector over every message, and prints a colorized
 * summary report (totals, per-agent breakdown, top word groups) to stdout.
 *
 * @param {string[]} args - CLI arguments after the `scan` command word.
 */
async function scan(args) {
  const options = parseArgs(args);
  // One named adapter, or every known adapter when no --agent was given.
  const adapters = options.agent ? [createAdapter(options.agent)] : allAdapters();
  const spinner = createSpinner();
  spinner.start();
  const groupTally = {}; // word group -> total hit count
  const variantTally = {}; // word group -> { variant spelling -> count }
  let totalMessages = 0;
  let totalSwears = 0;
  const perAgent = {}; // adapter name -> { messages, swears }
  for (const adapter of adapters) {
    let agentMessages = 0;
    let agentSwears = 0;
    // Rotate the spinner caption once per agent.
    spinner.update();
    for await (const message of adapter.messages({ since: options.since })) {
      totalMessages++;
      agentMessages++;
      const result = detect(message.text);
      if (result.count > 0) {
        totalSwears += result.count;
        agentSwears += result.count;
        for (const match of result.matches) {
          groupTally[match.group] = (groupTally[match.group] ?? 0) + 1;
          // Lazily create the per-group variant bucket on first hit.
          const variants = variantTally[match.group] ??= {};
          variants[match.word] = (variants[match.word] ?? 0) + 1;
        }
      }
    }
    // Agents with no messages at all are omitted from the breakdown.
    if (agentMessages > 0) {
      perAgent[adapter.name] = { messages: agentMessages, swears: agentSwears };
    }
  }
  spinner.stop();
  console.log("");
  // NOTE(review): header reads "devrage" while the package is "devrager" \u2014
  // confirm this is intentional branding rather than a typo.
  console.log(` ${c.bold}${c.red}devrage${c.reset} ${c.dim}report${c.reset}`);
  console.log(` ${c.dim}${"\u2500".repeat(30)}${c.reset}`);
  console.log("");
  console.log(` ${c.dim}messages scanned${c.reset} ${c.bold}${totalMessages}${c.reset}`);
  console.log(` ${c.dim}total swears${c.reset} ${c.bold}${c.red}${totalSwears}${c.reset}`);
  const activeAgents = Object.entries(perAgent);
  // Per-agent table is only worth printing when more than one agent had data.
  if (activeAgents.length > 1) {
    console.log("");
    console.log(` ${c.bold}by agent${c.reset}`);
    for (const [name, stats] of activeAgents) {
      const rate = (stats.swears / stats.messages * 100).toFixed(1);
      console.log(
        ` ${c.cyan}${name.padEnd(10)}${c.reset} ${c.bold}${String(stats.swears).padStart(4)}${c.reset} ${c.dim}in ${stats.messages} messages (${rate}%)${c.reset}`
      );
    }
  }
  if (totalSwears > 0) {
    // Top 10 groups by count, each with up to 15 non-canonical variant spellings.
    const sorted = Object.entries(groupTally).sort(([, a], [, b]) => b - a);
    console.log("");
    console.log(` ${c.bold}top words${c.reset}`);
    for (const [group, count] of sorted.slice(0, 10)) {
      const variants = variantTally[group] ?? {};
      const variantList = Object.entries(variants).sort(([, a], [, b]) => b - a).filter(([v]) => v !== group).slice(0, 15).map(([v, cnt]) => `${c.dim}${v}${c.reset} ${cnt}`).join(`${c.dim},${c.reset} `);
      const suffix = variantList ? ` ${c.dim}(${c.reset}${variantList}${c.dim})${c.reset}` : "";
      console.log(
        ` ${c.yellow}${group.padEnd(12)}${c.reset} ${c.bold}${String(count).padStart(4)}${c.reset}${suffix}`
      );
    }
  }
  console.log("");
  if (totalSwears === 0) {
    console.log(` ${c.green}squeaky clean! not a single swear found.${c.reset}`);
    console.log("");
  }
}
+
944
+ // src/cli.ts
945
// Command-name -> handler dispatch table for the CLI entry point.
var COMMANDS = {
  scan
};
/** Prints the top-level CLI usage/help text to stdout. */
function usage() {
  console.log(`devrager \u2014 count how many times you swear at your coding agents

Usage:
  devrager <command> [options]

Commands:
  scan   Scan sessions for profanity

Options:
  --help, -h   Show this help message
  --version    Show version

Examples:
  devrager scan
  devrager scan --agent claude
  devrager scan --since 2025-01-01`);
}
/**
 * CLI entry point: handles --help/--version, dispatches a known command to
 * its handler, and falls back to `scan` for anything else (including no
 * arguments at all, so plain `devrager` scans everything).
 */
async function main() {
  const argv = process.argv.slice(2);
  const [command] = argv;
  if (command === "--help" || command === "-h") {
    usage();
    process.exit(0);
  }
  if (command === "--version") {
    console.log("0.0.4");
    process.exit(0);
  }
  // COMMANDS[""] and COMMANDS[unknown] are both undefined, so only a real
  // command name yields a handler here.
  const handler = command ? COMMANDS[command] : void 0;
  if (handler === undefined) {
    // Unrecognized (or absent) command: treat all args as scan options.
    await scan(argv);
  } else {
    await handler(argv.slice(1));
  }
}
// Bootstrap: run the CLI; any unhandled rejection is reported and the
// process exits non-zero so shell callers can detect failure.
main().catch((err) => {
  console.error(err);
  process.exit(1);
});
+ //# sourceMappingURL=cli.js.map